--- /dev/null
-aes_dep = dependency('openssl', required : get_option('aes'))
+ aes_sources = [
+ 'gstaes.c',
+ 'gstaeshelper.c',
+ 'gstaesenc.c',
+ 'gstaesdec.c',
+ ]
+
+ aes_cargs = []
++aes_dep = dependency('openssl1.1', required : get_option('aes'))
+ if aes_dep.found()
+ aes_cargs += ['-DHAVE_OPENSSL']
+ else
+ # OpenSSL is a hard requirement for this plugin: nothing to build
+ # without it, so leave the subdirectory early.
+ subdir_done()
+ endif
+
+ gstaes = library('gstaes',
+ aes_sources,
+ c_args : gst_plugins_bad_args + aes_cargs,
+ link_args : noseh_link_args,
+ include_directories : [configinc],
+ dependencies : [gstpbutils_dep, gstvideo_dep,
+ aes_dep, gio_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+ )
+ pkgconfig.generate(gstaes, install_dir : plugins_pkgconfig_install_dir)
+ plugins += [gstaes]
+ # aes_dep is re-declared here so other targets can pick up this
+ # directory's headers; the OpenSSL dependency object is no longer needed
+ # past this point.
+ aes_dep = declare_dependency(include_directories : include_directories('.'))
--- /dev/null
- gst_pad_set_caps (b->src, gst_static_pad_template_get_caps (&src_template));
+ /* GStreamer bz2 encoder
+ * Copyright (C) 2006 Lutz Müller <lutz topfrose de>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+ #include "gstbz2enc.h"
+
+ #include <bzlib.h>
+ #include <string.h>
+
+ GST_DEBUG_CATEGORY_STATIC (bz2enc_debug);
+ #define GST_CAT_DEFAULT bz2enc_debug
+
+ /* Sink accepts any stream; src produces bzip2-compressed data. */
+ static GstStaticPadTemplate sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("ANY"));
+ static GstStaticPadTemplate src_template =
+ GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-bzip"));
+
+ #define DEFAULT_BLOCK_SIZE 6
+ #define DEFAULT_BUFFER_SIZE 1024
+
+ enum
+ {
+ PROP_0,
+ PROP_BLOCK_SIZE,
+ PROP_BUFFER_SIZE
+ };
+
+ struct _GstBz2enc
+ {
+ GstElement parent;
+
+ GstPad *sink;
+ GstPad *src;
+
+ /* Properties */
+ /* bzlib block size, valid range 1-9 (see property spec in class_init) */
+ guint block_size;
+ /* size of each output buffer allocated while compressing */
+ guint buffer_size;
+
+ /* TRUE once BZ2_bzCompressInit() succeeded */
+ gboolean ready;
+ bz_stream stream;
+ /* total compressed bytes pushed downstream since last (re)init */
+ guint64 offset;
+ };
+
+ struct _GstBz2encClass
+ {
+ GstElementClass parent_class;
+ };
+
+ #define gst_bz2enc_parent_class parent_class
+ G_DEFINE_TYPE (GstBz2enc, gst_bz2enc, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE (bz2enc, "bz2enc", GST_RANK_NONE, GST_TYPE_BZ2ENC);
+
+ /* Free the bzlib compressor state (if any) and mark the element as not
+  * ready.  Safe to call repeatedly. */
+ static void
+ gst_bz2enc_compress_end (GstBz2enc * b)
+ {
+ g_return_if_fail (GST_IS_BZ2ENC (b));
+
+ if (b->ready) {
+ BZ2_bzCompressEnd (&b->stream);
+ memset (&b->stream, 0, sizeof (b->stream));
+ b->ready = FALSE;
+ }
+ }
+
+ /* (Re)initialise the bzlib compressor with the current block-size
+  * property.  Any previous stream state is torn down first and the output
+  * byte counter is reset.  On failure an element error is posted and
+  * b->ready stays FALSE. */
+ static void
+ gst_bz2enc_compress_init (GstBz2enc * b)
+ {
+ g_return_if_fail (GST_IS_BZ2ENC (b));
+
+ gst_bz2enc_compress_end (b);
+ b->offset = 0;
+ switch (BZ2_bzCompressInit (&b->stream, b->block_size, 0, 0)) {
+ case BZ_OK:
+ b->ready = TRUE;
+ return;
+ default:
+ b->ready = FALSE;
+ GST_ELEMENT_ERROR (b, CORE, FAILED, (NULL),
+ ("Failed to start compression."));
+ return;
+ }
+ }
+
+ /* Sink pad event handler.  On EOS the compressor is drained with
+  * BZ_FINISH and the resulting buffers are pushed downstream before the
+  * EOS event itself is forwarded; afterwards the compressor is re-armed
+  * for a potential new stream. */
+ static gboolean
+ gst_bz2enc_event (GstPad * pad, GstObject * parent, GstEvent * e)
+ {
+ GstBz2enc *b;
+ gboolean ret;
+
+ b = GST_BZ2ENC (parent);
+ switch (GST_EVENT_TYPE (e)) {
+ case GST_EVENT_EOS:{
+ GstFlowReturn flow = GST_FLOW_OK;
+ int r = BZ_FINISH_OK;
+
+ /* Keep requesting BZ_FINISH until bzlib reports BZ_STREAM_END. */
+ do {
+ GstBuffer *out;
+ GstMapInfo omap;
+ guint n;
+
+ out = gst_buffer_new_and_alloc (b->buffer_size);
+
+ gst_buffer_map (out, &omap, GST_MAP_WRITE);
+ b->stream.next_out = (char *) omap.data;
+ b->stream.avail_out = omap.size;
+ r = BZ2_bzCompress (&b->stream, BZ_FINISH);
+ gst_buffer_unmap (out, &omap);
+ if ((r != BZ_FINISH_OK) && (r != BZ_STREAM_END)) {
+ GST_ELEMENT_ERROR (b, STREAM, ENCODE, (NULL),
+ ("Failed to finish to compress (error code %i).", r));
+ gst_buffer_unref (out);
+ break;
+ }
+
+ /* Nothing was produced this iteration: drop the empty buffer. */
+ n = gst_buffer_get_size (out);
+ if (b->stream.avail_out >= n) {
+ gst_buffer_unref (out);
+ break;
+ }
+
+ gst_buffer_resize (out, 0, n - b->stream.avail_out);
+ n = gst_buffer_get_size (out);
+ GST_BUFFER_OFFSET (out) = b->stream.total_out_lo32 - n;
+
+ flow = gst_pad_push (b->src, out);
+
+ if (flow != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (b, "push on EOS failed: %s",
+ gst_flow_get_name (flow));
+ break;
+ }
+ } while (r != BZ_STREAM_END);
+
+ /* Forward EOS downstream even if draining failed, but report the
+  * failure to the caller via the return value. */
+ ret = gst_pad_event_default (pad, parent, e);
+
+ if (r != BZ_STREAM_END || flow != GST_FLOW_OK)
+ ret = FALSE;
+
+ gst_bz2enc_compress_init (b);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, e);
+ break;
+ }
+
+ return ret;
+ }
+
+ /* Sink pad chain function: feed the whole input buffer through
+  * BZ2_bzCompress (BZ_RUN) and push each produced output buffer
+  * downstream.  Takes ownership of @in (always unreffed in "done"). */
+ static GstFlowReturn
+ gst_bz2enc_chain (GstPad * pad, GstObject * parent, GstBuffer * in)
+ {
+ GstFlowReturn flow = GST_FLOW_OK;
+ GstBuffer *out;
+ GstBz2enc *b;
+ guint n;
+ int bz2_ret;
+ GstMapInfo map = GST_MAP_INFO_INIT, omap;
+
+ b = GST_BZ2ENC (parent);
+
+ if (!b->ready)
+ goto not_ready;
+
+ gst_buffer_map (in, &map, GST_MAP_READ);
+ b->stream.next_in = (char *) map.data;
+ b->stream.avail_in = map.size;
+ /* Loop until bzlib has consumed all of the input. */
+ while (b->stream.avail_in) {
+ out = gst_buffer_new_and_alloc (b->buffer_size);
+
+ gst_buffer_map (out, &omap, GST_MAP_WRITE);
+ b->stream.next_out = (char *) omap.data;
+ b->stream.avail_out = omap.size;
+ bz2_ret = BZ2_bzCompress (&b->stream, BZ_RUN);
+ gst_buffer_unmap (out, &omap);
+ if (bz2_ret != BZ_RUN_OK)
+ goto compress_error;
+
+ /* No output produced (bzlib buffered the input internally): done. */
+ n = gst_buffer_get_size (out);
+ if (b->stream.avail_out >= n) {
+ gst_buffer_unref (out);
+ break;
+ }
+
+ gst_buffer_resize (out, 0, n - b->stream.avail_out);
+ n = gst_buffer_get_size (out);
+ /* NOTE(review): only the low 32 bits of bzlib's output counter are
+  * used here; offsets would wrap for streams >4GiB — confirm intent. */
+ GST_BUFFER_OFFSET (out) = b->stream.total_out_lo32 - n;
+
+ flow = gst_pad_push (b->src, out);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ b->offset += n;
+ }
+
+ done:
+
+ gst_buffer_unmap (in, &map);
+ gst_buffer_unref (in);
+ return flow;
+
+ /* ERRORS */
+ not_ready:
+ {
+ GST_ELEMENT_ERROR (b, LIBRARY, FAILED, (NULL), ("Compressor not ready."));
+ flow = GST_FLOW_FLUSHING;
+ goto done;
+ }
+ compress_error:
+ {
+ GST_ELEMENT_ERROR (b, STREAM, ENCODE, (NULL),
+ ("Failed to compress data (error code %i)", bz2_ret));
+ gst_bz2enc_compress_init (b);
+ gst_buffer_unref (out);
+ flow = GST_FLOW_ERROR;
+ goto done;
+ }
+ }
+
+ /* Instance init: create sink/src pads, pin the src caps, and bring up
+  * the compressor with the default properties. */
+ static void
+ gst_bz2enc_init (GstBz2enc * b)
+ {
++ GstCaps *tmp = NULL;
+ b->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_chain_function (b->sink, GST_DEBUG_FUNCPTR (gst_bz2enc_chain));
+ gst_pad_set_event_function (b->sink, GST_DEBUG_FUNCPTR (gst_bz2enc_event));
+ gst_element_add_pad (GST_ELEMENT (b), b->sink);
+
+ b->src = gst_pad_new_from_static_template (&src_template, "src");
+ /* gst_static_pad_template_get_caps() returns a new reference that
+  * gst_pad_set_caps() does not take over, so it is dropped below to
+  * avoid leaking the caps. */
++ gst_pad_set_caps (b->src, tmp = gst_static_pad_template_get_caps (&src_template));
+ gst_pad_use_fixed_caps (b->src);
+ gst_element_add_pad (GST_ELEMENT (b), b->src);
+
++ gst_caps_unref(tmp);
+ b->block_size = DEFAULT_BLOCK_SIZE;
+ b->buffer_size = DEFAULT_BUFFER_SIZE;
+ gst_bz2enc_compress_init (b);
+ }
+
+ /* GObject finalize: release the bzlib state before chaining up. */
+ static void
+ gst_bz2enc_finalize (GObject * object)
+ {
+ GstBz2enc *b = GST_BZ2ENC (object);
+
+ gst_bz2enc_compress_end (b);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* GObject property getter for block-size and buffer-size. */
+ static void
+ gst_bz2enc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstBz2enc *b = GST_BZ2ENC (object);
+
+ switch (prop_id) {
+ case PROP_BLOCK_SIZE:
+ g_value_set_uint (value, b->block_size);
+ break;
+ case PROP_BUFFER_SIZE:
+ g_value_set_uint (value, b->buffer_size);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+ /* GObject property setter.  Note that changing block-size reinitialises
+  * the compressor, discarding any in-progress stream state. */
+ static void
+ gst_bz2enc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstBz2enc *b = GST_BZ2ENC (object);
+
+ switch (prop_id) {
+ case PROP_BLOCK_SIZE:
+ b->block_size = g_value_get_uint (value);
+ gst_bz2enc_compress_init (b);
+ break;
+ case PROP_BUFFER_SIZE:
+ b->buffer_size = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+ /* Class init: install properties, pad templates, element metadata and
+  * the debug category. */
+ static void
+ gst_bz2enc_class_init (GstBz2encClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->finalize = gst_bz2enc_finalize;
+ gobject_class->set_property = gst_bz2enc_set_property;
+ gobject_class->get_property = gst_bz2enc_get_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BLOCK_SIZE,
+ g_param_spec_uint ("block-size", "Block size", "Block size",
+ 1, 9, DEFAULT_BLOCK_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BUFFER_SIZE,
+ g_param_spec_uint ("buffer-size", "Buffer size", "Buffer size",
+ 1, G_MAXUINT, DEFAULT_BUFFER_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+ gst_element_class_set_static_metadata (gstelement_class, "BZ2 encoder",
+ "Codec/Encoder", "Compresses streams",
+ "Lutz Mueller <lutz@users.sourceforge.net>");
+
+ GST_DEBUG_CATEGORY_INIT (bz2enc_debug, "bz2enc", 0, "BZ2 compressor");
+ }
--- /dev/null
- *
+ /*
+ * DASH demux plugin for GStreamer
+ *
+ * gstdashdemux.c
+ *
+ * Copyright (C) 2012 Orange
+ *
+ * Authors:
+ * David Corvoysier <david.corvoysier@orange.com>
+ * Hamid Zakari <hamid.zakari@gmail.com>
+ *
+ * Copyright (C) 2013 Smart TV Alliance
+ * Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+ /**
+ * SECTION:element-dashdemux
+ * @title: dashdemux
+ *
+ * DASH demuxer element.
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 playbin uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd"
+ * ]|
+ */
+
+ /* Implementation notes:
+ *
+ * The following section describes how dashdemux works internally.
+ *
+ * Introduction:
+ *
+ * dashdemux is a "fake" demux, as unlike traditional demux elements, it
+ * doesn't split data streams contained in an envelope to expose them to
+ * downstream decoding elements.
+ *
+ * Instead, it parses an XML file called a manifest to identify a set of
+ * individual stream fragments it needs to fetch and expose to the actual
+ * demux elements that will handle them (this behavior is sometimes
+ * referred as the "demux after a demux" scenario).
+ *
+ * For a given section of content, several representations corresponding
+ * to different bitrates may be available: dashdemux will select the most
+ * appropriate representation based on local conditions (typically the
+ * available bandwidth and the amount of buffering available, capped by
+ * a maximum allowed bitrate).
+ *
+ * The representation selection algorithm can be configured using
+ * specific properties: max bitrate, min/max buffering, bandwidth ratio.
+ *
+ *
+ * General Design:
+ *
+ * dashdemux has a single sink pad that accepts the data corresponding
+ * to the manifest, typically fetched from an HTTP or file source.
+ *
+ * dashdemux exposes the streams it recreates based on the fragments it
+ * fetches through dedicated src pads corresponding to the caps of the
+ * fragments container (ISOBMFF/MP4 or MPEG2TS).
+ *
+ * During playback, new representations will typically be exposed as a
+ * new set of pads (see 'Switching between representations' below).
+ *
+ * Fragments downloading is performed using a dedicated task that fills
+ * an internal queue. Another task is in charge of popping fragments
+ * from the queue and pushing them downstream.
+ *
+ * Switching between representations:
+ *
+ * Decodebin supports scenarios allowing to seamlessly switch from one
+ * stream to another inside the same "decoding chain".
+ *
+ * To achieve that, it combines the elements it autoplugged in chains
+ * and groups, allowing only one decoding group to be active at a given
+ * time for a given chain.
+ *
+ * A chain can signal decodebin that it is complete by sending a
+ * no-more-pads event, but even after that new pads can be added to
+ * create new subgroups, providing that a new no-more-pads event is sent.
+ *
+ * We take advantage of that to dynamically create a new decoding group
+ * in order to select a different representation during playback.
+ *
+ * Typically, assuming that each fragment contains both audio and video,
+ * the following tree would be created:
+ *
+ * chain "DASH Demux"
+ * |_ group "Representation set 1"
+ * | |_ chain "Qt Demux 0"
+ * | |_ group "Stream 0"
+ * | |_ chain "H264"
+ * | |_ chain "AAC"
+ * |_ group "Representation set 2"
+ * |_ chain "Qt Demux 1"
+ * |_ group "Stream 1"
+ * |_ chain "H264"
+ * |_ chain "AAC"
+ *
+ * Or, if audio and video are contained in separate fragments:
+ *
+ * chain "DASH Demux"
+ * |_ group "Representation set 1"
+ * | |_ chain "Qt Demux 0"
+ * | | |_ group "Stream 0"
+ * | | |_ chain "H264"
+ * | |_ chain "Qt Demux 1"
+ * | |_ group "Stream 1"
+ * | |_ chain "AAC"
+ * |_ group "Representation set 2"
+ * |_ chain "Qt Demux 3"
+ * | |_ group "Stream 2"
+ * | |_ chain "H264"
+ * |_ chain "Qt Demux 4"
+ * |_ group "Stream 3"
+ * |_ chain "AAC"
+ *
+ * In both cases, when switching from Set 1 to Set 2 an EOS is sent on
+ * each end pad corresponding to Rep 0, triggering the "drain" state to
+ * propagate upstream.
+ * Once both EOS have been processed, the "Set 1" group is completely
+ * drained, and decodebin2 will switch to the "Set 2" group.
+ *
+ * Note: nothing can be pushed to the new decoding group before the
+ * old one has been drained, which means that in order to be able to
+ * adapt quickly to bandwidth changes, we will not be able to rely
+ * on downstream buffering, and will instead manage an internal queue.
+ *
+ *
+ * Keyframe trick-mode implementation:
+ *
+ * When requested (with GST_SEEK_FLAG_TRICKMODE_KEY_UNIT) and if the format
+ * is supported (ISOBMFF profiles), dashdemux can download only keyframes
+ * in order to provide fast forward/reverse playback without exceeding the
+ * available bandwidth/cpu/memory usage.
+ *
+ * This is done in two parts:
+ * 1) Parsing ISOBMFF atoms to detect the location of keyframes and only
+ * download/push those.
+ * 2) Deciding what the ideal next keyframe to download is in order to
+ * provide as many keyframes as possible without rebuffering.
+ *
+ * * Keyframe-only downloads:
+ *
+ * For each beginning of fragment, the fragment header will be parsed in
+ * gst_dash_demux_parse_isobmff() and then the information (offset, pts...)
+ * of each keyframe will be stored in moof_sync_samples.
+ *
+ * gst_dash_demux_stream_update_fragment_info() will specify the range
+ * start and end of the current keyframe, which will cause GstAdaptiveDemux
+ * to do a new upstream range request.
+ *
+ * When advancing, if there are still some keyframes in the current
+ * fragment, gst_dash_demux_stream_advance_fragment() will call
+ * gst_dash_demux_stream_advance_sync_sample() which decides what the next
+ * keyframe to get will be (it can be in reverse order for example, or
+ * might not be the *next* keyframe but one further as explained below).
+ *
+ * If no more keyframes are available in the current fragment, dash will
+ * advance to the next fragment (just like in the normal case) or to a
+ * fragment much further away (as explained below).
+ *
+ *
+ * * Deciding the optimal "next" keyframe/fragment to download:
+ *
+ * The main reason for doing keyframe-only downloads is for trick-modes
+ * (i.e. being able to do fast reverse/forward playback with limited
+ * bandwidth/cpu/memory).
+ *
+ * Downloading all keyframes might not be the optimal solution, especially
+ * at high playback rates, since the time taken to download the keyframe
+ * might exceed the available running time between two displayed frames
+ * (i.e. all frames would end up arriving late). This would cause severe
+ * rebuffering.
+ *
+ * Note: The values specified below can be in either the segment running
+ * time or in absolute values. Where position values need to be converted
+ * to segment running time the "running_time(val)" notation is used, and
+ * where running time needs to be converted to segment position the
+ * "position(val)" notation is used.
+ *
+ * The goal instead is to be able to download/display as many frames as
+ * possible for a given playback rate. For that the implementation will
+ * take into account:
+ * * The requested playback rate and segment
+ * * The average time to request and download a keyframe (in running time)
+ * * The current position of dashdemux in the stream
+ * * The current downstream (i.e. sink) position (in running time)
+ *
+ * To reach this goal we consider that there is some amount of buffering
+ * (in time) between dashdemux and the display sink. While we do not know
+ * the exact amount of buffering available, a safe and reasonable assertion
+ * is that there is at least a second (in running time).
+ *
+ * The average time to request and fully download a keyframe (with or
+ * without fragment header) is obtained by averaging the
+ * GstAdaptiveDemuxStream->last_download_time and is stored in
+ * GstDashDemuxStream->average_download_time. Those values include the
+ * network latency and full download time, which are more interesting and
+ * correct than just bitrates (with small download sizes, the impact of the
+ * network latency is much higher).
+ *
+ * The current position is calculated based on the fragment timestamp and
+ * the current keyframe index within that fragment. It is stored in
+ * GstDashDemuxStream->actual_position.
+ *
+ * The downstream position of the pipeline is obtained via QoS events and
+ * is stored in GstAdaptiveDemux (note: it's a running time value).
+ *
+ * The estimated buffering level between dashdemux and downstream is
+ * therefore:
+ * buffering_level = running_time(actual_position) - qos_earliest_time
+ *
+ * In order to avoid rebuffering, we want to ensure that the next keyframe
+ * (including potential fragment header) we request will be downloaded, demuxed
+ * and decoded in time so that it is not late. That next keyframe time is
+ * called the "target_time" and is calculated whenever we have finished
+ * pushing a keyframe downstream.
+ *
+ * One simple observation at this point is that we *need* to make sure that
+ * the target time is chosen such that:
+ * running_time(target_time) > qos_earliest_time + average_download_time
+ *
+ * i.e. we chose a target time which will be greater than the time at which
+ * downstream will be once we request and download the keyframe (otherwise
+ * we're guaranteed to be late).
+ *
+ * This would provide the highest number of displayed frames per
+ * second, but it is just a *minimal* value and is not enough as-is,
+ * since it doesn't take into account the following items which could
+ * cause frames to arrive late (and therefore rebuffering):
+ * * Network jitter (i.e. by how much the download time can fluctuate)
+ * * Network stalling
+ * * Different keyframe sizes (and therefore download time)
+ * * Decoding speed
+ *
+ * Instead, we adjust the target time calculation based on the
+ * buffering_level.
+ *
+ * The smaller the buffering level is (i.e. the closer we are between
+ * current and downstream), the more aggressively we skip forward (and
+ * guarantee the keyframe will be downloaded, decoded and displayed in
+ * time). And the higher the buffering level, the less aggressively
+ * we need to skip forward (and therefore display more frames per
+ * second).
+ *
+ * Right now the threshold for aggressive switching is set to 3
+ * average_download_time. Below that buffering level we set the target time
+ * to at least 3 average_download_time distance beyond the
+ * qos_earliest_time.
+ *
+ * If we are above that buffering level we set the target time to:
+ * position(running_time(position) + average_download_time)
+ *
+ * The logic is therefore:
+ * WHILE(!EOS)
+ * Calculate target_time
+ * Advance to keyframe/fragment for that target_time
+ * Adaptivedemux downloads that keyframe/fragment
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <inttypes.h>
+ #include <gio/gio.h>
+ #include <gst/base/gsttypefindhelper.h>
+ #include <gst/tag/tag.h>
+ #include <gst/net/gstnet.h>
+ #include "gst/gst-i18n-plugin.h"
+ #include "gstdashdemux.h"
+ #include "gstdash_debug.h"
+
+ /* Per-stream-type src pad templates; pads are created on demand as
+  * streams are discovered in the manifest. */
+ static GstStaticPadTemplate gst_dash_demux_videosrc_template =
+ GST_STATIC_PAD_TEMPLATE ("video_%02u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate gst_dash_demux_audiosrc_template =
+ GST_STATIC_PAD_TEMPLATE ("audio_%02u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate gst_dash_demux_subtitlesrc_template =
+ GST_STATIC_PAD_TEMPLATE ("subtitle_%02u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ /* The sink pad receives the MPD manifest itself. */
+ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/dash+xml"));
+
+ GST_DEBUG_CATEGORY (gst_dash_demux_debug);
+ #define GST_CAT_DEFAULT gst_dash_demux_debug
+
+ enum
+ {
+ PROP_0,
+
+ PROP_MAX_BUFFERING_TIME,
+ PROP_BANDWIDTH_USAGE,
+ PROP_MAX_BITRATE,
+ PROP_MAX_VIDEO_WIDTH,
+ PROP_MAX_VIDEO_HEIGHT,
+ PROP_MAX_VIDEO_FRAMERATE,
+ PROP_PRESENTATION_DELAY,
+ PROP_LAST
+ };
+
+ /* Default values for properties */
+ #define DEFAULT_MAX_BUFFERING_TIME 30 /* in seconds */
+ #define DEFAULT_BANDWIDTH_USAGE 0.8f /* 0 to 1 */
+ #define DEFAULT_MAX_BITRATE 0 /* in bit/s */
+ #define DEFAULT_MAX_VIDEO_WIDTH 0
+ #define DEFAULT_MAX_VIDEO_HEIGHT 0
+ #define DEFAULT_MAX_VIDEO_FRAMERATE_N 0
+ #define DEFAULT_MAX_VIDEO_FRAMERATE_D 1
+ #define DEFAULT_PRESENTATION_DELAY "10s" /* 10s */
+
+ /* Clock drift compensation for live streams */
+ /* Intervals are in microseconds (GTimeSpan units). */
+ #define SLOW_CLOCK_UPDATE_INTERVAL (1000000 * 30 * 60) /* 30 minutes */
+ #define FAST_CLOCK_UPDATE_INTERVAL (1000000 * 30) /* 30 seconds */
+ #define SUPPORTED_CLOCK_FORMATS (GST_MPD_UTCTIMING_TYPE_NTP | GST_MPD_UTCTIMING_TYPE_HTTP_HEAD | GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE | GST_MPD_UTCTIMING_TYPE_HTTP_ISO | GST_MPD_UTCTIMING_TYPE_HTTP_NTP)
+ #define NTP_TO_UNIX_EPOCH G_GUINT64_CONSTANT(2208988800) /* difference (in seconds) between NTP epoch and Unix epoch */
+
+ /* State for compensating drift between the client clock and the
+  * server's clock on live streams. */
+ struct _GstDashDemuxClockDrift
+ {
+ GMutex clock_lock; /* used to protect access to struct */
+ guint selected_url;
+ gint64 next_update;
+ /* @clock_compensation: amount (in usecs) to add to client's idea of
+ now to map it to the server's idea of now */
+ GTimeSpan clock_compensation;
+ GstClock *ntp_clock;
+ };
+
+ /* Byte range of a single sync sample (keyframe) within a fragment. */
+ typedef struct
+ {
+ guint64 start_offset, end_offset;
+ /* TODO: Timestamp and duration */
+ } GstDashStreamSyncSample;
+
+ /* GObject */
+ static void gst_dash_demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_dash_demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_dash_demux_dispose (GObject * obj);
+
+ /* GstAdaptiveDemux */
+ static GstClockTime gst_dash_demux_get_duration (GstAdaptiveDemux * ademux);
+ static gboolean gst_dash_demux_is_live (GstAdaptiveDemux * ademux);
+ static void gst_dash_demux_reset (GstAdaptiveDemux * ademux);
+ static gboolean gst_dash_demux_process_manifest (GstAdaptiveDemux * ademux,
+ GstBuffer * buf);
+ static gboolean gst_dash_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek);
+ static GstFlowReturn
+ gst_dash_demux_stream_update_fragment_info (GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn gst_dash_demux_stream_seek (GstAdaptiveDemuxStream *
+ stream, gboolean forward, GstSeekFlags flags, GstClockTime ts,
+ GstClockTime * final_ts);
+ static gboolean gst_dash_demux_stream_has_next_fragment (GstAdaptiveDemuxStream
+ * stream);
+ static GstFlowReturn
+ gst_dash_demux_stream_advance_fragment (GstAdaptiveDemuxStream * stream);
+ static gboolean
+ gst_dash_demux_stream_advance_subfragment (GstAdaptiveDemuxStream * stream);
+ static gboolean gst_dash_demux_stream_select_bitrate (GstAdaptiveDemuxStream *
+ stream, guint64 bitrate);
+ static gint64 gst_dash_demux_get_manifest_update_interval (GstAdaptiveDemux *
+ demux);
+ static GstFlowReturn gst_dash_demux_update_manifest_data (GstAdaptiveDemux *
+ demux, GstBuffer * buf);
+ static gint64
+ gst_dash_demux_stream_get_fragment_waiting_time (GstAdaptiveDemuxStream *
+ stream);
+ static void gst_dash_demux_advance_period (GstAdaptiveDemux * demux);
+ static gboolean gst_dash_demux_has_next_period (GstAdaptiveDemux * demux);
+ static GstFlowReturn gst_dash_demux_data_received (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer);
+ static gboolean
+ gst_dash_demux_stream_fragment_start (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn
+ gst_dash_demux_stream_fragment_finished (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static gboolean gst_dash_demux_need_another_chunk (GstAdaptiveDemuxStream *
+ stream);
+
+ /* GstDashDemux */
+ static gboolean gst_dash_demux_setup_all_streams (GstDashDemux * demux);
+ static void gst_dash_demux_stream_free (GstAdaptiveDemuxStream * stream);
+
+ static GstCaps *gst_dash_demux_get_input_caps (GstDashDemux * demux,
+ GstActiveStream * stream);
+ static GstPad *gst_dash_demux_create_pad (GstDashDemux * demux,
+ GstActiveStream * stream);
+ static GstDashDemuxClockDrift *gst_dash_demux_clock_drift_new (GstDashDemux *
+ demux);
+ static void gst_dash_demux_clock_drift_free (GstDashDemuxClockDrift *);
+ static gboolean gst_dash_demux_poll_clock_drift (GstDashDemux * demux);
+ static GTimeSpan gst_dash_demux_get_clock_compensation (GstDashDemux * demux);
+ static GDateTime *gst_dash_demux_get_server_now_utc (GstDashDemux * demux);
+
+ /* Convenience accessors for the stream's parsed sidx (segment index). */
+ #define SIDX(s) (&(s)->sidx_parser.sidx)
+
+ /* Bounds-checked access to the i-th sidx entry of the stream. */
+ static inline GstSidxBoxEntry *
+ SIDX_ENTRY (GstDashDemuxStream * s, gint i)
+ {
+ g_assert (i < SIDX (s)->entries_count);
+ return &(SIDX (s)->entries[(i)]);
+ }
+
+ #define SIDX_CURRENT_ENTRY(s) SIDX_ENTRY(s, SIDX(s)->entry_index)
+
+ static void gst_dash_demux_send_content_protection_event (gpointer cp_data,
+ gpointer stream);
+
+ #define gst_dash_demux_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstDashDemux, gst_dash_demux, GST_TYPE_ADAPTIVE_DEMUX,
+ GST_DEBUG_CATEGORY_INIT (gst_dash_demux_debug, "dashdemux", 0,
+ "dashdemux element")
+ );
+ GST_ELEMENT_REGISTER_DEFINE (dashdemux, "dashdemux", GST_RANK_PRIMARY,
+ GST_TYPE_DASH_DEMUX);
+
+ /* GObject dispose: reset the adaptive demux state and release the MPD
+  * client, clock-drift state and presentation-delay string. */
+ static void
+ gst_dash_demux_dispose (GObject * obj)
+ {
+ GstDashDemux *demux = GST_DASH_DEMUX (obj);
+
+ gst_dash_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+
+ if (demux->client) {
+ gst_mpd_client_free (demux->client);
+ demux->client = NULL;
+ }
+
+ g_mutex_clear (&demux->client_lock);
+
+ gst_dash_demux_clock_drift_free (demux->clock_drift);
+ demux->clock_drift = NULL;
+ g_free (demux->default_presentation_delay);
+ G_OBJECT_CLASS (parent_class)->dispose (obj);
+ }
+
+ /* Compute the seekable range [start, stop) of a live stream, in
+  * nanoseconds relative to availabilityStartTime.  Returns FALSE when no
+  * availabilityStartTime is present or the stream has not started yet. */
+ static gboolean
+ gst_dash_demux_get_live_seek_range (GstAdaptiveDemux * demux, gint64 * start,
+ gint64 * stop)
+ {
+ GstDashDemux *self = GST_DASH_DEMUX (demux);
+ GDateTime *now;
+ GDateTime *mstart;
+ GTimeSpan stream_now;
+ GstClockTime seg_duration;
+
+ if (self->client->mpd_root_node->availabilityStartTime == NULL)
+ return FALSE;
+
+ seg_duration = gst_mpd_client_get_maximum_segment_duration (self->client);
+ /* "now" is the server's idea of now (clock-drift compensated). */
+ now = gst_dash_demux_get_server_now_utc (self);
+ mstart =
+ gst_date_time_to_g_date_time (self->client->mpd_root_node->
+ availabilityStartTime);
+ stream_now = g_date_time_difference (now, mstart);
+ g_date_time_unref (now);
+ g_date_time_unref (mstart);
+
+ if (stream_now <= 0)
+ return FALSE;
+
+ /* GTimeSpan is in microseconds, scale to GstClockTime nanoseconds. */
+ *stop = stream_now * GST_USECOND;
+ if (self->client->mpd_root_node->timeShiftBufferDepth ==
+ GST_MPD_DURATION_NONE) {
+ *start = 0;
+ } else {
+ /* timeShiftBufferDepth is expressed in milliseconds. */
+ *start =
+ *stop -
+ (self->client->mpd_root_node->timeShiftBufferDepth * GST_MSECOND);
+ if (*start < 0)
+ *start = 0;
+ }
+
+ /* As defined in 5.3.9.5.3 of the DASH specification, a segment does
+ not become available until the sum of:
+ * the value of the MPD@availabilityStartTime,
+ * the PeriodStart time of the containing Period
+ * the MPD start time of the Media Segment, and
+ * the MPD duration of the Media Segment.
+ Therefore we need to subtract the media segment duration from the stop
+ time.
+ */
+ *stop -= seg_duration;
+ return TRUE;
+ }
+
+ /* Return the presentation-time offset of @stream, as reported by the
+  * MPD client for the stream's index. */
+ static GstClockTime
+ gst_dash_demux_get_presentation_offset (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+ return gst_mpd_client_get_stream_presentation_offset (dashdemux->client,
+ dashstream->index);
+ }
+
+ /* Return the start time of the current Period from the MPD client. */
+ static GstClockTime
+ gst_dash_demux_get_period_start_time (GstAdaptiveDemux * demux)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+ return gst_mpd_client_get_period_start_time (dashdemux->client);
+ }
+
+ static void
+ gst_dash_demux_class_init (GstDashDemuxClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstAdaptiveDemuxClass *gstadaptivedemux_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstadaptivedemux_class = (GstAdaptiveDemuxClass *) klass;
+
+ gobject_class->set_property = gst_dash_demux_set_property;
+ gobject_class->get_property = gst_dash_demux_get_property;
+ gobject_class->dispose = gst_dash_demux_dispose;
+
+ #ifndef GST_REMOVE_DEPRECATED
+ g_object_class_install_property (gobject_class, PROP_MAX_BUFFERING_TIME,
+ g_param_spec_uint ("max-buffering-time", "Maximum buffering time",
+ "Maximum number of seconds of buffer accumulated during playback"
+ "(deprecated)",
+ 2, G_MAXUINT, DEFAULT_MAX_BUFFERING_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+ g_object_class_install_property (gobject_class, PROP_BANDWIDTH_USAGE,
+ g_param_spec_float ("bandwidth-usage",
+ "Bandwidth usage [0..1]",
+ "Percentage of the available bandwidth to use when "
+ "selecting representations (deprecated)",
+ 0, 1, DEFAULT_BANDWIDTH_USAGE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+ #endif
+
+ g_object_class_install_property (gobject_class, PROP_MAX_BITRATE,
+ g_param_spec_uint ("max-bitrate", "Max bitrate",
+ "Max of bitrate supported by target video decoder (0 = no maximum)",
+ 0, G_MAXUINT, DEFAULT_MAX_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_VIDEO_WIDTH,
+ g_param_spec_uint ("max-video-width", "Max video width",
+ "Max video width to select (0 = no maximum)",
+ 0, G_MAXUINT, DEFAULT_MAX_VIDEO_WIDTH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_VIDEO_HEIGHT,
+ g_param_spec_uint ("max-video-height", "Max video height",
+ "Max video height to select (0 = no maximum)",
+ 0, G_MAXUINT, DEFAULT_MAX_VIDEO_HEIGHT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_VIDEO_FRAMERATE,
+ gst_param_spec_fraction ("max-video-framerate", "Max video framerate",
+ "Max video framerate to select (0/1 = no maximum)",
+ 0, 1, G_MAXINT, 1, DEFAULT_MAX_VIDEO_FRAMERATE_N,
+ DEFAULT_MAX_VIDEO_FRAMERATE_D,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PRESENTATION_DELAY,
+ g_param_spec_string ("presentation-delay", "Presentation delay",
+ "Default presentation delay (in seconds, milliseconds or fragments) (e.g. 12s, 2500ms, 3f)",
+ DEFAULT_PRESENTATION_DELAY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_dash_demux_audiosrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_dash_demux_videosrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_dash_demux_subtitlesrc_template);
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "DASH Demuxer",
+ "Codec/Demuxer/Adaptive",
+ "Dynamic Adaptive Streaming over HTTP demuxer",
+ "David Corvoysier <david.corvoysier@orange.com>\n\
+ Hamid Zakari <hamid.zakari@gmail.com>\n\
+ Gianluca Gennari <gennarone@gmail.com>");
+
+
+ gstadaptivedemux_class->get_duration = gst_dash_demux_get_duration;
+ gstadaptivedemux_class->is_live = gst_dash_demux_is_live;
+ gstadaptivedemux_class->reset = gst_dash_demux_reset;
+ gstadaptivedemux_class->seek = gst_dash_demux_seek;
+
+ gstadaptivedemux_class->process_manifest = gst_dash_demux_process_manifest;
+ gstadaptivedemux_class->update_manifest_data =
+ gst_dash_demux_update_manifest_data;
+ gstadaptivedemux_class->get_manifest_update_interval =
+ gst_dash_demux_get_manifest_update_interval;
+
+ gstadaptivedemux_class->has_next_period = gst_dash_demux_has_next_period;
+ gstadaptivedemux_class->advance_period = gst_dash_demux_advance_period;
+ gstadaptivedemux_class->stream_has_next_fragment =
+ gst_dash_demux_stream_has_next_fragment;
+ gstadaptivedemux_class->stream_advance_fragment =
+ gst_dash_demux_stream_advance_fragment;
+ gstadaptivedemux_class->stream_get_fragment_waiting_time =
+ gst_dash_demux_stream_get_fragment_waiting_time;
+ gstadaptivedemux_class->stream_seek = gst_dash_demux_stream_seek;
+ gstadaptivedemux_class->stream_select_bitrate =
+ gst_dash_demux_stream_select_bitrate;
+ gstadaptivedemux_class->stream_update_fragment_info =
+ gst_dash_demux_stream_update_fragment_info;
+ gstadaptivedemux_class->stream_free = gst_dash_demux_stream_free;
+ gstadaptivedemux_class->get_live_seek_range =
+ gst_dash_demux_get_live_seek_range;
+ gstadaptivedemux_class->get_presentation_offset =
+ gst_dash_demux_get_presentation_offset;
+ gstadaptivedemux_class->get_period_start_time =
+ gst_dash_demux_get_period_start_time;
+
+ gstadaptivedemux_class->start_fragment = gst_dash_demux_stream_fragment_start;
+ gstadaptivedemux_class->finish_fragment =
+ gst_dash_demux_stream_fragment_finished;
+ gstadaptivedemux_class->data_received = gst_dash_demux_data_received;
+ gstadaptivedemux_class->need_another_chunk =
+ gst_dash_demux_need_another_chunk;
+ }
+
/* Instance initializer: seed all property-backed fields with their
 * defaults and size the per-stream struct for the base class. */
static void
gst_dash_demux_init (GstDashDemux * demux)
{
  /* Properties */
  /* max_buffering_time is kept internally in nanoseconds; the property
   * itself is expressed in seconds (see set/get_property). */
  demux->max_buffering_time = DEFAULT_MAX_BUFFERING_TIME * GST_SECOND;
  demux->max_bitrate = DEFAULT_MAX_BITRATE;
  demux->max_video_width = DEFAULT_MAX_VIDEO_WIDTH;
  demux->max_video_height = DEFAULT_MAX_VIDEO_HEIGHT;
  demux->max_video_framerate_n = DEFAULT_MAX_VIDEO_FRAMERATE_N;
  demux->max_video_framerate_d = DEFAULT_MAX_VIDEO_FRAMERATE_D;
  /* owned copy; freed/replaced in set_property and (presumably) dispose */
  demux->default_presentation_delay = g_strdup (DEFAULT_PRESENTATION_DELAY);

  g_mutex_init (&demux->client_lock);

  /* Tell the base class how big our stream subclass is so it can
   * allocate GstDashDemuxStream instances for us. */
  gst_adaptive_demux_set_stream_struct_size (GST_ADAPTIVE_DEMUX_CAST (demux),
      sizeof (GstDashDemuxStream));
}
+
/* GObject property setter.  Note that PROP_BANDWIDTH_USAGE is stored on
 * the base class (bitrate_limit), not on GstDashDemux itself. */
static void
gst_dash_demux_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAdaptiveDemux *adaptivedemux = GST_ADAPTIVE_DEMUX_CAST (object);
  GstDashDemux *demux = GST_DASH_DEMUX (object);

  switch (prop_id) {
    case PROP_MAX_BUFFERING_TIME:
      /* property is in seconds, field is in nanoseconds */
      demux->max_buffering_time = g_value_get_uint (value) * GST_SECOND;
      break;
    case PROP_BANDWIDTH_USAGE:
      adaptivedemux->bitrate_limit = g_value_get_float (value);
      break;
    case PROP_MAX_BITRATE:
      demux->max_bitrate = g_value_get_uint (value);
      break;
    case PROP_MAX_VIDEO_WIDTH:
      demux->max_video_width = g_value_get_uint (value);
      break;
    case PROP_MAX_VIDEO_HEIGHT:
      demux->max_video_height = g_value_get_uint (value);
      break;
    case PROP_MAX_VIDEO_FRAMERATE:
      demux->max_video_framerate_n = gst_value_get_fraction_numerator (value);
      demux->max_video_framerate_d = gst_value_get_fraction_denominator (value);
      break;
    case PROP_PRESENTATION_DELAY:
      /* replace the owned string */
      g_free (demux->default_presentation_delay);
      demux->default_presentation_delay = g_value_dup_string (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* GObject property getter — mirrors gst_dash_demux_set_property. */
static void
gst_dash_demux_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAdaptiveDemux *adaptivedemux = GST_ADAPTIVE_DEMUX_CAST (object);
  GstDashDemux *demux = GST_DASH_DEMUX (object);

  switch (prop_id) {
    case PROP_MAX_BUFFERING_TIME:
      /* convert the internal nanosecond value back to seconds */
      g_value_set_uint (value, demux->max_buffering_time / GST_SECOND);
      break;
    case PROP_BANDWIDTH_USAGE:
      g_value_set_float (value, adaptivedemux->bitrate_limit);
      break;
    case PROP_MAX_BITRATE:
      g_value_set_uint (value, demux->max_bitrate);
      break;
    case PROP_MAX_VIDEO_WIDTH:
      g_value_set_uint (value, demux->max_video_width);
      break;
    case PROP_MAX_VIDEO_HEIGHT:
      g_value_set_uint (value, demux->max_video_height);
      break;
    case PROP_MAX_VIDEO_FRAMERATE:
      gst_value_set_fraction (value, demux->max_video_framerate_n,
          demux->max_video_framerate_d);
      break;
    case PROP_PRESENTATION_DELAY:
      /* never hand out a NULL string value */
      if (demux->default_presentation_delay == NULL)
        g_value_set_static_string (value, "");
      else
        g_value_set_string (value, demux->default_presentation_delay);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* Activate one MPD-client stream per AdaptationSet of the current Period.
 *
 * Returns TRUE if at least one adaptation set was found (and set up);
 * posts a STREAM/DEMUX element error and returns FALSE otherwise. */
static gboolean
gst_dash_demux_setup_mpdparser_streams (GstDashDemux * demux,
    GstMPDClient * client)
{
  gboolean has_streams = FALSE;
  GList *adapt_sets, *iter;

  adapt_sets = gst_mpd_client_get_adaptation_sets (client);
  for (iter = adapt_sets; iter; iter = g_list_next (iter)) {
    GstMPDAdaptationSetNode *adapt_set_node = iter->data;

    /* NOTE(review): the return value of gst_mpd_client_setup_streaming
     * is ignored here; has_streams is set even if setup failed for this
     * adaptation set — confirm that is intentional. */
    gst_mpd_client_setup_streaming (client, adapt_set_node);
    has_streams = TRUE;
  }

  if (!has_streams) {
    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, ("Manifest has no playable "
            "streams"), ("No streams could be activated from the manifest"));
  }
  return has_streams;
}
+
/* Build the GstDashDemuxStream objects for the current Period.
 *
 * For every active MPD stream this creates a source pad, derives caps,
 * attaches language tags (from the AdaptationSet or, as fallback, a
 * ContentComponent), initializes per-stream state and queues any
 * ContentProtection events.  Returns FALSE when no stream could be set
 * up at the MPD level. */
static gboolean
gst_dash_demux_setup_all_streams (GstDashDemux * demux)
{
  guint i;

  GST_DEBUG_OBJECT (demux, "Setting up streams for period %d",
      gst_mpd_client_get_period_index (demux->client));

  /* clean old active stream list, if any */
  gst_mpd_client_active_streams_free (demux->client);

  if (!gst_dash_demux_setup_mpdparser_streams (demux, demux->client)) {
    return FALSE;
  }

  GST_DEBUG_OBJECT (demux, "Creating stream objects");
  for (i = 0; i < gst_mpd_client_get_nb_active_stream (demux->client); i++) {
    GstDashDemuxStream *stream;
    GstActiveStream *active_stream;
    GstCaps *caps;
    GstStructure *s;
    GstPad *srcpad;
    gchar *lang = NULL;
    GstTagList *tags = NULL;

    active_stream =
        gst_mpd_client_get_active_stream_by_index (demux->client, i);
    if (active_stream == NULL)
      continue;

    /* Audio is dropped entirely when downstream requested trick play
     * without audio. */
    if (demux->trickmode_no_audio
        && active_stream->mimeType == GST_STREAM_AUDIO) {
      GST_DEBUG_OBJECT (demux,
          "Skipping audio stream %d because of TRICKMODE_NO_AUDIO flag", i);
      continue;
    }

    /* create_pad returns NULL for unsupported application streams;
     * such streams are silently skipped */
    srcpad = gst_dash_demux_create_pad (demux, active_stream);
    if (srcpad == NULL)
      continue;

    caps = gst_dash_demux_get_input_caps (demux, active_stream);
    GST_LOG_OBJECT (demux, "Creating stream %d %" GST_PTR_FORMAT, i, caps);

    if (active_stream->cur_adapt_set) {
      GstMPDAdaptationSetNode *adp_set = active_stream->cur_adapt_set;
      lang = adp_set->lang;

      /* Fallback to the language in ContentComponent node */
      if (lang == NULL) {
        GList *it;

        for (it = adp_set->ContentComponents; it; it = it->next) {
          GstMPDContentComponentNode *cc_node = it->data;
          if (cc_node->lang) {
            lang = cc_node->lang;
            break;
          }
        }
      }
    }

    if (lang) {
      /* ISO-639 codes become LANGUAGE_CODE tags; anything else is kept
       * as a free-form LANGUAGE_NAME */
      if (gst_tag_check_language_code (lang))
        tags = gst_tag_list_new (GST_TAG_LANGUAGE_CODE, lang, NULL);
      else
        tags = gst_tag_list_new (GST_TAG_LANGUAGE_NAME, lang, NULL);
    }

    /* base class allocates sizeof (GstDashDemuxStream), as registered in
     * gst_dash_demux_init, so this downcast is safe */
    stream = (GstDashDemuxStream *)
        gst_adaptive_demux_stream_new (GST_ADAPTIVE_DEMUX_CAST (demux), srcpad);
    stream->active_stream = active_stream;
    s = gst_caps_get_structure (caps, 0);
    /* sidx indexes are only used with the ISOBMFF on-demand profile */
    stream->allow_sidx =
        gst_mpd_client_has_isoff_ondemand_profile (demux->client);
    stream->is_isobmff = gst_structure_has_name (s, "video/quicktime")
        || gst_structure_has_name (s, "audio/x-m4a");
    stream->first_sync_sample_always_after_moof = TRUE;
    stream->adapter = gst_adapter_new ();
    gst_adaptive_demux_stream_set_caps (GST_ADAPTIVE_DEMUX_STREAM_CAST (stream),
        caps);
    if (tags)
      gst_adaptive_demux_stream_set_tags (GST_ADAPTIVE_DEMUX_STREAM_CAST
          (stream), tags);
    stream->index = i;
    stream->pending_seek_ts = GST_CLOCK_TIME_NONE;
    stream->sidx_position = GST_CLOCK_TIME_NONE;
    stream->actual_position = GST_CLOCK_TIME_NONE;
    stream->target_time = GST_CLOCK_TIME_NONE;
    /* Set a default average keyframe download time of a quarter of a second */
    stream->average_download_time = 250 * GST_MSECOND;

    if (active_stream->cur_adapt_set &&
        GST_MPD_REPRESENTATION_BASE_NODE (active_stream->
            cur_adapt_set)->ContentProtection) {
      GST_DEBUG_OBJECT (demux, "Adding ContentProtection events to source pad");
      g_list_foreach (GST_MPD_REPRESENTATION_BASE_NODE
          (active_stream->cur_adapt_set)->ContentProtection,
          gst_dash_demux_send_content_protection_event, stream);
    }

    gst_isoff_sidx_parser_init (&stream->sidx_parser);
  }

  return TRUE;
}
+
+ static void
+ gst_dash_demux_send_content_protection_event (gpointer data, gpointer userdata)
+ {
+ GstMPDDescriptorTypeNode *cp = (GstMPDDescriptorTypeNode *) data;
+ GstDashDemuxStream *stream = (GstDashDemuxStream *) userdata;
+ GstEvent *event;
+ GstBuffer *pssi;
+ glong pssi_len;
+ gchar *schemeIdUri;
+ GstPad *pad = GST_ADAPTIVE_DEMUX_STREAM_PAD (stream);
+
+ if (cp->schemeIdUri == NULL)
+ return;
+
+ GST_TRACE_OBJECT (pad, "check schemeIdUri %s", cp->schemeIdUri);
+ /* RFC 2141 states: The leading "urn:" sequence is case-insensitive */
+ schemeIdUri = g_ascii_strdown (cp->schemeIdUri, -1);
+ if (g_str_has_prefix (schemeIdUri, "urn:uuid:")) {
+ pssi_len = strlen (cp->value);
+ pssi = gst_buffer_new_memdup (cp->value, pssi_len);
+ /* RFC 4122 states that the hex part of a UUID is in lower case,
+ * but some streams seem to ignore this and use upper case for the
+ * protection system ID */
+ event = gst_event_new_protection (cp->schemeIdUri + 9, pssi, "dash/mpd");
+ GST_LOG_OBJECT (pad,
+ "Queueing protection event %" GST_PTR_FORMAT " on source pad", event);
+ gst_adaptive_demux_stream_queue_event ((GstAdaptiveDemuxStream *) stream,
+ event);
+ gst_buffer_unref (pssi);
+ }
+ g_free (schemeIdUri);
+ }
+
+ static GstClockTime
+ gst_dash_demux_get_duration (GstAdaptiveDemux * ademux)
+ {
+ GstDashDemux *demux = GST_DASH_DEMUX_CAST (ademux);
+
+ g_return_val_if_fail (demux->client != NULL, GST_CLOCK_TIME_NONE);
+
+ return gst_mpd_client_get_media_presentation_duration (demux->client);
+ }
+
+ static gboolean
+ gst_dash_demux_is_live (GstAdaptiveDemux * ademux)
+ {
+ GstDashDemux *demux = GST_DASH_DEMUX_CAST (ademux);
+
+ g_return_val_if_fail (demux->client != NULL, FALSE);
+
+ return gst_mpd_client_is_live (demux->client);
+ }
+
/* Select the Period to play and create its streams.
 *
 * For live streams this computes "now" on the server clock (optionally
 * corrected via UTCTiming clock-drift polling and shifted back by the
 * suggested or configured presentation delay), picks the Period active
 * at that time and seeks the streams to it.  Non-live streams start at
 * Period 0, first segment.  Returns FALSE on any setup failure. */
static gboolean
gst_dash_demux_setup_streams (GstAdaptiveDemux * demux)
{
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
  gboolean ret = TRUE;
  GstDateTime *now = NULL;
  guint period_idx;

  /* setup video, audio and subtitle streams, starting from first Period if
   * non-live */
  period_idx = 0;
  if (gst_mpd_client_is_live (dashdemux->client)) {
    GDateTime *g_now;
    /* availabilityStartTime is mandatory for dynamic MPDs; without it
     * segment availability cannot be computed */
    if (dashdemux->client->mpd_root_node->availabilityStartTime == NULL) {
      ret = FALSE;
      GST_ERROR_OBJECT (demux, "MPD does not have availabilityStartTime");
      goto done;
    }
    if (dashdemux->clock_drift == NULL) {
      gchar **urls;
      urls =
          gst_mpd_client_get_utc_timing_sources (dashdemux->client,
          SUPPORTED_CLOCK_FORMATS, NULL);
      if (urls) {
        /* lazily create the clock-drift tracker and do an initial poll */
        GST_DEBUG_OBJECT (dashdemux, "Found a supported UTCTiming element");
        dashdemux->clock_drift = gst_dash_demux_clock_drift_new (dashdemux);
        gst_dash_demux_poll_clock_drift (dashdemux);
      }
    }
    /* get period index for period encompassing the current time */
    g_now = gst_dash_demux_get_server_now_utc (dashdemux);
    now = gst_date_time_new_from_g_date_time (g_now);
    /* shift "now" back by the presentation delay: MPD's
     * suggestedPresentationDelay wins over the element property */
    if (dashdemux->client->mpd_root_node->suggestedPresentationDelay != -1) {
      GstDateTime *target = gst_mpd_client_add_time_difference (now,
          dashdemux->client->mpd_root_node->suggestedPresentationDelay * -1000);
      gst_date_time_unref (now);
      now = target;
    } else if (dashdemux->default_presentation_delay) {
      gint64 dfp =
          gst_mpd_client_parse_default_presentation_delay (dashdemux->client,
          dashdemux->default_presentation_delay);
      GstDateTime *target = gst_mpd_client_add_time_difference (now,
          dfp * -1000);
      gst_date_time_unref (now);
      now = target;
    }
    period_idx =
        gst_mpd_client_get_period_index_at_time (dashdemux->client, now);
    if (period_idx == G_MAXUINT) {
#ifndef GST_DISABLE_GST_DEBUG
      gchar *date_str = gst_date_time_to_iso8601_string (now);
      GST_DEBUG_OBJECT (demux, "Unable to find live period active at %s",
          date_str);
      g_free (date_str);
#endif
      ret = FALSE;
      goto done;
    }
  }

  if (!gst_mpd_client_set_period_index (dashdemux->client, period_idx) ||
      !gst_dash_demux_setup_all_streams (dashdemux)) {
    ret = FALSE;
    goto done;
  }

  /* If stream is live, try to find the segment that
   * is closest to current time */
  if (gst_mpd_client_is_live (dashdemux->client)) {
    GDateTime *gnow;

    GST_DEBUG_OBJECT (demux, "Seeking to current time of day for live stream ");

    gnow = gst_date_time_to_g_date_time (now);
    gst_mpd_client_seek_to_time (dashdemux->client, gnow);
    g_date_time_unref (gnow);
  } else {
    GST_DEBUG_OBJECT (demux, "Seeking to first segment for on-demand stream ");

    /* start playing from the first segment */
    gst_mpd_client_seek_to_first_segment (dashdemux->client);
  }

done:
  /* single cleanup point for the (possibly replaced) "now" timestamp */
  if (now != NULL)
    gst_date_time_unref (now);
  return ret;
}
+
/* GstAdaptiveDemux vfunc: parse the downloaded MPD buffer.
 *
 * Replaces any previous MPD client with a fresh one, records the
 * manifest URIs, parses the XML and sets up the media presentation and
 * the streams.  Returns TRUE on success; posts a STREAM/DECODE element
 * error when the manifest parses but is not usable. */
static gboolean
gst_dash_demux_process_manifest (GstAdaptiveDemux * demux, GstBuffer * buf)
{
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
  gboolean ret = FALSE;
  gchar *manifest;
  GstMapInfo mapinfo;

  /* start from a clean client on (re)processing */
  if (dashdemux->client)
    gst_mpd_client_free (dashdemux->client);
  dashdemux->client = gst_mpd_client_new ();
  gst_mpd_client_set_uri_downloader (dashdemux->client, demux->downloader);

  dashdemux->client->mpd_uri = g_strdup (demux->manifest_uri);
  dashdemux->client->mpd_base_uri = g_strdup (demux->manifest_base_uri);

  GST_DEBUG_OBJECT (demux, "Fetched MPD file at URI: %s (base: %s)",
      dashdemux->client->mpd_uri,
      GST_STR_NULL (dashdemux->client->mpd_base_uri));

  if (gst_buffer_map (buf, &mapinfo, GST_MAP_READ)) {
    manifest = (gchar *) mapinfo.data;
    if (gst_mpd_client_parse (dashdemux->client, manifest, mapinfo.size)) {
      if (gst_mpd_client_setup_media_presentation (dashdemux->client, 0, 0,
              NULL)) {
        ret = TRUE;
      } else {
        GST_ELEMENT_ERROR (demux, STREAM, DECODE,
            ("Incompatible manifest file."), (NULL));
      }
    }
    gst_buffer_unmap (buf, &mapinfo);
  } else {
    GST_WARNING_OBJECT (demux, "Failed to map manifest buffer");
  }

  /* only create streams once the manifest itself is known-good */
  if (ret)
    ret = gst_dash_demux_setup_streams (demux);

  return ret;
}
+
/* Create and activate a source pad for one active stream.
 *
 * Pad names are "audio_%02u" / "video_%02u" / "subtitle_%02u" with a
 * per-type running counter.  Returns NULL (no pad) for application
 * streams that don't carry subtitles; any other mime type is a
 * programming error (g_assert_not_reached). */
static GstPad *
gst_dash_demux_create_pad (GstDashDemux * demux, GstActiveStream * stream)
{
  GstPad *pad;
  GstPadTemplate *tmpl;
  gchar *name;

  switch (stream->mimeType) {
    case GST_STREAM_AUDIO:
      name = g_strdup_printf ("audio_%02u", demux->n_audio_streams++);
      tmpl = gst_static_pad_template_get (&gst_dash_demux_audiosrc_template);
      break;
    case GST_STREAM_VIDEO:
      name = g_strdup_printf ("video_%02u", demux->n_video_streams++);
      tmpl = gst_static_pad_template_get (&gst_dash_demux_videosrc_template);
      break;
    case GST_STREAM_APPLICATION:
      /* only subtitle-bearing application streams get a pad */
      if (gst_mpd_client_active_stream_contains_subtitles (stream)) {
        name = g_strdup_printf ("subtitle_%02u", demux->n_subtitle_streams++);
        tmpl =
            gst_static_pad_template_get (&gst_dash_demux_subtitlesrc_template);
      } else {
        return NULL;
      }
      break;
    default:
      g_assert_not_reached ();
      return NULL;
  }

  /* Create and activate new pads */
  pad = gst_pad_new_from_template (tmpl, name);
  g_free (name);
  gst_object_unref (tmpl);

  gst_pad_set_active (pad, TRUE);
  GST_INFO_OBJECT (demux, "Creating srcpad %s:%s", GST_DEBUG_PAD_NAME (pad));
  return pad;
}
+
/* GstAdaptiveDemux vfunc: return the element to a pristine state.
 *
 * Drops the current MPD client and clock-drift tracker, creates a fresh
 * client, and resets pad counters and trick-mode flags. */
static void
gst_dash_demux_reset (GstAdaptiveDemux * ademux)
{
  GstDashDemux *demux = GST_DASH_DEMUX_CAST (ademux);

  GST_DEBUG_OBJECT (demux, "Resetting demux");

  demux->end_of_period = FALSE;
  demux->end_of_manifest = FALSE;

  if (demux->client) {
    gst_mpd_client_free (demux->client);
    demux->client = NULL;
  }
  /* clock_drift_free handles NULL; pointer is cleared to avoid a
   * dangling reference */
  gst_dash_demux_clock_drift_free (demux->clock_drift);
  demux->clock_drift = NULL;
  /* a client object always exists after reset, even before any
   * manifest has been processed */
  demux->client = gst_mpd_client_new ();
  gst_mpd_client_set_uri_downloader (demux->client, ademux->downloader);

  /* restart pad naming from audio_00/video_00/subtitle_00 */
  demux->n_audio_streams = 0;
  demux->n_video_streams = 0;
  demux->n_subtitle_streams = 0;

  demux->trickmode_no_audio = FALSE;
  demux->allow_trickmode_key_units = TRUE;
}
+
+ static GstCaps *
+ gst_dash_demux_get_video_input_caps (GstDashDemux * demux,
+ GstActiveStream * stream)
+ {
+ guint width = 0, height = 0;
+ gint fps_num = 0, fps_den = 1;
+ gboolean have_fps = FALSE;
+ GstCaps *caps = NULL;
+
+ if (stream == NULL)
+ return NULL;
+
+ /* if bitstreamSwitching is true we don't need to switch pads on resolution change */
+ if (!gst_mpd_client_get_bitstream_switching_flag (stream)) {
+ width = gst_mpd_client_get_video_stream_width (stream);
+ height = gst_mpd_client_get_video_stream_height (stream);
+ have_fps =
+ gst_mpd_client_get_video_stream_framerate (stream, &fps_num, &fps_den);
+ }
+ caps = gst_mpd_client_get_stream_caps (stream);
+ if (caps == NULL)
+ return NULL;
+
+ if (width > 0 && height > 0) {
+ gst_caps_set_simple (caps, "width", G_TYPE_INT, width, "height",
+ G_TYPE_INT, height, NULL);
+ }
+
+ if (have_fps) {
+ gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, fps_num,
+ fps_den, NULL);
+ }
+
+ return caps;
+ }
+
+ static GstCaps *
+ gst_dash_demux_get_audio_input_caps (GstDashDemux * demux,
+ GstActiveStream * stream)
+ {
+ guint rate = 0, channels = 0;
+ GstCaps *caps = NULL;
+
+ if (stream == NULL)
+ return NULL;
+
+ /* if bitstreamSwitching is true we don't need to switch pads on rate/channels change */
+ if (!gst_mpd_client_get_bitstream_switching_flag (stream)) {
+ channels = gst_mpd_client_get_audio_stream_num_channels (stream);
+ rate = gst_mpd_client_get_audio_stream_rate (stream);
+ }
+ caps = gst_mpd_client_get_stream_caps (stream);
+ if (caps == NULL)
+ return NULL;
+
+ if (rate > 0) {
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, rate, NULL);
+ }
+ if (channels > 0) {
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, channels, NULL);
+ }
+
+ return caps;
+ }
+
+ static GstCaps *
+ gst_dash_demux_get_application_input_caps (GstDashDemux * demux,
+ GstActiveStream * stream)
+ {
+ GstCaps *caps = NULL;
+
+ if (stream == NULL)
+ return NULL;
+
+ caps = gst_mpd_client_get_stream_caps (stream);
+ if (caps == NULL)
+ return NULL;
+
+ return caps;
+ }
+
+ static GstCaps *
+ gst_dash_demux_get_input_caps (GstDashDemux * demux, GstActiveStream * stream)
+ {
+ switch (stream->mimeType) {
+ case GST_STREAM_VIDEO:
+ return gst_dash_demux_get_video_input_caps (demux, stream);
+ case GST_STREAM_AUDIO:
+ return gst_dash_demux_get_audio_input_caps (demux, stream);
+ case GST_STREAM_APPLICATION:
+ return gst_dash_demux_get_application_input_caps (demux, stream);
+ default:
+ return GST_CAPS_NONE;
+ }
+ }
+
/* Fill in the stream's header and index URIs/ranges for the current
 * fragment, resolving the paths returned by the MPD client against the
 * stream's baseURL.  Fields are left untouched when the MPD provides
 * no header/index for this position. */
static void
gst_dash_demux_stream_update_headers_info (GstAdaptiveDemuxStream * stream)
{
  GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
  gchar *path = NULL;

  gst_mpd_client_get_next_header (dashdemux->client,
      &path, dashstream->index,
      &stream->fragment.header_range_start, &stream->fragment.header_range_end);

  if (path != NULL) {
    stream->fragment.header_uri =
        gst_uri_join_strings (gst_mpd_client_get_baseURL (dashdemux->client,
            dashstream->index), path);
    g_free (path);
    /* reset so the index query below starts from a clean pointer */
    path = NULL;
  }

  gst_mpd_client_get_next_header_index (dashdemux->client,
      &path, dashstream->index,
      &stream->fragment.index_range_start, &stream->fragment.index_range_end);

  if (path != NULL) {
    stream->fragment.index_uri =
        gst_uri_join_strings (gst_mpd_client_get_baseURL (dashdemux->client,
            dashstream->index), path);
    g_free (path);
  }
}
+
/* GstAdaptiveDemux vfunc: fill stream->fragment with the URI, byte
 * range and timing of the next thing to download.
 *
 * Three paths:
 *  1. ISOBMFF on-demand streams that still need their header/index:
 *     request just the header (and sidx index) first.
 *  2. Key-units trick mode with parsed moof sync samples: download only
 *     the byte range of the current sync sample (keyframe).
 *  3. Normal operation: the whole next fragment, optionally narrowed to
 *     the current sidx subsegment.
 * Returns GST_FLOW_EOS when the MPD has no further fragment. */
static GstFlowReturn
gst_dash_demux_stream_update_fragment_info (GstAdaptiveDemuxStream * stream)
{
  GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
  GstClockTime ts;
  GstMediaFragmentInfo fragment;
  gboolean isombff;

  gst_adaptive_demux_stream_fragment_clear (&stream->fragment);

  isombff = gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client);

  /* Reset chunk size if any */
  stream->fragment.chunk_size = 0;
  dashstream->current_fragment_keyframe_distance = GST_CLOCK_TIME_NONE;

  if (GST_ADAPTIVE_DEMUX_STREAM_NEED_HEADER (stream) && isombff) {
    gst_dash_demux_stream_update_headers_info (stream);
    /* sidx entries may not be available in here */
    if (stream->fragment.index_uri
        && dashstream->sidx_position != GST_CLOCK_TIME_NONE) {
      /* request only the index to be downloaded as we need to reposition the
       * stream to a subsegment */
      return GST_FLOW_OK;
    }
  }

  /* Path 2: key-units trick mode — only fetch the current sync sample's
   * byte range out of the fragment */
  if (dashstream->moof_sync_samples
      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
    GstDashStreamSyncSample *sync_sample =
        &g_array_index (dashstream->moof_sync_samples, GstDashStreamSyncSample,
        dashstream->current_sync_sample);

    gst_mpd_client_get_next_fragment (dashdemux->client, dashstream->index,
        &fragment);

    /* prefer sidx subsegment timing when available, else MPD timing */
    if (isombff && dashstream->sidx_position != GST_CLOCK_TIME_NONE
        && SIDX (dashstream)->entries) {
      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
      dashstream->current_fragment_timestamp = fragment.timestamp = entry->pts;
      dashstream->current_fragment_duration = fragment.duration =
          entry->duration;
    } else {
      dashstream->current_fragment_timestamp = fragment.timestamp;
      dashstream->current_fragment_duration = fragment.duration;
    }

    /* approximate keyframe spacing: assume sync samples are evenly
     * distributed over the fragment */
    dashstream->current_fragment_keyframe_distance =
        fragment.duration / dashstream->moof_sync_samples->len;
    dashstream->actual_position =
        fragment.timestamp +
        dashstream->current_sync_sample *
        dashstream->current_fragment_keyframe_distance;
    if (stream->segment.rate < 0.0)
      dashstream->actual_position +=
          dashstream->current_fragment_keyframe_distance;
    /* clamp to the fragment end to absorb rounding */
    dashstream->actual_position =
        MIN (dashstream->actual_position,
        fragment.timestamp + fragment.duration);

    stream->fragment.uri = fragment.uri;
    /* timestamp/duration left NONE: the downloaded range is a slice,
     * not the whole fragment */
    stream->fragment.timestamp = GST_CLOCK_TIME_NONE;
    stream->fragment.duration = GST_CLOCK_TIME_NONE;
    stream->fragment.range_start = sync_sample->start_offset;
    stream->fragment.range_end = sync_sample->end_offset;

    GST_DEBUG_OBJECT (stream->pad, "Actual position %" GST_TIME_FORMAT,
        GST_TIME_ARGS (dashstream->actual_position));

    return GST_FLOW_OK;
  }

  /* Path 3: normal fragment download */
  if (gst_mpd_client_get_next_fragment_timestamp (dashdemux->client,
          dashstream->index, &ts)) {
    if (GST_ADAPTIVE_DEMUX_STREAM_NEED_HEADER (stream)) {
      gst_adaptive_demux_stream_fragment_clear (&stream->fragment);
      gst_dash_demux_stream_update_headers_info (stream);
    }

    gst_mpd_client_get_next_fragment (dashdemux->client, dashstream->index,
        &fragment);

    stream->fragment.uri = fragment.uri;
    /* If mpd does not specify indexRange (i.e., null index_uri),
     * sidx entries may not be available until download it */
    if (isombff && dashstream->sidx_position != GST_CLOCK_TIME_NONE
        && SIDX (dashstream)->entries) {
      /* narrow the request to the current sidx subsegment */
      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
      stream->fragment.range_start =
          dashstream->sidx_base_offset + entry->offset;
      dashstream->actual_position = stream->fragment.timestamp = entry->pts;
      dashstream->current_fragment_timestamp = stream->fragment.timestamp =
          entry->pts;
      dashstream->current_fragment_duration = stream->fragment.duration =
          entry->duration;
      if (stream->demux->segment.rate < 0.0) {
        /* reverse playback: bound the range to this subsegment and
         * report the position at its end */
        stream->fragment.range_end =
            stream->fragment.range_start + entry->size - 1;
        dashstream->actual_position += entry->duration;
      } else {
        stream->fragment.range_end = fragment.range_end;
      }
    } else {
      dashstream->actual_position = stream->fragment.timestamp =
          fragment.timestamp;
      dashstream->current_fragment_timestamp = fragment.timestamp;
      dashstream->current_fragment_duration = stream->fragment.duration =
          fragment.duration;
      if (stream->demux->segment.rate < 0.0)
        dashstream->actual_position += fragment.duration;
      /* never start before the sidx data region */
      stream->fragment.range_start =
          MAX (fragment.range_start, dashstream->sidx_base_offset);
      stream->fragment.range_end = fragment.range_end;
    }

    GST_DEBUG_OBJECT (stream->pad, "Actual position %" GST_TIME_FORMAT,
        GST_TIME_ARGS (dashstream->actual_position));

    return GST_FLOW_OK;
  }

  return GST_FLOW_EOS;
}
+
+ static gint
+ gst_dash_demux_index_entry_search (GstSidxBoxEntry * entry, GstClockTime * ts,
+ gpointer user_data)
+ {
+ GstClockTime entry_ts = entry->pts + entry->duration;
+ if (entry_ts <= *ts)
+ return -1;
+ else if (entry->pts > *ts)
+ return 1;
+ else
+ return 0;
+ }
+
/* Position the stream's sidx index on the subsegment containing ts.
 *
 * Binary-searches the sidx entries for ts, tolerates up to 250 ms of
 * rounding slack just outside the indexed range, applies the seek snap
 * flags, and updates entry_index/sidx_position.  Returns GST_FLOW_EOS
 * when the index is empty or ts cannot be matched; writes the chosen
 * entry's pts to *final_ts when non-NULL. */
static GstFlowReturn
gst_dash_demux_stream_sidx_seek (GstDashDemuxStream * dashstream,
    gboolean forward, GstSeekFlags flags, GstClockTime ts,
    GstClockTime * final_ts)
{
  GstSidxBox *sidx = SIDX (dashstream);
  GstSidxBoxEntry *entry;
  gint idx = sidx->entries_count;
  GstFlowReturn ret = GST_FLOW_OK;

  if (sidx->entries_count == 0)
    return GST_FLOW_EOS;

  entry =
      gst_util_array_binary_search (sidx->entries, sidx->entries_count,
      sizeof (GstSidxBoxEntry),
      (GCompareDataFunc) gst_dash_demux_index_entry_search,
      GST_SEARCH_MODE_EXACT, &ts, NULL);

  /* No exact match found, nothing in our index
   * This is usually a bug or broken stream, as the seeking code already
   * makes sure that we're in the correct period and segment, and only need
   * to find the correct place inside the segment. Allow for some rounding
   * errors and inaccuracies here though */
  if (!entry) {
    GstSidxBoxEntry *last_entry = &sidx->entries[sidx->entries_count - 1];

    GST_WARNING_OBJECT (dashstream->parent.pad, "Couldn't find SIDX entry");

    /* accept ts up to 250 ms before the first or after the last entry */
    if (ts < sidx->entries[0].pts
        && ts + 250 * GST_MSECOND >= sidx->entries[0].pts)
      entry = &sidx->entries[0];
    else if (ts >= last_entry->pts + last_entry->duration &&
        ts < last_entry->pts + last_entry->duration + 250 * GST_MSECOND)
      entry = last_entry;
  }
  if (!entry)
    return GST_FLOW_EOS;

  idx = entry - sidx->entries;

  /* FIXME in reverse mode, if we are exactly at a fragment start it makes more
   * sense to start from the end of the previous fragment */
  if (!forward && idx > 0 && entry->pts == ts) {
    idx--;
    entry = &sidx->entries[idx];
  }

  /* Now entry->pts <= ts < entry->pts + entry->duration, need to adjust for
   * snapping */
  if ((flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST) {
    if (idx + 1 < sidx->entries_count
        && sidx->entries[idx + 1].pts - ts < ts - sidx->entries[idx].pts)
      idx += 1;
  } else if ((forward && (flags & GST_SEEK_FLAG_SNAP_AFTER)) || (!forward
          && (flags & GST_SEEK_FLAG_SNAP_BEFORE))) {
    if (idx + 1 < sidx->entries_count && entry->pts < ts)
      idx += 1;
  }

  /* NOTE(review): this asserts the PREVIOUS entry_index (before the
   * assignment below), not the freshly computed idx — confirm whether
   * asserting idx < entries_count was intended. */
  g_assert (sidx->entry_index < sidx->entries_count);

  sidx->entry_index = idx;
  dashstream->sidx_position = sidx->entries[idx].pts;

  if (final_ts)
    *final_ts = dashstream->sidx_position;

  return ret;
}
+
/* GstAdaptiveDemux vfunc: seek one stream to ts.
 *
 * Clears all per-fragment parsing state (adapter, isobmff parser, moof,
 * sync samples), delegates segment selection to the MPD client, and for
 * ISOBMFF on-demand streams additionally repositions (or schedules a
 * pending reposition of) the sidx subsegment index.  Returns
 * GST_FLOW_EOS when the MPD client cannot seek to ts. */
static GstFlowReturn
gst_dash_demux_stream_seek (GstAdaptiveDemuxStream * stream, gboolean forward,
    GstSeekFlags flags, GstClockTime ts, GstClockTime * final_ts)
{
  GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
  gint last_index, last_repeat;
  gboolean is_isobmff;

  /* remember the pre-seek segment so we can tell below whether the MPD
   * seek actually moved to a different segment */
  last_index = dashstream->active_stream->segment_index;
  last_repeat = dashstream->active_stream->segment_repeat_index;

  /* drop any partially parsed download state */
  if (dashstream->adapter)
    gst_adapter_clear (dashstream->adapter);
  dashstream->current_offset = -1;
  dashstream->current_index_header_or_data = 0;

  dashstream->isobmff_parser.current_fourcc = 0;
  dashstream->isobmff_parser.current_start_offset = 0;
  dashstream->isobmff_parser.current_size = 0;

  if (dashstream->moof)
    gst_isoff_moof_box_free (dashstream->moof);
  dashstream->moof = NULL;
  if (dashstream->moof_sync_samples)
    g_array_free (dashstream->moof_sync_samples, TRUE);
  dashstream->moof_sync_samples = NULL;
  dashstream->current_sync_sample = -1;
  dashstream->target_time = GST_CLOCK_TIME_NONE;

  is_isobmff = gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client);

  /* for isobmff the snap flags are handled by the sidx seek below, so
   * strip them from the MPD-level seek */
  if (!gst_mpd_client_stream_seek (dashdemux->client, dashstream->active_stream,
          forward,
          is_isobmff ? (flags & (~(GST_SEEK_FLAG_SNAP_BEFORE |
                      GST_SEEK_FLAG_SNAP_AFTER))) : flags, ts, final_ts)) {
    return GST_FLOW_EOS;
  }

  if (is_isobmff) {
    GstClockTime period_start, offset;

    /* translate ts from period time into stream (presentation) time */
    period_start = gst_mpd_client_get_period_start_time (dashdemux->client);
    offset =
        gst_mpd_client_get_stream_presentation_offset (dashdemux->client,
        dashstream->index);

    if (G_UNLIKELY (ts < period_start))
      ts = offset;
    else
      ts += offset - period_start;

    if (last_index != dashstream->active_stream->segment_index ||
        last_repeat != dashstream->active_stream->segment_repeat_index) {
      /* different segment means a different sidx box: discard the old
       * index and allow a new one to be parsed */
      GST_LOG_OBJECT (stream->pad,
          "Segment index was changed, reset sidx parser");
      gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
      dashstream->sidx_base_offset = 0;
      dashstream->allow_sidx = TRUE;
    }

    if (dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
      if (gst_dash_demux_stream_sidx_seek (dashstream, forward, flags, ts,
              final_ts) != GST_FLOW_OK) {
        /* broken index: drop it and fall back to plain fragment seeking */
        GST_ERROR_OBJECT (stream->pad, "Couldn't find position in sidx");
        dashstream->sidx_position = GST_CLOCK_TIME_NONE;
        gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
      }
      dashstream->pending_seek_ts = GST_CLOCK_TIME_NONE;
    } else {
      /* no index yet, seek when we have it */
      /* FIXME - the final_ts won't be correct here */
      dashstream->pending_seek_ts = ts;
    }
  }

  stream->discont = TRUE;

  return GST_FLOW_OK;
}
+
+ /* Returns TRUE if, while in key-units trick mode, there is at least one
+  * more sync sample (keyframe) available in the current fragment in the
+  * direction of playback. Only meaningful once moof_sync_samples is set. */
+ static gboolean
+ gst_dash_demux_stream_has_next_sync_sample (GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+
+ if (dashstream->moof_sync_samples &&
+ GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
+ if (stream->demux->segment.rate > 0.0) {
+ /* forward: another sample exists after the current index */
+ if (dashstream->current_sync_sample + 1 <
+ dashstream->moof_sync_samples->len)
+ return TRUE;
+ } else {
+ /* reverse: not yet at the first sample */
+ if (dashstream->current_sync_sample >= 1)
+ return TRUE;
+ }
+ }
+ return FALSE;
+ }
+
+ /* Returns TRUE if the sidx index of the current fragment has another
+  * subfragment entry in the direction of playback. Requires the sidx
+  * parser to have finished parsing the index. */
+ static gboolean
+ gst_dash_demux_stream_has_next_subfragment (GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ GstSidxBox *sidx = SIDX (dashstream);
+
+ if (dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+ if (stream->demux->segment.rate > 0.0) {
+ /* forward: another entry exists after the current one */
+ if (sidx->entry_index + 1 < sidx->entries_count)
+ return TRUE;
+ } else {
+ /* reverse: not yet at the first entry */
+ if (sidx->entry_index >= 1)
+ return TRUE;
+ }
+ }
+ return FALSE;
+ }
+
+ /* Advances to the next sync sample (keyframe) inside the current fragment
+  * while in key-units trick mode.
+  *
+  * If @target_time is valid, the target sample index is derived from the
+  * distance between target_time and the fragment start (forward) or the
+  * fragment end (reverse), divided by the current keyframe distance;
+  * otherwise we simply step one sample in the playback direction.
+  *
+  * Returns TRUE if a sample was selected (stream->discont is set), or
+  * FALSE when the fragment is finished and the caller must advance to the
+  * next fragment. */
+ static gboolean
+ gst_dash_demux_stream_advance_sync_sample (GstAdaptiveDemuxStream * stream,
+ GstClockTime target_time)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ gboolean fragment_finished = FALSE;
+ /* NOTE(review): idx is guint but is compared against -1 below; this relies
+  * on -1 wrapping to G_MAXUINT as the "no target" marker — TODO confirm
+  * moof_sync_samples->len can never reach G_MAXUINT. */
+ guint idx = -1;
+
+ if (GST_CLOCK_TIME_IS_VALID (target_time)) {
+ GST_LOG_OBJECT (stream->pad,
+ "target_time:%" GST_TIME_FORMAT " fragment ts %" GST_TIME_FORMAT
+ " average keyframe dist: %" GST_TIME_FORMAT
+ " current keyframe dist: %" GST_TIME_FORMAT
+ " fragment duration:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (target_time),
+ GST_TIME_ARGS (dashstream->current_fragment_timestamp),
+ GST_TIME_ARGS (dashstream->keyframe_average_distance),
+ GST_TIME_ARGS (dashstream->current_fragment_keyframe_distance),
+ GST_TIME_ARGS (stream->fragment.duration));
+
+ if (stream->demux->segment.rate > 0.0) {
+ /* forward: sample index counted from the fragment start */
+ idx =
+ (target_time -
+ dashstream->current_fragment_timestamp) /
+ dashstream->current_fragment_keyframe_distance;
+
+ /* Prevent getting stuck in a loop due to rounding errors */
+ if (idx == dashstream->current_sync_sample)
+ idx++;
+ } else {
+ /* reverse: sample index counted back from the fragment end */
+ GstClockTime end_time =
+ dashstream->current_fragment_timestamp +
+ dashstream->current_fragment_duration;
+
+ if (end_time < target_time) {
+ /* target lies beyond this fragment: force "finished" below */
+ idx = dashstream->moof_sync_samples->len;
+ } else {
+ idx =
+ (end_time -
+ target_time) / dashstream->current_fragment_keyframe_distance;
+ if (idx == dashstream->moof_sync_samples->len) {
+ dashstream->current_sync_sample = -1;
+ fragment_finished = TRUE;
+ goto beach;
+ }
+ idx = dashstream->moof_sync_samples->len - 1 - idx;
+ }
+
+ /* Prevent getting stuck in a loop due to rounding errors */
+ if (idx == dashstream->current_sync_sample) {
+ if (idx == 0) {
+ dashstream->current_sync_sample = -1;
+ fragment_finished = TRUE;
+ goto beach;
+ }
+
+ idx--;
+ }
+ }
+ }
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Advancing sync sample #%d target #%d",
+ dashstream->current_sync_sample, idx);
+
+ /* computed target is past the end of this fragment's samples */
+ if (idx != -1 && idx >= dashstream->moof_sync_samples->len) {
+ dashstream->current_sync_sample = -1;
+ fragment_finished = TRUE;
+ goto beach;
+ }
+
+ if (stream->demux->segment.rate > 0.0) {
+ /* Try to get the sync sample for the target time */
+ if (idx != -1) {
+ dashstream->current_sync_sample = idx;
+ } else {
+ /* no target: simply step to the next sample */
+ dashstream->current_sync_sample++;
+ if (dashstream->current_sync_sample >= dashstream->moof_sync_samples->len) {
+ fragment_finished = TRUE;
+ }
+ }
+ } else {
+ if (idx != -1) {
+ dashstream->current_sync_sample = idx;
+ } else if (dashstream->current_sync_sample == -1) {
+ /* fresh fragment in reverse: start from the last sample */
+ dashstream->current_sync_sample = dashstream->moof_sync_samples->len - 1;
+ } else if (dashstream->current_sync_sample == 0) {
+ /* stepped past the first sample: fragment done */
+ dashstream->current_sync_sample = -1;
+ fragment_finished = TRUE;
+ } else {
+ dashstream->current_sync_sample--;
+ }
+ }
+
+ beach:
+ GST_DEBUG_OBJECT (stream->pad,
+ "Advancing sync sample #%d fragment_finished:%d",
+ dashstream->current_sync_sample, fragment_finished);
+
+ if (!fragment_finished)
+ stream->discont = TRUE;
+
+ return !fragment_finished;
+ }
+
+ /* Steps the sidx entry index one subfragment in the playback direction and
+  * updates dashstream->sidx_position accordingly.
+  *
+  * Returns TRUE if another subfragment was selected, FALSE when the current
+  * fragment's index is exhausted (sidx_position is then set to the end time
+  * forward, or GST_CLOCK_TIME_NONE in reverse). */
+ static gboolean
+ gst_dash_demux_stream_advance_subfragment (GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+
+ GstSidxBox *sidx = SIDX (dashstream);
+ gboolean fragment_finished = TRUE;
+
+ if (dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+ if (stream->demux->segment.rate > 0.0) {
+ gint idx = ++sidx->entry_index;
+ if (idx < sidx->entries_count) {
+ fragment_finished = FALSE;
+ }
+
+ /* past the last entry: position is the end of the final entry */
+ if (idx == sidx->entries_count)
+ dashstream->sidx_position =
+ sidx->entries[idx - 1].pts + sidx->entries[idx - 1].duration;
+ else
+ dashstream->sidx_position = sidx->entries[idx].pts;
+ } else {
+ gint idx = --sidx->entry_index;
+
+ if (idx >= 0) {
+ fragment_finished = FALSE;
+ dashstream->sidx_position = sidx->entries[idx].pts;
+ } else {
+ dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+ }
+ }
+ }
+
+ GST_DEBUG_OBJECT (stream->pad, "New sidx index: %d / %d. "
+ "Finished fragment: %d", sidx->entry_index, sidx->entries_count,
+ fragment_finished);
+
+ return !fragment_finished;
+ }
+
+ /* Returns TRUE if the stream has more data to download: another sync sample
+  * in key-units trick mode, another sidx subfragment (on-demand profile), or
+  * another MPD segment in the playback direction. */
+ static gboolean
+ gst_dash_demux_stream_has_next_fragment (GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+
+ if (dashstream->moof_sync_samples &&
+ GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+ if (gst_dash_demux_stream_has_next_sync_sample (stream))
+ return TRUE;
+ }
+
+ if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client)) {
+ if (gst_dash_demux_stream_has_next_subfragment (stream))
+ return TRUE;
+ }
+
+ return gst_mpd_client_has_next_segment (dashdemux->client,
+ dashstream->active_stream, stream->demux->segment.rate > 0.0);
+ }
+
+ /* The goal here is to figure out, once we have pushed a keyframe downstream,
+ * what the next ideal keyframe to download is.
+ *
+ * This is done based on:
+ * * the current internal position (i.e. actual_position)
+ * * the reported downstream position (QoS feedback)
+ * * the average keyframe download time (average_download_time)
+ */
+ /* Computes the stream position of the next keyframe to download in
+  * key-units trick mode.
+  *
+  * @cur_position: current internal stream position.
+  * @min_skip: minimum amount (> 0) the position must move by.
+  *
+  * Considers the QoS earliest time reported from downstream, the pipeline
+  * clock, configured max framerate/bitrate limits, and the running average
+  * download time, then keeps a running average of the skip size to smooth
+  * out the result. Returns the new target position. */
+ static GstClockTime
+ gst_dash_demux_stream_get_target_time (GstDashDemux * dashdemux,
+ GstAdaptiveDemuxStream * stream, GstClockTime cur_position,
+ GstClockTime min_skip)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ GstClockTime cur_running, min_running, min_position;
+ GstClockTimeDiff diff;
+ GstClockTime ret = cur_position;
+ GstClockTime deadline;
+ GstClockTime upstream_earliest_time;
+ GstClockTime earliest_time = GST_CLOCK_TIME_NONE;
+
+ g_assert (min_skip > 0);
+
+ /* minimum stream position we have to skip to */
+ if (stream->segment.rate > 0)
+ min_position = cur_position + min_skip;
+ else if (cur_position < min_skip)
+ min_position = 0;
+ else
+ min_position = cur_position - min_skip;
+
+ /* Use current clock time or the QoS earliest time, whichever is further in
+ * the future. The QoS time is only updated on every QoS event and
+ * especially not if e.g. a videodecoder or converter drops a frame further
+ * downstream.
+ *
+ * We only use the times if we ever received a QoS event since the last
+ * flush, as otherwise base_time and clock might not be correct because of a
+ * still pre-rolling sink
+ */
+ upstream_earliest_time =
+ gst_adaptive_demux_get_qos_earliest_time ((GstAdaptiveDemux *) dashdemux);
+ if (upstream_earliest_time != GST_CLOCK_TIME_NONE) {
+ GstClock *clock;
+
+ clock = gst_element_get_clock (GST_ELEMENT_CAST (dashdemux));
+
+ if (clock) {
+ GstClockTime base_time;
+ GstClockTime now_time;
+
+ /* convert absolute clock time to pipeline running time */
+ base_time = gst_element_get_base_time (GST_ELEMENT_CAST (dashdemux));
+ now_time = gst_clock_get_time (clock);
+ if (now_time > base_time)
+ now_time -= base_time;
+ else
+ now_time = 0;
+
+ gst_object_unref (clock);
+
+ earliest_time = MAX (now_time, upstream_earliest_time);
+ } else {
+ earliest_time = upstream_earliest_time;
+ }
+ }
+
+ /* our current position in running time */
+ cur_running =
+ gst_segment_to_running_time (&stream->segment, GST_FORMAT_TIME,
+ cur_position);
+
+ /* the minimum position we have to skip to in running time */
+ min_running =
+ gst_segment_to_running_time (&stream->segment, GST_FORMAT_TIME,
+ min_position);
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "position: current %" GST_TIME_FORMAT " min next %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (cur_position), GST_TIME_ARGS (min_position));
+ GST_DEBUG_OBJECT (stream->pad,
+ "running time: current %" GST_TIME_FORMAT " min next %" GST_TIME_FORMAT
+ " earliest %" GST_TIME_FORMAT, GST_TIME_ARGS (cur_running),
+ GST_TIME_ARGS (min_running), GST_TIME_ARGS (earliest_time));
+
+ /* Take configured maximum video bandwidth and framerate into account */
+ {
+ GstClockTime min_run_dist, min_frame_dist, diff = 0;
+ guint max_fps_n, max_fps_d;
+
+ min_run_dist = min_skip / ABS (stream->segment.rate);
+
+ if (dashdemux->max_video_framerate_n != 0) {
+ max_fps_n = dashdemux->max_video_framerate_n;
+ max_fps_d = dashdemux->max_video_framerate_d;
+ } else {
+ /* more than 10 fps is not very useful if we're skipping anyway */
+ max_fps_n = 10;
+ max_fps_d = 1;
+ }
+
+ /* minimum running-time distance between displayed frames at max fps */
+ min_frame_dist = gst_util_uint64_scale_ceil (GST_SECOND,
+ max_fps_d, max_fps_n);
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Have max framerate %d/%d - Min dist %" GST_TIME_FORMAT
+ ", min requested dist %" GST_TIME_FORMAT,
+ max_fps_n, max_fps_d,
+ GST_TIME_ARGS (min_run_dist), GST_TIME_ARGS (min_frame_dist));
+ if (min_frame_dist > min_run_dist)
+ diff = MAX (diff, min_frame_dist - min_run_dist);
+
+ if (dashdemux->max_bitrate != 0) {
+ /* estimated keyframe-only bitrate at the current playback rate */
+ guint64 max_bitrate = gst_util_uint64_scale_ceil (GST_SECOND,
+ 8 * dashstream->keyframe_average_size,
+ dashstream->keyframe_average_distance) * ABS (stream->segment.rate);
+
+ if (max_bitrate > dashdemux->max_bitrate) {
+ /* stretch keyframe distance so the bitrate cap is respected */
+ min_frame_dist = gst_util_uint64_scale_ceil (GST_SECOND,
+ 8 * dashstream->keyframe_average_size,
+ dashdemux->max_bitrate) * ABS (stream->segment.rate);
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Have max bitrate %u - Min dist %" GST_TIME_FORMAT
+ ", min requested dist %" GST_TIME_FORMAT, dashdemux->max_bitrate,
+ GST_TIME_ARGS (min_run_dist), GST_TIME_ARGS (min_frame_dist));
+ if (min_frame_dist > min_run_dist)
+ diff = MAX (diff, min_frame_dist - min_run_dist);
+ }
+ }
+
+ if (diff > 0) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "Skipping further ahead by %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+ min_running += diff;
+ }
+ }
+
+ if (earliest_time == GST_CLOCK_TIME_NONE) {
+ GstClockTime run_key_dist;
+
+ run_key_dist =
+ dashstream->keyframe_average_distance / ABS (stream->segment.rate);
+
+ /* If we don't have downstream information (such as at startup or
+ * without live sinks), just get the next time by taking the minimum
+ * amount we have to skip ahead
+ * Except if it takes us longer to download */
+ if (run_key_dist > dashstream->average_download_time)
+ ret =
+ gst_segment_position_from_running_time (&stream->segment,
+ GST_FORMAT_TIME, min_running);
+ else
+ ret = gst_segment_position_from_running_time (&stream->segment,
+ GST_FORMAT_TIME,
+ min_running - run_key_dist + dashstream->average_download_time);
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Advancing to %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+
+ goto out;
+ }
+
+ /* Figure out the difference, in running time, between where we are and
+ * where downstream is */
+ diff = min_running - earliest_time;
+ GST_LOG_OBJECT (stream->pad,
+ "min_running %" GST_TIME_FORMAT " diff %" GST_STIME_FORMAT
+ " average_download %" GST_TIME_FORMAT, GST_TIME_ARGS (min_running),
+ GST_STIME_ARGS (diff), GST_TIME_ARGS (dashstream->average_download_time));
+
+ /* Have at least 500ms or 3 keyframes safety between current position and downstream */
+ deadline = MAX (500 * GST_MSECOND, 3 * dashstream->average_download_time);
+
+ /* The furthest away we are from the current position, the least we need to advance */
+ if (diff < 0 || diff < deadline) {
+ /* Force skipping (but not more than 1s ahead) */
+ ret =
+ gst_segment_position_from_running_time (&stream->segment,
+ GST_FORMAT_TIME, earliest_time + MIN (deadline, GST_SECOND));
+ GST_DEBUG_OBJECT (stream->pad,
+ "MUST SKIP to at least %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+ } else if (diff < 4 * dashstream->average_download_time) {
+ /* Go forward a bit less aggressively (and at most 1s forward) */
+ ret = gst_segment_position_from_running_time (&stream->segment,
+ GST_FORMAT_TIME, min_running + MIN (GST_SECOND,
+ 2 * dashstream->average_download_time));
+ GST_DEBUG_OBJECT (stream->pad,
+ "MUST SKIP to at least %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+ } else {
+ /* Get the next position satisfying the download time */
+ ret = gst_segment_position_from_running_time (&stream->segment,
+ GST_FORMAT_TIME, min_running);
+ GST_DEBUG_OBJECT (stream->pad,
+ "Advance to %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+ }
+
+ out:
+
+ {
+ /* Smooth the skip size with a 1/4-weight running average, and if the
+ * average is well above this skip (and above min_skip), use it instead
+ * to avoid oscillating skip sizes */
+ GstClockTime cur_skip =
+ (cur_position < ret) ? ret - cur_position : cur_position - ret;
+
+ if (dashstream->average_skip_size == 0) {
+ dashstream->average_skip_size = cur_skip;
+ } else {
+ dashstream->average_skip_size =
+ (cur_skip + 3 * dashstream->average_skip_size) / 4;
+ }
+
+ if (dashstream->average_skip_size >
+ cur_skip + dashstream->keyframe_average_distance
+ && dashstream->average_skip_size > min_skip) {
+ if (stream->segment.rate > 0)
+ ret = cur_position + dashstream->average_skip_size;
+ else if (cur_position > dashstream->average_skip_size)
+ ret = cur_position - dashstream->average_skip_size;
+ else
+ ret = 0;
+ }
+ }
+
+ return ret;
+ }
+
+ /* Advances the stream to the next fragment (or next keyframe in key-units
+  * trick mode).
+  *
+  * Updates the average download time statistic, advances the internal
+  * actual_position, and then either: skips to the next sync sample within
+  * the current fragment; seeks to the fragment containing the computed
+  * target time (key-unit trick mode on video); or advances to the next
+  * subfragment/segment in normal mode. Returns a GstFlowReturn from the
+  * seek/advance operation, or GST_FLOW_OK. */
+ static GstFlowReturn
+ gst_dash_demux_stream_advance_fragment (GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+ GstClockTime target_time = GST_CLOCK_TIME_NONE;
+ GstClockTime previous_position;
+ GstFlowReturn ret;
+
+ GST_DEBUG_OBJECT (stream->pad, "Advance fragment");
+
+ /* Update download statistics */
+ if (dashstream->moof_sync_samples &&
+ GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux) &&
+ GST_CLOCK_TIME_IS_VALID (stream->last_download_time)) {
+ if (GST_CLOCK_TIME_IS_VALID (dashstream->average_download_time)) {
+ /* 1/4-weight running average of the download time */
+ dashstream->average_download_time =
+ (3 * dashstream->average_download_time +
+ stream->last_download_time) / 4;
+ } else {
+ dashstream->average_download_time = stream->last_download_time;
+ }
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Download time last: %" GST_TIME_FORMAT " average: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->last_download_time),
+ GST_TIME_ARGS (dashstream->average_download_time));
+ }
+
+ previous_position = dashstream->actual_position;
+
+ /* Update internal position */
+ if (GST_CLOCK_TIME_IS_VALID (dashstream->actual_position)) {
+ GstClockTime dur;
+ if (dashstream->moof_sync_samples
+ && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+ GST_LOG_OBJECT (stream->pad, "current sync sample #%d",
+ dashstream->current_sync_sample);
+ if (dashstream->current_sync_sample == -1) {
+ /* no sample consumed yet (just downloaded the header) */
+ dur = 0;
+ } else if (dashstream->current_sync_sample <
+ dashstream->moof_sync_samples->len) {
+ dur = dashstream->current_fragment_keyframe_distance;
+ } else {
+ /* past the last sample: use the remaining fragment duration */
+ if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client) &&
+ dashstream->sidx_position != GST_CLOCK_TIME_NONE
+ && SIDX (dashstream)->entries) {
+ GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
+ dur = entry->duration;
+ } else {
+ dur =
+ dashstream->current_fragment_timestamp +
+ dashstream->current_fragment_duration -
+ dashstream->actual_position;
+ }
+ }
+ } else if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client) &&
+ dashstream->sidx_position != GST_CLOCK_TIME_NONE
+ && SIDX (dashstream)->entries) {
+ /* on-demand profile: advance by the current sidx entry duration */
+ GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
+ dur = entry->duration;
+ } else {
+ dur = stream->fragment.duration;
+ }
+
+ if (dashstream->moof_sync_samples
+ && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+ /* We just downloaded the header, we actually use the previous
+ * target_time now as it was not used up yet */
+ if (dashstream->current_sync_sample == -1)
+ target_time = dashstream->target_time;
+ else
+ target_time =
+ gst_dash_demux_stream_get_target_time (dashdemux, stream,
+ dashstream->actual_position, dur);
+ dashstream->actual_position = target_time;
+ } else {
+ /* Adjust based on direction */
+ if (stream->demux->segment.rate > 0.0)
+ dashstream->actual_position += dur;
+ else if (dashstream->actual_position >= dur)
+ dashstream->actual_position -= dur;
+ else
+ dashstream->actual_position = 0;
+ }
+
+ GST_DEBUG_OBJECT (stream->pad, "Actual position %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (dashstream->actual_position));
+ }
+ dashstream->target_time = target_time;
+
+ GST_DEBUG_OBJECT (stream->pad, "target_time: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (target_time));
+
+ /* If downloading only keyframes, switch to the next one or fall through */
+ if (dashstream->moof_sync_samples &&
+ GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+ if (gst_dash_demux_stream_advance_sync_sample (stream, target_time))
+ return GST_FLOW_OK;
+ }
+
+ /* fragment is done: reset per-fragment ISOBMFF parsing state */
+ dashstream->isobmff_parser.current_fourcc = 0;
+ dashstream->isobmff_parser.current_start_offset = 0;
+ dashstream->isobmff_parser.current_size = 0;
+
+ if (dashstream->moof)
+ gst_isoff_moof_box_free (dashstream->moof);
+ dashstream->moof = NULL;
+ if (dashstream->moof_sync_samples)
+ g_array_free (dashstream->moof_sync_samples, TRUE);
+ dashstream->moof_sync_samples = NULL;
+ dashstream->current_sync_sample = -1;
+
+ /* Check if we just need to 'advance' to the next fragment, or if we
+ * need to skip by more. */
+ if (GST_CLOCK_TIME_IS_VALID (target_time)
+ && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux) &&
+ dashstream->active_stream->mimeType == GST_STREAM_VIDEO) {
+ GstClockTime actual_ts;
+ GstSeekFlags flags = 0;
+
+ /* Key-unit trick mode, seek to fragment containing target time
+ *
+ * We first try seeking without snapping. As above code to skip keyframes
+ * in the current fragment was not successful, we should go at least one
+ * fragment ahead. Due to rounding errors we could end up at the same
+ * fragment again here, in which case we retry seeking with the SNAP_AFTER
+ * flag.
+ *
+ * We don't always set that flag as we would then end up one further
+ * fragment in the future in all good cases.
+ */
+ while (TRUE) {
+ ret =
+ gst_dash_demux_stream_seek (stream, (stream->segment.rate > 0), flags,
+ target_time, &actual_ts);
+
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (stream->pad, "Failed to seek to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (target_time));
+ /* Give up */
+ if (flags != 0)
+ break;
+
+ /* Retry with skipping ahead */
+ flags |= GST_SEEK_FLAG_SNAP_AFTER;
+ continue;
+ }
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Skipped to %" GST_TIME_FORMAT " (wanted %" GST_TIME_FORMAT ", was %"
+ GST_TIME_FORMAT ")", GST_TIME_ARGS (actual_ts),
+ GST_TIME_ARGS (target_time), GST_TIME_ARGS (previous_position));
+
+ /* landed at or before where we already were: retry with snapping */
+ if ((stream->segment.rate > 0 && actual_ts <= previous_position) ||
+ (stream->segment.rate < 0 && actual_ts >= previous_position)) {
+ /* Give up */
+ if (flags != 0)
+ break;
+
+ /* Retry with forcing skipping ahead */
+ flags |= GST_SEEK_FLAG_SNAP_AFTER;
+
+ continue;
+ }
+
+ /* All good */
+ break;
+ }
+ } else {
+ /* Normal mode, advance to the next fragment */
+ if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client)) {
+ if (gst_dash_demux_stream_advance_subfragment (stream))
+ return GST_FLOW_OK;
+ }
+
+ if (dashstream->adapter)
+ gst_adapter_clear (dashstream->adapter);
+
+ /* reset sidx state before moving to the next segment */
+ gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
+ dashstream->sidx_base_offset = 0;
+ dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+ dashstream->allow_sidx = TRUE;
+
+ ret = gst_mpd_client_advance_segment (dashdemux->client,
+ dashstream->active_stream, stream->demux->segment.rate > 0.0);
+ }
+ return ret;
+ }
+
+ /* Selects a representation matching @bitrate for this stream.
+  *
+  * Skipped entirely in key-frame trick mode. The effective bitrate is
+  * capped by the configured max bitrate for video and scaled down by the
+  * playback rate for fast trick modes. On a successful switch the caps are
+  * updated and all per-fragment parsing state (sidx, ISOBMFF parser,
+  * adapter, moof, sync samples) is reset; the current sidx position is
+  * preserved so the same position can be resumed in the new representation.
+  * Returns TRUE if the representation changed. */
+ static gboolean
+ gst_dash_demux_stream_select_bitrate (GstAdaptiveDemuxStream * stream,
+ guint64 bitrate)
+ {
+ GstActiveStream *active_stream = NULL;
+ GList *rep_list = NULL;
+ gint new_index;
+ GstAdaptiveDemux *base_demux = stream->demux;
+ GstDashDemux *demux = GST_DASH_DEMUX_CAST (stream->demux);
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ gboolean ret = FALSE;
+
+ active_stream = dashstream->active_stream;
+ if (active_stream == NULL) {
+ goto end;
+ }
+
+ /* In key-frame trick mode don't change bitrates */
+ if (GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)) {
+ GST_DEBUG_OBJECT (demux, "In key-frame trick mode, not changing bitrates");
+ goto end;
+ }
+
+ /* retrieve representation list */
+ if (active_stream->cur_adapt_set)
+ rep_list = active_stream->cur_adapt_set->Representations;
+ if (!rep_list) {
+ goto end;
+ }
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Trying to change to bitrate: %" G_GUINT64_FORMAT, bitrate);
+
+ if (active_stream->mimeType == GST_STREAM_VIDEO && demux->max_bitrate) {
+ bitrate = MIN (demux->max_bitrate, bitrate);
+ }
+
+ /* get representation index with current max_bandwidth */
+ if (GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (base_demux) ||
+ ABS (base_demux->segment.rate) <= 1.0) {
+ new_index =
+ gst_mpd_client_get_rep_idx_with_max_bandwidth (rep_list, bitrate,
+ demux->max_video_width, demux->max_video_height,
+ demux->max_video_framerate_n, demux->max_video_framerate_d);
+ } else {
+ /* fast playback consumes data faster: divide the budget by the rate */
+ new_index =
+ gst_mpd_client_get_rep_idx_with_max_bandwidth (rep_list,
+ bitrate / ABS (base_demux->segment.rate), demux->max_video_width,
+ demux->max_video_height, demux->max_video_framerate_n,
+ demux->max_video_framerate_d);
+ }
+
+ /* if no representation has the required bandwidth, take the lowest one */
+ if (new_index == -1)
+ new_index = gst_mpd_client_get_rep_idx_with_min_bandwidth (rep_list);
+
+ if (new_index != active_stream->representation_idx) {
+ GstMPDRepresentationNode *rep = g_list_nth_data (rep_list, new_index);
+ GST_INFO_OBJECT (demux, "Changing representation idx: %d %d %u",
+ dashstream->index, new_index, rep->bandwidth);
+ if (gst_mpd_client_setup_representation (demux->client, active_stream, rep)) {
+ GstCaps *caps;
+
+ GST_INFO_OBJECT (demux, "Switching bitrate to %d",
+ active_stream->cur_representation->bandwidth);
+ caps = gst_dash_demux_get_input_caps (demux, active_stream);
+ gst_adaptive_demux_stream_set_caps (stream, caps);
+ ret = TRUE;
+
+ } else {
+ GST_WARNING_OBJECT (demux, "Can not switch representation, aborting...");
+ }
+ }
+
+ if (ret) {
+ if (gst_mpd_client_has_isoff_ondemand_profile (demux->client)
+ && SIDX (dashstream)->entries) {
+ /* store our current position to change to the same one in a different
+ * representation if needed */
+ if (SIDX (dashstream)->entry_index < SIDX (dashstream)->entries_count)
+ dashstream->sidx_position = SIDX_CURRENT_ENTRY (dashstream)->pts;
+ else if (SIDX (dashstream)->entry_index >=
+ SIDX (dashstream)->entries_count)
+ /* past the last entry: remember the end of the final entry */
+ dashstream->sidx_position =
+ SIDX_ENTRY (dashstream,
+ SIDX (dashstream)->entries_count - 1)->pts + SIDX_ENTRY (dashstream,
+ SIDX (dashstream)->entries_count - 1)->duration;
+ else
+ dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+ } else {
+ dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+ }
+
+ /* the new representation has its own index: reset sidx state */
+ gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
+ dashstream->sidx_base_offset = 0;
+ dashstream->allow_sidx = TRUE;
+
+ /* Reset ISOBMFF box parsing state */
+ dashstream->isobmff_parser.current_fourcc = 0;
+ dashstream->isobmff_parser.current_start_offset = 0;
+ dashstream->isobmff_parser.current_size = 0;
+
+ dashstream->current_offset = -1;
+ dashstream->current_index_header_or_data = 0;
+
+ if (dashstream->adapter)
+ gst_adapter_clear (dashstream->adapter);
+
+ if (dashstream->moof)
+ gst_isoff_moof_box_free (dashstream->moof);
+ dashstream->moof = NULL;
+ if (dashstream->moof_sync_samples)
+ g_array_free (dashstream->moof_sync_samples, TRUE);
+ dashstream->moof_sync_samples = NULL;
+ dashstream->current_sync_sample = -1;
+ dashstream->target_time = GST_CLOCK_TIME_NONE;
+ }
+
+ end:
+ return ret;
+ }
+
+ /* TRUE if a seek at rate @r actually changes the playback position:
+  * forward seeks must set a start position, reverse seeks a stop position. */
+ #define SEEK_UPDATES_PLAY_POSITION(r, start_type, stop_type) \
+ ((r >= 0 && start_type != GST_SEEK_TYPE_NONE) || \
+ (r < 0 && stop_type != GST_SEEK_TYPE_NONE))
+
+ /* Handles a seek event on the demuxer.
+  *
+  * Determines the target position from the event (start when playing
+  * forward, stop in reverse), locates the Period containing it, switches
+  * Periods and/or rebuilds the stream set if needed (also when the
+  * TRICKMODE_NO_AUDIO flag changed), and finally seeks every stream to the
+  * target position. Returns TRUE on success. */
+ static gboolean
+ gst_dash_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek)
+ {
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ GList *list;
+ GstClockTime current_pos, target_pos;
+ guint current_period;
+ GstStreamPeriod *period;
+ GList *iter, *streams = NULL;
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+ gboolean trickmode_no_audio;
+
+ gst_event_parse_seek (seek, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (!SEEK_UPDATES_PLAY_POSITION (rate, start_type, stop_type)) {
+ /* nothing to do if we don't have to update the current position */
+ return TRUE;
+ }
+
+ if (demux->segment.rate > 0.0) {
+ target_pos = (GstClockTime) start;
+ } else {
+ target_pos = (GstClockTime) stop;
+ }
+
+ /* select the requested Period in the Media Presentation */
+ if (!gst_mpd_client_setup_media_presentation (dashdemux->client, target_pos,
+ -1, NULL))
+ return FALSE;
+
+ /* find the Period whose [start, start + duration] contains target_pos */
+ current_period = 0;
+ for (list = g_list_first (dashdemux->client->periods); list;
+ list = g_list_next (list)) {
+ period = list->data;
+ current_pos = period->start;
+ current_period = period->number;
+ GST_DEBUG_OBJECT (demux, "Looking at period %u) start:%"
+ GST_TIME_FORMAT " - duration:%"
+ GST_TIME_FORMAT ") for position %" GST_TIME_FORMAT,
+ current_period, GST_TIME_ARGS (current_pos),
+ GST_TIME_ARGS (period->duration), GST_TIME_ARGS (target_pos));
+ if (current_pos <= target_pos
+ && target_pos <= current_pos + period->duration) {
+ break;
+ }
+ }
+ if (list == NULL) {
+ GST_WARNING_OBJECT (demux, "Could not find seeked Period");
+ return FALSE;
+ }
+
+ trickmode_no_audio = ! !(flags & GST_SEEK_FLAG_TRICKMODE_NO_AUDIO);
+
+ streams = demux->streams;
+ if (current_period != gst_mpd_client_get_period_index (dashdemux->client)) {
+ GST_DEBUG_OBJECT (demux, "Seeking to Period %d", current_period);
+
+ /* clean old active stream list, if any */
+ gst_mpd_client_active_streams_free (dashdemux->client);
+ dashdemux->trickmode_no_audio = trickmode_no_audio;
+
+ /* setup video, audio and subtitle streams, starting from the new Period */
+ if (!gst_mpd_client_set_period_index (dashdemux->client, current_period)
+ || !gst_dash_demux_setup_all_streams (dashdemux))
+ return FALSE;
+ streams = demux->next_streams;
+ } else if (dashdemux->trickmode_no_audio != trickmode_no_audio) {
+ /* same Period, but the audio-suppression flag changed: rebuild streams */
+ /* clean old active stream list, if any */
+ gst_mpd_client_active_streams_free (dashdemux->client);
+ dashdemux->trickmode_no_audio = trickmode_no_audio;
+
+ /* setup video, audio and subtitle streams, starting from the new Period */
+ if (!gst_dash_demux_setup_all_streams (dashdemux))
+ return FALSE;
+ streams = demux->next_streams;
+ }
+
+ /* Update the current sequence on all streams */
+ for (iter = streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ GstDashDemuxStream *dashstream = iter->data;
+
+ dashstream->average_skip_size = 0;
+ if (gst_dash_demux_stream_seek (stream, rate >= 0, 0, target_pos,
+ NULL) != GST_FLOW_OK)
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /* Returns the manifest refresh interval: the MPD's minimumUpdatePeriod
+  * (converted to the caller's unit via the * 1000 scaling — presumably
+  * microseconds from milliseconds; TODO confirm), capped at
+  * SLOW_CLOCK_UPDATE_INTERVAL. */
+ static gint64
+ gst_dash_demux_get_manifest_update_interval (GstAdaptiveDemux * demux)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+ return MIN (dashdemux->client->mpd_root_node->minimumUpdatePeriod * 1000,
+ SLOW_CLOCK_UPDATE_INTERVAL);
+ }
+
+ /* Parses a freshly downloaded manifest from @buffer and swaps it in.
+  *
+  * A new MPD client is created and pointed at the same Period as the old
+  * one; streams are re-set-up on the new client and each stream is seeked
+  * to just past its current position so playback continues seamlessly.
+  * On success the old client is freed and replaced. Returns GST_FLOW_OK,
+  * GST_FLOW_EOS if the current period/stream no longer exists in the new
+  * manifest, or GST_FLOW_ERROR on parse/setup failure. */
+ static GstFlowReturn
+ gst_dash_demux_update_manifest_data (GstAdaptiveDemux * demux,
+ GstBuffer * buffer)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+ GstMPDClient *new_client = NULL;
+ GstMapInfo mapinfo;
+
+ GST_DEBUG_OBJECT (demux, "Updating manifest file from URL");
+
+ /* parse the manifest file */
+ new_client = gst_mpd_client_new ();
+ gst_mpd_client_set_uri_downloader (new_client, demux->downloader);
+ new_client->mpd_uri = g_strdup (demux->manifest_uri);
+ new_client->mpd_base_uri = g_strdup (demux->manifest_base_uri);
+ gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
+
+ if (gst_mpd_client_parse (new_client, (gchar *) mapinfo.data, mapinfo.size)) {
+ const gchar *period_id;
+ guint period_idx;
+ GList *iter;
+ GList *streams_iter;
+ GList *streams;
+
+ /* prepare the new manifest and try to transfer the stream position
+ * status from the old manifest client */
+
+ GST_DEBUG_OBJECT (demux, "Updating manifest");
+
+ period_id = gst_mpd_client_get_period_id (dashdemux->client);
+ period_idx = gst_mpd_client_get_period_index (dashdemux->client);
+
+ /* setup video, audio and subtitle streams, starting from current Period */
+ if (!gst_mpd_client_setup_media_presentation (new_client, -1,
+ (period_id ? -1 : period_idx), period_id)) {
+ /* TODO */
+ }
+
+ /* re-select the current Period by id if available, else by index */
+ if (period_id) {
+ if (!gst_mpd_client_set_period_id (new_client, period_id)) {
+ GST_DEBUG_OBJECT (demux, "Error setting up the updated manifest file");
+ gst_mpd_client_free (new_client);
+ gst_buffer_unmap (buffer, &mapinfo);
+ return GST_FLOW_EOS;
+ }
+ } else {
+ if (!gst_mpd_client_set_period_index (new_client, period_idx)) {
+ GST_DEBUG_OBJECT (demux, "Error setting up the updated manifest file");
+ gst_mpd_client_free (new_client);
+ gst_buffer_unmap (buffer, &mapinfo);
+ return GST_FLOW_EOS;
+ }
+ }
+
+ if (!gst_dash_demux_setup_mpdparser_streams (dashdemux, new_client)) {
+ GST_ERROR_OBJECT (demux, "Failed to setup streams on manifest " "update");
+ gst_mpd_client_free (new_client);
+ gst_buffer_unmap (buffer, &mapinfo);
+ return GST_FLOW_ERROR;
+ }
+
+ /* If no pads have been exposed yet, need to use those */
+ streams = NULL;
+ if (demux->streams == NULL) {
+ if (demux->prepared_streams) {
+ streams = demux->prepared_streams;
+ }
+ } else {
+ streams = demux->streams;
+ }
+
+ /* update the streams to play from the next segment */
+ for (iter = streams, streams_iter = new_client->active_streams;
+ iter && streams_iter;
+ iter = g_list_next (iter), streams_iter = g_list_next (streams_iter)) {
+ GstDashDemuxStream *demux_stream = iter->data;
+ GstActiveStream *new_stream = streams_iter->data;
+ GstClockTime ts;
+
+ if (!new_stream) {
+ GST_DEBUG_OBJECT (demux,
+ "Stream of index %d is missing from manifest update",
+ demux_stream->index);
+ gst_mpd_client_free (new_client);
+ gst_buffer_unmap (buffer, &mapinfo);
+ return GST_FLOW_EOS;
+ }
+
+ if (gst_mpd_client_get_next_fragment_timestamp (dashdemux->client,
+ demux_stream->index, &ts)
+ || gst_mpd_client_get_last_fragment_timestamp_end (dashdemux->client,
+ demux_stream->index, &ts)) {
+
+ /* Due to rounding when doing the timescale conversions it might happen
+ * that the ts falls back to a previous segment, leading the same data
+ * to be downloaded twice. We try to work around this by always adding
+ * 10 microseconds to get back to the correct segment. The errors are
+ * usually on the order of nanoseconds so it should be enough.
+ */
+
+ /* _get_next_fragment_timestamp() returned relative timestamp to
+ * corresponding period start, but _client_stream_seek expects absolute
+ * MPD time. */
+ ts += gst_mpd_client_get_period_start_time (dashdemux->client);
+
+ GST_DEBUG_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (demux_stream),
+ "Current position: %" GST_TIME_FORMAT ", updating to %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (ts),
+ GST_TIME_ARGS (ts + (10 * GST_USECOND)));
+ ts += 10 * GST_USECOND;
+ gst_mpd_client_stream_seek (new_client, new_stream,
+ demux->segment.rate >= 0, 0, ts, NULL);
+ }
+
+ demux_stream->active_stream = new_stream;
+ }
+
+ /* swap in the new client */
+ gst_mpd_client_free (dashdemux->client);
+ dashdemux->client = new_client;
+
+ GST_DEBUG_OBJECT (demux, "Manifest file successfully updated");
+ if (dashdemux->clock_drift) {
+ gst_dash_demux_poll_clock_drift (dashdemux);
+ }
+ } else {
+ /* In most cases, this will happen if we set a wrong url in the
+ * source element and we have received the 404 HTML response instead of
+ * the manifest */
+ GST_WARNING_OBJECT (demux, "Error parsing the manifest.");
+ gst_mpd_client_free (new_client);
+ gst_buffer_unmap (buffer, &mapinfo);
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_unmap (buffer, &mapinfo);
+
+ return GST_FLOW_OK;
+ }
+
+ static gint64
+ gst_dash_demux_stream_get_fragment_waiting_time (GstAdaptiveDemuxStream *
+ stream)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+ GstDateTime *segmentAvailability;
+ GstActiveStream *active_stream = dashstream->active_stream;
+
+ segmentAvailability =
+ gst_mpd_client_get_next_segment_availability_start_time
+ (dashdemux->client, active_stream);
+
+ if (segmentAvailability) {
+ gint64 diff;
+ GstDateTime *cur_time;
+
+ cur_time =
+ gst_date_time_new_from_g_date_time
+ (gst_adaptive_demux_get_client_now_utc (GST_ADAPTIVE_DEMUX_CAST
+ (dashdemux)));
+ diff =
+ gst_mpd_client_calculate_time_difference (cur_time,
+ segmentAvailability);
+ gst_date_time_unref (segmentAvailability);
+ gst_date_time_unref (cur_time);
+ /* subtract the server's clock drift, so that if the server's
+ time is behind our idea of UTC, we need to sleep for longer
+ before requesting a fragment */
+ return diff -
+ gst_dash_demux_get_clock_compensation (dashdemux) * GST_USECOND;
+ }
+ return 0;
+ }
+
+ static gboolean
+ gst_dash_demux_has_next_period (GstAdaptiveDemux * demux)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+ if (demux->segment.rate >= 0)
+ return gst_mpd_client_has_next_period (dashdemux->client);
+ else
+ return gst_mpd_client_has_previous_period (dashdemux->client);
+ }
+
+ static void
+ gst_dash_demux_advance_period (GstAdaptiveDemux * demux)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+ if (demux->segment.rate >= 0) {
+ if (!gst_mpd_client_set_period_index (dashdemux->client,
+ gst_mpd_client_get_period_index (dashdemux->client) + 1)) {
+ /* TODO error */
+ return;
+ }
+ } else {
+ if (!gst_mpd_client_set_period_index (dashdemux->client,
+ gst_mpd_client_get_period_index (dashdemux->client) - 1)) {
+ /* TODO error */
+ return;
+ }
+ }
+
+ gst_dash_demux_setup_all_streams (dashdemux);
+ gst_mpd_client_seek_to_first_segment (dashdemux->client);
+ }
+
+ static GstBuffer *
+ _gst_buffer_split (GstBuffer * buffer, gint offset, gsize size)
+ {
+ GstBuffer *newbuf = gst_buffer_copy_region (buffer,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_META
+ | GST_BUFFER_COPY_MEMORY, offset, size == -1 ? size : size - offset);
+
+ gst_buffer_resize (buffer, 0, offset);
+
+ return newbuf;
+ }
+
+ static gboolean
+ gst_dash_demux_stream_fragment_start (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
+ GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;
+
+ GST_LOG_OBJECT (stream->pad, "Actual position %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (dashstream->actual_position));
+
+ dashstream->current_index_header_or_data = 0;
+ dashstream->current_offset = -1;
+
+ /* We need to mark every first buffer of a key unit as discont,
+ * and also every first buffer of a moov and moof. This ensures
+ * that qtdemux takes note of our buffer offsets for each of those
+ * buffers instead of keeping track of them itself from the first
+ * buffer. We need offsets to be consistent between moof and mdat
+ */
+ if (dashstream->is_isobmff && dashdemux->allow_trickmode_key_units
+ && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)
+ && dashstream->active_stream->mimeType == GST_STREAM_VIDEO)
+ stream->discont = TRUE;
+
+ return TRUE;
+ }
+
/* Called by the adaptive demux base class when the download of the current
 * fragment (or chunk) has finished. Decides whether to advance to the
 * next fragment now, or to hold the current fragment because more
 * subfragments (SIDX entries) remain to be downloaded. */
static GstFlowReturn
gst_dash_demux_stream_fragment_finished (GstAdaptiveDemux * demux,
    GstAdaptiveDemuxStream * stream)
{
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
  GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;

  /* We need to mark every first buffer of a key unit as discont,
   * and also every first buffer of a moov and moof. This ensures
   * that qtdemux takes note of our buffer offsets for each of those
   * buffers instead of keeping track of them itself from the first
   * buffer. We need offsets to be consistent between moof and mdat
   */
  if (dashstream->is_isobmff && dashdemux->allow_trickmode_key_units
      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)
      && dashstream->active_stream->mimeType == GST_STREAM_VIDEO)
    stream->discont = TRUE;

  /* Only handle fragment advancing specifically for SIDX if we're not
   * in key unit mode */
  if (!(dashstream->moof_sync_samples
          && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux))
      && gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client)
      && dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
    /* fragment is advanced on data_received when byte limits are reached */
    if (dashstream->pending_seek_ts != GST_CLOCK_TIME_NONE) {
      /* A SIDX-based seek is still pending: keep this fragment while
       * unconsumed SIDX entries remain */
      if (SIDX (dashstream)->entry_index < SIDX (dashstream)->entries_count)
        return GST_FLOW_OK;
    } else if (gst_dash_demux_stream_has_next_subfragment (stream)) {
      return GST_FLOW_OK;
    }
  }

  /* Header/index downloads do not advance the media fragment position */
  if (G_UNLIKELY (stream->downloading_header || stream->downloading_index))
    return GST_FLOW_OK;

  return gst_adaptive_demux_stream_advance_fragment (demux, stream,
      stream->fragment.duration);
}
+
/* Decides whether another HTTP chunk of the current fragment should be
 * requested and sizes it via stream->fragment.chunk_size. Sentinels:
 * chunk_size == 0 means "done, no more chunks", chunk_size == -1 means
 * "download until the end of the fragment". Chunked downloading is only
 * used for ISOBMFF video in key-units trick mode; it downloads just
 * enough to parse the moof and then, if wanted, the first sync sample.
 * Returns TRUE when chunk_size != 0. */
static gboolean
gst_dash_demux_need_another_chunk (GstAdaptiveDemuxStream * stream)
{
  GstDashDemux *dashdemux = (GstDashDemux *) stream->demux;
  GstDashDemuxStream *dashstream = (GstDashDemuxStream *) stream;

  /* We're chunked downloading for ISOBMFF in KEY_UNITS mode for the actual
   * fragment until we parsed the moof and arrived at the mdat. 8192 is a
   * random guess for the moof size
   */
  if (dashstream->is_isobmff
      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)
      && dashstream->active_stream->mimeType == GST_STREAM_VIDEO
      && !stream->downloading_header && !stream->downloading_index
      && dashdemux->allow_trickmode_key_units) {
    if (dashstream->isobmff_parser.current_fourcc != GST_ISOFF_FOURCC_MDAT) {
      /* Need to download the moof first to know anything */

      stream->fragment.chunk_size = 8192;
      /* Do we have the first fourcc already or are we in the middle */
      if (dashstream->isobmff_parser.current_fourcc == 0) {
        /* Very first chunk of the fragment: over-request by the running
         * moof size estimate, and possibly the first keyframe too */
        stream->fragment.chunk_size += dashstream->moof_average_size;
        if (dashstream->first_sync_sample_always_after_moof) {
          gboolean first = FALSE;
          /* Check if we'll really need that first sample */
          if (GST_CLOCK_TIME_IS_VALID (dashstream->target_time)) {
            first =
                ((dashstream->target_time -
                    dashstream->current_fragment_timestamp) /
                dashstream->keyframe_average_distance) == 0 ? TRUE : FALSE;
          } else if (stream->segment.rate > 0) {
            first = TRUE;
          }

          if (first)
            stream->fragment.chunk_size += dashstream->keyframe_average_size;
        }
      }

      if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client) &&
          dashstream->sidx_parser.sidx.entries) {
        /* Clamp the chunk so it never reads past the end of the current
         * SIDX subsegment */
        guint64 sidx_start_offset =
            dashstream->sidx_base_offset +
            SIDX_CURRENT_ENTRY (dashstream)->offset;
        guint64 sidx_end_offset =
            sidx_start_offset + SIDX_CURRENT_ENTRY (dashstream)->size;
        guint64 downloaded_end_offset;

        /* NOTE(review): current_offset is a byte offset compared against
         * GST_CLOCK_TIME_NONE here; both are (guint64) -1 so this works,
         * but (guint64) -1 would express the intent better */
        if (dashstream->current_offset == GST_CLOCK_TIME_NONE) {
          downloaded_end_offset = sidx_start_offset;
        } else {
          downloaded_end_offset =
              dashstream->current_offset +
              gst_adapter_available (dashstream->adapter);
        }

        downloaded_end_offset = MAX (downloaded_end_offset, sidx_start_offset);

        if (stream->fragment.chunk_size +
            downloaded_end_offset > sidx_end_offset) {
          stream->fragment.chunk_size = sidx_end_offset - downloaded_end_offset;
        }
      }
    } else if (dashstream->moof && dashstream->moof_sync_samples) {
      /* Have the moof, either we're done now or we want to download the
       * directly following sync sample */
      if (dashstream->first_sync_sample_after_moof
          && dashstream->current_sync_sample == 0) {
        GstDashStreamSyncSample *sync_sample =
            &g_array_index (dashstream->moof_sync_samples,
            GstDashStreamSyncSample, 0);
        guint64 end_offset = sync_sample->end_offset + 1;
        guint64 downloaded_end_offset;

        downloaded_end_offset =
            dashstream->current_offset +
            gst_adapter_available (dashstream->adapter);

        if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client) &&
            dashstream->sidx_parser.sidx.entries) {
          /* Never read past the current SIDX subsegment */
          guint64 sidx_end_offset =
              dashstream->sidx_base_offset +
              SIDX_CURRENT_ENTRY (dashstream)->offset +
              SIDX_CURRENT_ENTRY (dashstream)->size;

          if (end_offset > sidx_end_offset) {
            end_offset = sidx_end_offset;
          }
        }

        if (downloaded_end_offset < end_offset) {
          stream->fragment.chunk_size = end_offset - downloaded_end_offset;
        } else {
          stream->fragment.chunk_size = 0;
        }
      } else {
        stream->fragment.chunk_size = 0;
      }
    } else {
      /* Have moof but can't do key-units mode, just download until the end */
      stream->fragment.chunk_size = -1;
    }
  } else {
    /* We might've decided that we can't allow key-unit only
     * trickmodes while doing chunked downloading. In that case
     * just download from here to the end now */
    if (dashstream->moof
        && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
      stream->fragment.chunk_size = -1;
    } else {
      stream->fragment.chunk_size = 0;
    }
  }

  return stream->fragment.chunk_size != 0;
}
+
/* Parses the ISOBMFF box structure of the data accumulated in the
 * stream's adapter, up to (but not into) the mdat box. Complete non-mdat
 * boxes are pushed downstream as they are consumed; moof boxes are parsed
 * for later sync-sample extraction and sidx boxes feed the subfragment
 * index. *sidx_seek_needed is set when a parsed SIDX requires a jump to a
 * non-first entry so the caller can end the current download.
 * Incomplete data is pushed back into the adapter for the next call. */
static GstFlowReturn
gst_dash_demux_parse_isobmff (GstAdaptiveDemux * demux,
    GstDashDemuxStream * dash_stream, gboolean * sidx_seek_needed)
{
  GstAdaptiveDemuxStream *stream = (GstAdaptiveDemuxStream *) dash_stream;
  GstDashDemux *dashdemux = GST_DASH_DEMUX_CAST (demux);
  gsize available;
  GstBuffer *buffer;
  GstMapInfo map;
  GstByteReader reader;
  guint32 fourcc;
  guint header_size;
  guint64 size, buffer_offset;

  *sidx_seek_needed = FALSE;

  /* This must not be called when we're in the mdat. We only look at the mdat
   * header and then stop parsing the boxes as we're only interested in the
   * metadata! Handling mdat is the job of the surrounding code, as well as
   * stopping or starting the next fragment when mdat is over (=> sidx)
   */
  g_assert (dash_stream->isobmff_parser.current_fourcc !=
      GST_ISOFF_FOURCC_MDAT);

  available = gst_adapter_available (dash_stream->adapter);
  buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
  buffer_offset = dash_stream->current_offset;

  /* Always at the start of a box here */
  g_assert (dash_stream->isobmff_parser.current_size == 0);

  /* At the start of a box => Parse it */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  /* While there are more boxes left to parse ... */
  dash_stream->isobmff_parser.current_start_offset = buffer_offset;
  do {
    dash_stream->isobmff_parser.current_fourcc = 0;
    dash_stream->isobmff_parser.current_size = 0;

    /* Not even a full box header available: wait for more data */
    if (!gst_isoff_parse_box_header (&reader, &fourcc, NULL, &header_size,
            &size)) {
      break;
    }

    dash_stream->isobmff_parser.current_fourcc = fourcc;
    if (size == 0) {
      /* We assume this is mdat, anything else with "size until end"
       * does not seem to make sense */
      g_assert (dash_stream->isobmff_parser.current_fourcc ==
          GST_ISOFF_FOURCC_MDAT);
      dash_stream->isobmff_parser.current_size = -1;
      break;
    }

    dash_stream->isobmff_parser.current_size = size;

    /* Do we have the complete box or are at MDAT */
    if (gst_byte_reader_get_remaining (&reader) < size - header_size ||
        dash_stream->isobmff_parser.current_fourcc == GST_ISOFF_FOURCC_MDAT) {
      /* Reset byte reader to the beginning of the box */
      gst_byte_reader_set_pos (&reader,
          gst_byte_reader_get_pos (&reader) - header_size);
      break;
    }

    GST_LOG_OBJECT (stream->pad,
        "box %" GST_FOURCC_FORMAT " at offset %" G_GUINT64_FORMAT " size %"
        G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc),
        dash_stream->isobmff_parser.current_start_offset, size);

    if (dash_stream->isobmff_parser.current_fourcc == GST_ISOFF_FOURCC_MOOF) {
      GstByteReader sub_reader;

      /* Only allow SIDX before the very first moof */
      dash_stream->allow_sidx = FALSE;

      g_assert (dash_stream->moof == NULL);
      g_assert (dash_stream->moof_sync_samples == NULL);
      gst_byte_reader_get_sub_reader (&reader, &sub_reader, size - header_size);
      dash_stream->moof = gst_isoff_moof_box_parse (&sub_reader);
      dash_stream->moof_offset =
          dash_stream->isobmff_parser.current_start_offset;
      dash_stream->moof_size = size;
      dash_stream->current_sync_sample = -1;

      /* Keep a weighted running estimate of moof size, biased towards
       * over-estimation (growth is weighted 3x) so chunked requests for
       * the next moof rarely come up short */
      if (dash_stream->moof_average_size) {
        if (dash_stream->moof_average_size < size)
          dash_stream->moof_average_size =
              (size * 3 + dash_stream->moof_average_size) / 4;
        else
          dash_stream->moof_average_size =
              (size + dash_stream->moof_average_size + 3) / 4;
      } else {
        dash_stream->moof_average_size = size;
      }
    } else if (dash_stream->isobmff_parser.current_fourcc ==
        GST_ISOFF_FOURCC_SIDX &&
        gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client) &&
        dash_stream->allow_sidx) {
      GstByteReader sub_reader;
      GstIsoffParserResult res;
      guint dummy;

      /* All SIDX entry offsets are relative to the first byte after the
       * sidx box itself */
      dash_stream->sidx_base_offset =
          dash_stream->isobmff_parser.current_start_offset + size;
      dash_stream->allow_sidx = FALSE;

      gst_byte_reader_get_sub_reader (&reader, &sub_reader, size - header_size);

      res =
          gst_isoff_sidx_parser_parse (&dash_stream->sidx_parser, &sub_reader,
          &dummy);

      if (res == GST_ISOFF_PARSER_DONE) {
        guint64 first_offset = dash_stream->sidx_parser.sidx.first_offset;
        GstSidxBox *sidx = SIDX (dash_stream);
        guint i;

        if (first_offset) {
          GST_LOG_OBJECT (stream->pad,
              "non-zero sidx first offset %" G_GUINT64_FORMAT, first_offset);
          dash_stream->sidx_base_offset += first_offset;
        }

        /* Only ref_type 0 (media references) is supported; a hierarchical
         * index (ref_type 1) invalidates the whole table */
        for (i = 0; i < sidx->entries_count; i++) {
          GstSidxBoxEntry *entry = &sidx->entries[i];

          if (entry->ref_type != 0) {
            GST_FIXME_OBJECT (stream->pad, "SIDX ref_type 1 not supported yet");
            dash_stream->sidx_position = GST_CLOCK_TIME_NONE;
            gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
            break;
          }
        }

        /* We might've cleared the index above */
        if (sidx->entries_count > 0) {
          if (GST_CLOCK_TIME_IS_VALID (dash_stream->pending_seek_ts)) {
            /* FIXME, preserve seek flags */
            if (gst_dash_demux_stream_sidx_seek (dash_stream,
                    demux->segment.rate >= 0, 0, dash_stream->pending_seek_ts,
                    NULL) != GST_FLOW_OK) {
              GST_ERROR_OBJECT (stream->pad, "Couldn't find position in sidx");
              dash_stream->sidx_position = GST_CLOCK_TIME_NONE;
              gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
            }
            dash_stream->pending_seek_ts = GST_CLOCK_TIME_NONE;
          } else {

            /* No pending seek: restore the previous subfragment position,
             * or start from the first entry */
            if (dash_stream->sidx_position == GST_CLOCK_TIME_NONE) {
              SIDX (dash_stream)->entry_index = 0;
            } else {
              if (gst_dash_demux_stream_sidx_seek (dash_stream,
                      demux->segment.rate >= 0, GST_SEEK_FLAG_SNAP_BEFORE,
                      dash_stream->sidx_position, NULL) != GST_FLOW_OK) {
                GST_ERROR_OBJECT (stream->pad,
                    "Couldn't find position in sidx");
                dash_stream->sidx_position = GST_CLOCK_TIME_NONE;
                gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
              }
            }
            dash_stream->sidx_position =
                SIDX (dash_stream)->entries[SIDX (dash_stream)->entry_index].
                pts;
          }
        }

        if (dash_stream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED &&
            SIDX (dash_stream)->entry_index != 0) {
          /* Need to jump to the requested SIDX entry. Push everything up to
           * the SIDX box below and let the caller handle everything else */
          *sidx_seek_needed = TRUE;
          break;
        }
      }
    } else {
      /* Uninteresting box: skip its payload entirely */
      gst_byte_reader_skip (&reader, size - header_size);
    }

    dash_stream->isobmff_parser.current_fourcc = 0;
    dash_stream->isobmff_parser.current_start_offset += size;
    dash_stream->isobmff_parser.current_size = 0;
  } while (gst_byte_reader_get_remaining (&reader) > 0);

  gst_buffer_unmap (buffer, &map);

  /* mdat? Push all we have and wait for it to be over */
  if (dash_stream->isobmff_parser.current_fourcc == GST_ISOFF_FOURCC_MDAT) {
    GstBuffer *pending;

    GST_LOG_OBJECT (stream->pad,
        "box %" GST_FOURCC_FORMAT " at offset %" G_GUINT64_FORMAT " size %"
        G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc),
        dash_stream->isobmff_parser.current_start_offset,
        dash_stream->isobmff_parser.current_size);

    /* At mdat. Move the start of the mdat to the adapter and have everything
     * else be pushed. We parsed all header boxes at this point and are not
     * supposed to be called again until the next moof */
    pending = _gst_buffer_split (buffer, gst_byte_reader_get_pos (&reader), -1);
    gst_adapter_push (dash_stream->adapter, pending);
    dash_stream->current_offset += gst_byte_reader_get_pos (&reader);
    dash_stream->isobmff_parser.current_size = 0;

    GST_BUFFER_OFFSET (buffer) = buffer_offset;
    GST_BUFFER_OFFSET_END (buffer) =
        buffer_offset + gst_buffer_get_size (buffer);
    return gst_adaptive_demux_stream_push_buffer (stream, buffer);
  } else if (gst_byte_reader_get_pos (&reader) != 0) {
    GstBuffer *pending;

    /* Multiple complete boxes and no mdat? Push them and keep the remainder,
     * which is the start of the next box if any remainder */

    pending = _gst_buffer_split (buffer, gst_byte_reader_get_pos (&reader), -1);
    gst_adapter_push (dash_stream->adapter, pending);
    dash_stream->current_offset += gst_byte_reader_get_pos (&reader);
    dash_stream->isobmff_parser.current_size = 0;

    GST_BUFFER_OFFSET (buffer) = buffer_offset;
    GST_BUFFER_OFFSET_END (buffer) =
        buffer_offset + gst_buffer_get_size (buffer);
    return gst_adaptive_demux_stream_push_buffer (stream, buffer);
  }

  /* Not even a single complete, non-mdat box, wait */
  dash_stream->isobmff_parser.current_size = 0;
  gst_adapter_push (dash_stream->adapter, buffer);

  return GST_FLOW_OK;
}
+
/* Builds the table of sync samples (keyframes) for the current moof by
 * walking its traf/trun boxes and accumulating byte offsets per the
 * ISOBMFF rules (base-data-offset / default-base-is-moof / implicit
 * continuation). On success fills dash_stream->moof_sync_samples and
 * updates the running keyframe size/distance estimates; on any condition
 * that makes key-unit trick mode impossible (trex-provided defaults,
 * mixed track ids, no sync samples) it clears the table, disables
 * allow_trickmode_key_units on the demuxer and returns FALSE. */
static gboolean
gst_dash_demux_find_sync_samples (GstAdaptiveDemux * demux,
    GstAdaptiveDemuxStream * stream)
{
  GstDashDemux *dashdemux = (GstDashDemux *) stream->demux;
  GstDashDemuxStream *dash_stream = (GstDashDemuxStream *) stream;
  guint i;
  guint32 track_id = 0;
  guint64 prev_traf_end;
  gboolean trex_sample_flags = FALSE;

  if (!dash_stream->moof) {
    dashdemux->allow_trickmode_key_units = FALSE;
    return FALSE;
  }

  dash_stream->current_sync_sample = -1;
  dash_stream->moof_sync_samples =
      g_array_new (FALSE, FALSE, sizeof (GstDashStreamSyncSample));

  prev_traf_end = dash_stream->moof_offset;

  /* generate table of keyframes and offsets */
  for (i = 0; i < dash_stream->moof->traf->len; i++) {
    GstTrafBox *traf = &g_array_index (dash_stream->moof->traf, GstTrafBox, i);
    guint64 traf_offset = 0, prev_trun_end;
    guint j;

    /* All trafs must belong to the same track for this scheme to work */
    if (i == 0) {
      track_id = traf->tfhd.track_id;
    } else if (track_id != traf->tfhd.track_id) {
      GST_ERROR_OBJECT (stream->pad,
          "moof with trafs of different track ids (%u != %u)", track_id,
          traf->tfhd.track_id);
      g_array_free (dash_stream->moof_sync_samples, TRUE);
      dash_stream->moof_sync_samples = NULL;
      dashdemux->allow_trickmode_key_units = FALSE;
      return FALSE;
    }

    /* Resolve the traf's base data offset per the tfhd flags */
    if (traf->tfhd.flags & GST_TFHD_FLAGS_BASE_DATA_OFFSET_PRESENT) {
      traf_offset = traf->tfhd.base_data_offset;
    } else if (traf->tfhd.flags & GST_TFHD_FLAGS_DEFAULT_BASE_IS_MOOF) {
      traf_offset = dash_stream->moof_offset;
    } else {
      traf_offset = prev_traf_end;
    }

    prev_trun_end = traf_offset;

    for (j = 0; j < traf->trun->len; j++) {
      GstTrunBox *trun = &g_array_index (traf->trun, GstTrunBox, j);
      guint64 trun_offset, prev_sample_end;
      guint k;

      if (trun->flags & GST_TRUN_FLAGS_DATA_OFFSET_PRESENT) {
        trun_offset = traf_offset + trun->data_offset;
      } else {
        trun_offset = prev_trun_end;
      }

      prev_sample_end = trun_offset;
      for (k = 0; k < trun->samples->len; k++) {
        GstTrunSample *sample =
            &g_array_index (trun->samples, GstTrunSample, k);
        guint64 sample_offset;
        guint32 sample_flags;
#if 0
        guint32 sample_duration;
#endif

        sample_offset = prev_sample_end;

        /* Sample flags come from the trun per-sample flags, the trun
         * first-sample flags, or the tfhd defaults - in that order.
         * Flags only available via trex cannot be resolved here. */
        if (trun->flags & GST_TRUN_FLAGS_SAMPLE_FLAGS_PRESENT) {
          sample_flags = sample->sample_flags;
        } else if ((trun->flags & GST_TRUN_FLAGS_FIRST_SAMPLE_FLAGS_PRESENT)
            && k == 0) {
          sample_flags = trun->first_sample_flags;
        } else if (traf->tfhd.
            flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_FLAGS_PRESENT) {
          sample_flags = traf->tfhd.default_sample_flags;
        } else {
          trex_sample_flags = TRUE;
          continue;
        }

#if 0
        if (trun->flags & GST_TRUN_FLAGS_SAMPLE_DURATION_PRESENT) {
          sample_duration = sample->sample_duration;
        } else if (traf->tfhd.
            flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_DURATION_PRESENT) {
          sample_duration = traf->tfhd.default_sample_duration;
        } else {
          GST_FIXME_OBJECT (stream->pad,
              "Sample duration given by trex - can't download only keyframes");
          g_array_free (dash_stream->moof_sync_samples, TRUE);
          dash_stream->moof_sync_samples = NULL;
          return FALSE;
        }
#endif

        /* Sample size is needed to keep the running byte offset; a
         * trex-only size makes byte-range keyframe downloads impossible */
        if (trun->flags & GST_TRUN_FLAGS_SAMPLE_SIZE_PRESENT) {
          prev_sample_end += sample->sample_size;
        } else if (traf->tfhd.
            flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_SIZE_PRESENT) {
          prev_sample_end += traf->tfhd.default_sample_size;
        } else {
          GST_FIXME_OBJECT (stream->pad,
              "Sample size given by trex - can't download only keyframes");
          g_array_free (dash_stream->moof_sync_samples, TRUE);
          dash_stream->moof_sync_samples = NULL;
          dashdemux->allow_trickmode_key_units = FALSE;
          return FALSE;
        }

        /* Non-non-sync sample aka sync sample */
        if (!GST_ISOFF_SAMPLE_FLAGS_SAMPLE_IS_NON_SYNC_SAMPLE (sample_flags) ||
            GST_ISOFF_SAMPLE_FLAGS_SAMPLE_DEPENDS_ON (sample_flags) == 2) {
          GstDashStreamSyncSample sync_sample =
              { sample_offset, prev_sample_end - 1 };
          /* TODO: need timestamps so we can decide to download or not */
          g_array_append_val (dash_stream->moof_sync_samples, sync_sample);
        }
      }

      prev_trun_end = prev_sample_end;
    }

    prev_traf_end = prev_trun_end;
  }

  if (trex_sample_flags) {
    if (dash_stream->moof_sync_samples->len > 0) {
      GST_LOG_OBJECT (stream->pad,
          "Some sample flags given by trex but still found sync samples");
    } else {
      GST_FIXME_OBJECT (stream->pad,
          "Sample flags given by trex - can't download only keyframes");
      g_array_free (dash_stream->moof_sync_samples, TRUE);
      dash_stream->moof_sync_samples = NULL;
      dashdemux->allow_trickmode_key_units = FALSE;
      return FALSE;
    }
  }

  if (dash_stream->moof_sync_samples->len == 0) {
    GST_LOG_OBJECT (stream->pad, "No sync samples found in fragment");
    g_array_free (dash_stream->moof_sync_samples, TRUE);
    dash_stream->moof_sync_samples = NULL;
    dashdemux->allow_trickmode_key_units = FALSE;
    return FALSE;
  }

  /* Update running estimates used to size future chunked downloads */
  {
    GstDashStreamSyncSample *sync_sample;
    guint i;
    guint size;
    GstClockTime current_keyframe_distance;

    for (i = 0; i < dash_stream->moof_sync_samples->len; i++) {
      sync_sample =
          &g_array_index (dash_stream->moof_sync_samples,
          GstDashStreamSyncSample, i);
      size = sync_sample->end_offset + 1 - sync_sample->start_offset;

      if (dash_stream->keyframe_average_size) {
        /* Over-estimate the keyframe size */
        if (dash_stream->keyframe_average_size < size)
          dash_stream->keyframe_average_size =
              (size * 3 + dash_stream->keyframe_average_size) / 4;
        else
          dash_stream->keyframe_average_size =
              (size + dash_stream->keyframe_average_size * 3) / 4;
      } else {
        dash_stream->keyframe_average_size = size;
      }

      if (i == 0) {
        /* "+ 8" allows for a small box (e.g. mdat header) between the
         * moof and the first sync sample */
        if (dash_stream->moof_offset + dash_stream->moof_size + 8 <
            sync_sample->start_offset) {
          dash_stream->first_sync_sample_after_moof = FALSE;
          dash_stream->first_sync_sample_always_after_moof = FALSE;
        } else {
          dash_stream->first_sync_sample_after_moof =
              (dash_stream->moof_sync_samples->len == 1
              || demux->segment.rate > 0.0);
        }
      }
    }

    g_assert (stream->fragment.duration != 0);
    g_assert (stream->fragment.duration != GST_CLOCK_TIME_NONE);

    /* Approximate keyframe spacing as fragment (or SIDX subsegment)
     * duration divided evenly over the sync samples found */
    if (gst_mpd_client_has_isoff_ondemand_profile (dashdemux->client)
        && dash_stream->sidx_position != GST_CLOCK_TIME_NONE
        && SIDX (dash_stream)->entries) {
      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dash_stream);
      current_keyframe_distance =
          entry->duration / dash_stream->moof_sync_samples->len;
    } else {
      current_keyframe_distance =
          stream->fragment.duration / dash_stream->moof_sync_samples->len;
    }
    dash_stream->current_fragment_keyframe_distance = current_keyframe_distance;

    if (dash_stream->keyframe_average_distance) {
      /* Under-estimate the keyframe distance */
      if (dash_stream->keyframe_average_distance > current_keyframe_distance)
        dash_stream->keyframe_average_distance =
            (dash_stream->keyframe_average_distance * 3 +
            current_keyframe_distance) / 4;
      else
        dash_stream->keyframe_average_distance =
            (dash_stream->keyframe_average_distance +
            current_keyframe_distance * 3) / 4;
    } else {
      dash_stream->keyframe_average_distance = current_keyframe_distance;
    }

    GST_DEBUG_OBJECT (stream->pad,
        "average keyframe sample size: %" G_GUINT64_FORMAT,
        dash_stream->keyframe_average_size);
    GST_DEBUG_OBJECT (stream->pad,
        "average keyframe distance: %" GST_TIME_FORMAT " (%" GST_TIME_FORMAT
        ")", GST_TIME_ARGS (dash_stream->keyframe_average_distance),
        GST_TIME_ARGS (current_keyframe_distance));
    GST_DEBUG_OBJECT (stream->pad, "first sync sample after moof: %d",
        dash_stream->first_sync_sample_after_moof);
  }

  return TRUE;
}
+
+
/* Drives ISOBMFF handling for newly received data: parses boxes until the
 * mdat is reached (via gst_dash_demux_parse_isobmff), then pushes mdat
 * payload downstream, respecting SIDX subsegment boundaries (advancing
 * subfragments as their byte ranges complete) and, in key-units trick
 * mode, trimming the data down to the wanted sync sample. May recurse
 * once per completed SIDX entry to consume remaining adapter data. */
static GstFlowReturn
gst_dash_demux_handle_isobmff (GstAdaptiveDemux * demux,
    GstAdaptiveDemuxStream * stream)
{
  GstDashDemuxStream *dash_stream = (GstDashDemuxStream *) stream;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buffer;
  gboolean sidx_advance = FALSE;

  /* We parse all ISOBMFF boxes of a (sub)fragment until the mdat. This covers
   * at least moov, moof and sidx boxes. Once mdat is received we just output
   * everything until the next (sub)fragment */
  if (dash_stream->isobmff_parser.current_fourcc != GST_ISOFF_FOURCC_MDAT) {
    gboolean sidx_seek_needed = FALSE;

    ret = gst_dash_demux_parse_isobmff (demux, dash_stream, &sidx_seek_needed);
    if (ret != GST_FLOW_OK)
      return ret;

    /* Go to selected segment if needed here */
    if (sidx_seek_needed && !stream->downloading_index)
      return GST_ADAPTIVE_DEMUX_FLOW_END_OF_FRAGMENT;

    /* No mdat yet, let's get called again with the next boxes */
    if (dash_stream->isobmff_parser.current_fourcc != GST_ISOFF_FOURCC_MDAT)
      return ret;

    /* Here we end up only if we're right at the mdat start */

    /* Jump to the next sync sample. As we're doing chunked downloading
     * here, just drop data until our chunk is over so we can reuse the
     * HTTP connection instead of having to create a new one or
     * reuse the data if the sync sample follows the moof */
    if (dash_stream->active_stream->mimeType == GST_STREAM_VIDEO
        && gst_dash_demux_find_sync_samples (demux, stream) &&
        GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
      guint idx = -1;

      /* Estimate which sync sample the target time falls on, assuming
       * evenly spaced keyframes within the fragment */
      if (GST_CLOCK_TIME_IS_VALID (dash_stream->target_time)) {
        idx =
            (dash_stream->target_time -
            dash_stream->current_fragment_timestamp) /
            dash_stream->current_fragment_keyframe_distance;
      } else if (stream->segment.rate > 0) {
        idx = 0;
      }

      GST_DEBUG_OBJECT (stream->pad, "target %" GST_TIME_FORMAT " idx %d",
          GST_TIME_ARGS (dash_stream->target_time), idx);
      /* Figure out target time */

      if (dash_stream->first_sync_sample_after_moof && idx == 0) {
        /* If we're here, don't throw away data but collect sync
         * sample while we're at it below. We're doing chunked
         * downloading so might need to adjust the next chunk size for
         * the remainder */
        dash_stream->current_sync_sample = 0;
        GST_DEBUG_OBJECT (stream->pad, "Using first keyframe after header");
      }
    }

    if (gst_adapter_available (dash_stream->adapter) == 0)
      return ret;

    /* We have some data from the mdat available in the adapter, handle it
     * below in the push code */
  } else {
    /* Somewhere in the middle of the mdat */
  }

  /* At mdat */
  if (dash_stream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
    guint64 sidx_end_offset =
        dash_stream->sidx_base_offset +
        SIDX_CURRENT_ENTRY (dash_stream)->offset +
        SIDX_CURRENT_ENTRY (dash_stream)->size;
    gboolean has_next = gst_dash_demux_stream_has_next_subfragment (stream);
    gsize available;

    /* Need to handle everything in the adapter according to the parsed SIDX
     * and advance subsegments accordingly */

    available = gst_adapter_available (dash_stream->adapter);
    if (dash_stream->current_offset + available < sidx_end_offset) {
      /* Everything we have lies inside the current subsegment */
      buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
    } else {
      if (!has_next && sidx_end_offset <= dash_stream->current_offset) {
        /* Drain all bytes, since there might be trailing bytes at the end of subfragment */
        buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
      } else {
        if (sidx_end_offset <= dash_stream->current_offset) {
          /* This means a corrupted stream or a bug: ignoring bugs, it
           * should only happen if the SIDX index is corrupt */
          GST_ERROR_OBJECT (stream->pad, "Invalid SIDX state");
          gst_adapter_clear (dash_stream->adapter);
          return GST_FLOW_ERROR;
        } else {
          /* Take only up to the subsegment boundary and advance after
           * pushing */
          buffer =
              gst_adapter_take_buffer (dash_stream->adapter,
              sidx_end_offset - dash_stream->current_offset);
          sidx_advance = TRUE;
        }
      }
    }
  } else {
    /* Take it all and handle it further below */
    buffer =
        gst_adapter_take_buffer (dash_stream->adapter,
        gst_adapter_available (dash_stream->adapter));

    /* Attention: All code paths below need to update dash_stream->current_offset */
  }

  /* We're actually running in key-units trick mode */
  if (dash_stream->active_stream->mimeType == GST_STREAM_VIDEO
      && dash_stream->moof_sync_samples
      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
    if (dash_stream->current_sync_sample == -1) {
      /* We're doing chunked downloading and wait for finishing the current
       * chunk so we can jump to the first keyframe */
      dash_stream->current_offset += gst_buffer_get_size (buffer);
      gst_buffer_unref (buffer);
      return GST_FLOW_OK;
    } else {
      GstDashStreamSyncSample *sync_sample =
          &g_array_index (dash_stream->moof_sync_samples,
          GstDashStreamSyncSample, dash_stream->current_sync_sample);
      guint64 end_offset =
          dash_stream->current_offset + gst_buffer_get_size (buffer);

      /* Make sure to not download too much, this should only happen for
       * the very first keyframe if it follows the moof */
      if (dash_stream->current_offset >= sync_sample->end_offset + 1) {
        dash_stream->current_offset += gst_buffer_get_size (buffer);
        gst_buffer_unref (buffer);
        return GST_FLOW_OK;
      } else if (end_offset > sync_sample->end_offset + 1) {
        /* Trim the buffer to the end of the sync sample */
        guint64 remaining =
            sync_sample->end_offset + 1 - dash_stream->current_offset;
        GstBuffer *sub = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 0,
            remaining);
        gst_buffer_unref (buffer);
        buffer = sub;
      }
    }
  }

  GST_BUFFER_OFFSET (buffer) = dash_stream->current_offset;
  dash_stream->current_offset += gst_buffer_get_size (buffer);
  GST_BUFFER_OFFSET_END (buffer) = dash_stream->current_offset;

  ret = gst_adaptive_demux_stream_push_buffer (stream, buffer);
  if (ret != GST_FLOW_OK)
    return ret;

  if (sidx_advance) {
    ret =
        gst_adaptive_demux_stream_advance_fragment (demux, stream,
        SIDX_CURRENT_ENTRY (dash_stream)->duration);
    if (ret != GST_FLOW_OK)
      return ret;

    /* If we still have data available, recurse and use it up if possible */
    if (gst_adapter_available (dash_stream->adapter) > 0)
      return gst_dash_demux_handle_isobmff (demux, stream);
  }

  return ret;
}
+
+ /* Called by adaptivedemux for every downloaded buffer of this stream.
+  * Buffers are accumulated in the per-stream adapter, then dispatched to
+  * the ISOBMFF handler, split along SIDX sub-fragment boundaries, or
+  * pushed downstream verbatim (plain header case). */
+ static GstFlowReturn
+ gst_dash_demux_data_received (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer)
+ {
+ GstDashDemuxStream *dash_stream = (GstDashDemuxStream *) stream;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint index_header_or_data;
+
+ /* Tag the current download phase (1 = index, 2 = header, 3 = media data)
+  * so stale adapter contents from a previous phase can be detected. */
+ if (stream->downloading_index)
+ index_header_or_data = 1;
+ else if (stream->downloading_header)
+ index_header_or_data = 2;
+ else
+ index_header_or_data = 3;
+
+ if (dash_stream->current_index_header_or_data != index_header_or_data) {
+ /* Clear pending data */
+ if (gst_adapter_available (dash_stream->adapter) != 0)
+ GST_ERROR_OBJECT (stream->pad,
+ "Had pending SIDX data after switch between index/header/data");
+ gst_adapter_clear (dash_stream->adapter);
+ dash_stream->current_index_header_or_data = index_header_or_data;
+ dash_stream->current_offset = -1;
+ }
+
+ /* First buffer of a new download: derive the absolute byte offset from
+  * the buffer metadata when available, otherwise start at 0. */
+ if (dash_stream->current_offset == -1)
+ dash_stream->current_offset =
+ GST_BUFFER_OFFSET_IS_VALID (buffer) ? GST_BUFFER_OFFSET (buffer) : 0;
+
+ /* The adapter takes ownership of the buffer from here on. */
+ gst_adapter_push (dash_stream->adapter, buffer);
+ buffer = NULL;
+
+ if (dash_stream->is_isobmff || stream->downloading_index) {
+ /* SIDX index is also ISOBMMF */
+ ret = gst_dash_demux_handle_isobmff (demux, stream);
+ } else if (dash_stream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+ gsize available;
+
+ /* Not ISOBMFF but had a SIDX index. Does this even exist or work? */
+ while (ret == GST_FLOW_OK
+ && ((available = gst_adapter_available (dash_stream->adapter)) > 0)) {
+ gboolean advance = FALSE;
+ /* End of the current SIDX entry in absolute byte offsets */
+ guint64 sidx_end_offset =
+ dash_stream->sidx_base_offset +
+ SIDX_CURRENT_ENTRY (dash_stream)->offset +
+ SIDX_CURRENT_ENTRY (dash_stream)->size;
+ gboolean has_next = gst_dash_demux_stream_has_next_subfragment (stream);
+
+ if (dash_stream->current_offset + available < sidx_end_offset) {
+ buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+ } else {
+ if (!has_next && sidx_end_offset <= dash_stream->current_offset) {
+ /* Drain all bytes, since there might be trailing bytes at the end of subfragment */
+ buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+ } else {
+ if (sidx_end_offset <= dash_stream->current_offset) {
+ /* This means a corrupted stream or a bug: ignoring bugs, it
+ * should only happen if the SIDX index is corrupt */
+ GST_ERROR_OBJECT (stream->pad, "Invalid SIDX state");
+ gst_adapter_clear (dash_stream->adapter);
+ ret = GST_FLOW_ERROR;
+ break;
+ } else {
+ /* Take only up to the end of the current sub-fragment and
+ * advance to the next SIDX entry afterwards. */
+ buffer =
+ gst_adapter_take_buffer (dash_stream->adapter,
+ sidx_end_offset - dash_stream->current_offset);
+ advance = TRUE;
+ }
+ }
+ }
+
+ GST_BUFFER_OFFSET (buffer) = dash_stream->current_offset;
+ GST_BUFFER_OFFSET_END (buffer) =
+ GST_BUFFER_OFFSET (buffer) + gst_buffer_get_size (buffer);
+ dash_stream->current_offset = GST_BUFFER_OFFSET_END (buffer);
+
+ ret = gst_adaptive_demux_stream_push_buffer (stream, buffer);
+
+ if (advance) {
+ if (has_next) {
+ GstFlowReturn new_ret;
+ new_ret =
+ gst_adaptive_demux_stream_advance_fragment (demux, stream,
+ SIDX_CURRENT_ENTRY (dash_stream)->duration);
+
+ /* only overwrite if it was OK before */
+ if (ret == GST_FLOW_OK)
+ ret = new_ret;
+ } else {
+ break;
+ }
+ }
+ }
+ } else {
+ /* this should be the main header, just push it all */
+ buffer = gst_adapter_take_buffer (dash_stream->adapter,
+ gst_adapter_available (dash_stream->adapter));
+
+ GST_BUFFER_OFFSET (buffer) = dash_stream->current_offset;
+ GST_BUFFER_OFFSET_END (buffer) =
+ GST_BUFFER_OFFSET (buffer) + gst_buffer_get_size (buffer);
+ dash_stream->current_offset = GST_BUFFER_OFFSET_END (buffer);
+
+ ret = gst_adaptive_demux_stream_push_buffer (stream, buffer);
+ }
+
+ return ret;
+ }
+
+ /* Free the dashdemux-specific state attached to an adaptivedemux stream:
+  * SIDX parser state, the byte adapter, and any cached ISOBMFF moof data. */
+ static void
+ gst_dash_demux_stream_free (GstAdaptiveDemuxStream * stream)
+ {
+ GstDashDemuxStream *dash_stream = (GstDashDemuxStream *) stream;
+
+ gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
+ if (dash_stream->adapter)
+ g_object_unref (dash_stream->adapter);
+ if (dash_stream->moof)
+ gst_isoff_moof_box_free (dash_stream->moof);
+ if (dash_stream->moof_sync_samples)
+ g_array_free (dash_stream->moof_sync_samples, TRUE);
+ }
+
+ /* Allocate a zero-initialized clock-drift tracker. next_update is set to
+  * "now" (monotonic, in microseconds) so the first poll happens
+  * immediately. Freed with gst_dash_demux_clock_drift_free(). */
+ static GstDashDemuxClockDrift *
+ gst_dash_demux_clock_drift_new (GstDashDemux * demux)
+ {
+ GstDashDemuxClockDrift *clock_drift;
+
+ clock_drift = g_slice_new0 (GstDashDemuxClockDrift);
+ g_mutex_init (&clock_drift->clock_lock);
+ clock_drift->next_update =
+ GST_TIME_AS_USECONDS (gst_adaptive_demux_get_monotonic_time
+ (GST_ADAPTIVE_DEMUX_CAST (demux)));
+ return clock_drift;
+ }
+
+ /* Release a clock-drift tracker (NULL-safe). The lock is taken while the
+  * NTP clock is dropped so a concurrent poll cannot race the teardown. */
+ static void
+ gst_dash_demux_clock_drift_free (GstDashDemuxClockDrift * clock_drift)
+ {
+ if (clock_drift) {
+ g_mutex_lock (&clock_drift->clock_lock);
+ if (clock_drift->ntp_clock)
+ g_object_unref (clock_drift->ntp_clock);
+ g_mutex_unlock (&clock_drift->clock_lock);
+ g_mutex_clear (&clock_drift->clock_lock);
+ g_slice_free (GstDashDemuxClockDrift, clock_drift);
+ }
+ }
+
+ /*
+ * The value attribute of the UTCTiming element contains a white-space
+ * separated list of servers that are recommended to be used in
+ * combination with the NTP protocol as defined in IETF RFC 5905 for
+ * getting the appropriate time.
+ *
+ * The DASH standard does not specify which version of NTP. This
+ * function only works with NTPv4 servers.
+ */
+ /* Query one of the manifest's NTP servers and return the current server
+  * time as a GstDateTime, or NULL on any failure (resolution, clock
+  * creation, sync timeout). The GstNtpClock is created lazily on first
+  * call and cached in clock_drift for subsequent polls. */
+ static GstDateTime *
+ gst_dash_demux_poll_ntp_server (GstDashDemuxClockDrift * clock_drift,
+ gchar ** urls)
+ {
+ GstClockTime ntp_clock_time;
+ GDateTime *dt, *dt2;
+
+ if (!clock_drift->ntp_clock) {
+ GResolver *resolver;
+ GList *inet_addrs;
+ GError *err = NULL;
+ gchar *ip_addr;
+
+ resolver = g_resolver_get_default ();
+ /* We don't round-robin NTP servers. If the manifest specifies multiple
+ NTP time servers, select one at random */
+ clock_drift->selected_url = g_random_int_range (0, g_strv_length (urls));
+ GST_DEBUG ("Connecting to NTP time server %s",
+ urls[clock_drift->selected_url]);
+ inet_addrs = g_resolver_lookup_by_name (resolver,
+ urls[clock_drift->selected_url], NULL, &err);
+ g_object_unref (resolver);
+ if (!inet_addrs || g_list_length (inet_addrs) == 0) {
+ GST_ERROR ("Failed to resolve hostname of NTP server: %s",
+ err ? (err->message) : "unknown error");
+ if (inet_addrs)
+ g_resolver_free_addresses (inet_addrs);
+ if (err)
+ g_error_free (err);
+ return NULL;
+ }
+ ip_addr =
+ g_inet_address_to_string ((GInetAddress
+ *) (g_list_first (inet_addrs)->data));
+ /* 123 is the standard NTP port */
+ clock_drift->ntp_clock = gst_ntp_clock_new ("dashntp", ip_addr, 123, 0);
+ g_free (ip_addr);
+ g_resolver_free_addresses (inet_addrs);
+ if (!clock_drift->ntp_clock) {
+ GST_ERROR ("Failed to create NTP clock");
+ return NULL;
+ }
+ if (!gst_clock_wait_for_sync (clock_drift->ntp_clock, 5 * GST_SECOND)) {
+ g_object_unref (clock_drift->ntp_clock);
+ clock_drift->ntp_clock = NULL;
+ GST_ERROR ("Failed to lock to NTP clock");
+ return NULL;
+ }
+ }
+ ntp_clock_time = gst_clock_get_time (clock_drift->ntp_clock);
+ if (ntp_clock_time == GST_CLOCK_TIME_NONE) {
+ GST_ERROR ("Failed to get time from NTP clock");
+ return NULL;
+ }
+ /* Convert from the NTP epoch (1900) to the Unix epoch (1970) */
+ ntp_clock_time -= NTP_TO_UNIX_EPOCH * GST_SECOND;
+ dt = g_date_time_new_from_unix_utc (ntp_clock_time / GST_SECOND);
+ if (!dt) {
+ GST_ERROR ("Failed to create GstDateTime");
+ return NULL;
+ }
+ /* Sub-second remainder, scaled to microseconds for g_date_time_add() */
+ ntp_clock_time =
+ gst_util_uint64_scale (ntp_clock_time % GST_SECOND, 1000000, GST_SECOND);
+ dt2 = g_date_time_add (dt, ntp_clock_time);
+ g_date_time_unref (dt);
+ return gst_date_time_new_from_g_date_time (dt2);
+ }
+
+ /* Maps an RFC 5322 time-zone token (e.g. "GMT", "PST") to its UTC offset
+  * in hours; fractional offsets are representable via the float type. */
+ struct Rfc5322TimeZone
+ {
+ const gchar *name;
+ gfloat tzoffset;
+ };
+
+ /*
+ Parse an RFC5322 (section 3.3) date-time from the Date: field in the
++ HTTP response.
+ See https://tools.ietf.org/html/rfc5322#section-3.3
+ */
+ /* Extract the server time from the "Date:" response header of an
+  * HTTP-HEAD UTCTiming download. Returns a GstDateTime on success or NULL
+  * when the header is missing or cannot be parsed. */
+ static GstDateTime *
+ gst_dash_demux_parse_http_head (GstDashDemuxClockDrift * clock_drift,
+ GstFragment * download)
+ {
+ static const gchar *months[] = { NULL, "Jan", "Feb", "Mar", "Apr",
+ "May", "Jun", "Jul", "Aug",
+ "Sep", "Oct", "Nov", "Dec", NULL
+ };
+ static const struct Rfc5322TimeZone timezones[] = {
+ {"Z", 0},
+ {"UT", 0},
+ {"GMT", 0},
+ {"BST", 1},
+ {"EST", -5},
+ {"EDT", -4},
+ {"CST", -6},
+ {"CDT", -5},
+ {"MST", -7},
+ {"MDT", -6},
+ {"PST", -8},
+ {"PDT", -7},
+ {NULL, 0}
+ };
+ GstDateTime *value = NULL;
+ const GstStructure *response_headers;
+ const gchar *http_date;
+ const GValue *val;
+ gint ret;
+ const gchar *pos;
+ gint year = -1, month = -1, day = -1, hour = -1, minute = -1, second = -1;
+ /* zone[6]/monthstr[4] leave room for the %5s/%3s tokens plus NUL */
+ gchar zone[6];
+ gchar monthstr[4];
+ gfloat tzoffset = 0;
+ gboolean parsed_tz = FALSE;
+
+ g_return_val_if_fail (download != NULL, NULL);
+ g_return_val_if_fail (download->headers != NULL, NULL);
+
+ val = gst_structure_get_value (download->headers, "response-headers");
+ if (!val) {
+ return NULL;
+ }
+ response_headers = gst_value_get_structure (val);
+ http_date = gst_structure_get_string (response_headers, "Date");
+ if (!http_date) {
+ return NULL;
+ }
+
+ /* skip optional text version of day of the week */
+ pos = strchr (http_date, ',');
+ if (pos)
+ pos++;
+ else
+ pos = http_date;
+ ret =
+ sscanf (pos, "%02d %3s %04d %02d:%02d:%02d %5s", &day, monthstr, &year,
+ &hour, &minute, &second, zone);
+ if (ret == 7) {
+ gchar *z = zone;
+ gint i;
+
+ /* translate the month name to its 1-based number */
+ for (i = 1; months[i]; ++i) {
+ if (g_ascii_strncasecmp (months[i], monthstr, strlen (months[i])) == 0) {
+ month = i;
+ break;
+ }
+ }
+ /* first try the named time zones from the table above */
+ for (i = 0; timezones[i].name && !parsed_tz; ++i) {
+ if (g_ascii_strncasecmp (timezones[i].name, z,
+ strlen (timezones[i].name)) == 0) {
+ tzoffset = timezones[i].tzoffset;
+ parsed_tz = TRUE;
+ }
+ }
+ if (!parsed_tz) {
+ gint hh, mm;
+ gboolean neg = FALSE;
+ /* check if it is in the form +-HHMM */
+ if (*z == '+' || *z == '-') {
+ if (*z == '+')
+ ++z;
+ else if (*z == '-') {
+ ++z;
+ neg = TRUE;
+ }
+ ret = sscanf (z, "%02d%02d", &hh, &mm);
+ if (ret == 2) {
+ tzoffset = hh;
+ tzoffset += mm / 60.0;
+ if (neg)
+ tzoffset = -tzoffset;
+ parsed_tz = TRUE;
+ }
+ }
+ }
+ /* Accept year in both 2 digit or 4 digit format */
+ if (year < 100)
+ year += 2000;
+ }
+ /* NOTE(review): only the month token and time zone are validated here;
+ day/hour/minute/second come straight from sscanf — presumably
+ gst_date_time_new() rejects out-of-range values, confirm. */
+ if (month > 0 && parsed_tz) {
+ value = gst_date_time_new (tzoffset,
+ year, month, day, hour, minute, second);
+ }
+ return value;
+ }
+
+ /*
+ The timing information is contained in the message body of the HTTP
+ response and contains a time value formatted according to NTP timestamp
+ format in IETF RFC 5905.
+
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | Seconds |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | Fraction |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ NTP Timestamp Format
+ */
+ /* Parse an 8-byte NTP timestamp (RFC 5905: 32-bit seconds since 1900 +
+  * 32-bit fraction, big-endian) from an http-ntp UTCTiming response body.
+  * Returns a GstDateTime, or NULL if the body is not exactly 8 bytes or
+  * the timestamp cannot be represented. */
+ static GstDateTime *
+ gst_dash_demux_parse_http_ntp (GstDashDemuxClockDrift * clock_drift,
+ GstBuffer * buffer)
+ {
+ gint64 seconds;
+ guint64 fraction;
+ GDateTime *dt, *dt2;
+ GstMapInfo mapinfo;
+
+ /* See https://tools.ietf.org/html/rfc5905#page-12 for details of
+ the NTP Timestamp Format */
+ gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
+ if (mapinfo.size != 8) {
+ gst_buffer_unmap (buffer, &mapinfo);
+ return NULL;
+ }
+ seconds = GST_READ_UINT32_BE (mapinfo.data);
+ fraction = GST_READ_UINT32_BE (mapinfo.data + 4);
+ gst_buffer_unmap (buffer, &mapinfo);
+ /* Scale the 32-bit fraction to microseconds for g_date_time_add() */
+ fraction = gst_util_uint64_scale (fraction, 1000000,
+ G_GUINT64_CONSTANT (1) << 32);
+ /* subtract constant to convert from 1900 based time to 1970 based time */
+ seconds -= NTP_TO_UNIX_EPOCH;
+ dt = g_date_time_new_from_unix_utc (seconds);
+ if (!dt) {
+ /* Out-of-range timestamp from a broken server; mirror the NULL check
+ done in gst_dash_demux_poll_ntp_server() instead of crashing in
+ g_date_time_add() below. */
+ GST_ERROR ("Failed to create GDateTime from NTP timestamp");
+ return NULL;
+ }
+ dt2 = g_date_time_add (dt, fraction);
+ g_date_time_unref (dt);
+ if (!dt2)
+ return NULL;
+ return gst_date_time_new_from_g_date_time (dt2);
+ }
+
+ /*
+ The timing information is contained in the message body of the
+ HTTP response and contains a time value formatted according to
+ xs:dateTime as defined in W3C XML Schema Part 2: Datatypes specification.
+ */
+ /* Parse an xs:dateTime (ISO 8601) string from the response body of an
+  * http-xsdate/http-iso UTCTiming download. The body is copied with
+  * g_strndup() because it is not guaranteed to be NUL-terminated.
+  * Returns NULL if the buffer cannot be mapped or the string is invalid. */
+ static GstDateTime *
+ gst_dash_demux_parse_http_xsdate (GstDashDemuxClockDrift * clock_drift,
+ GstBuffer * buffer)
+ {
+ GstDateTime *value = NULL;
+ GstMapInfo mapinfo;
+
+ /* the string from the server might not be zero terminated */
+ if (gst_buffer_map (buffer, &mapinfo, GST_MAP_READ)) {
+ gchar *str;
+ str = g_strndup ((const gchar *) mapinfo.data, mapinfo.size);
+ gst_buffer_unmap (buffer, &mapinfo);
+ value = gst_date_time_new_from_iso8601_string (str);
+ g_free (str);
+ }
+ return value;
+ }
+
+ /* Resynchronise the local estimate of the server clock using one of the
+  * manifest's UTCTiming sources (NTP, HTTP Date header, http-ntp body, or
+  * xs:dateTime body). On success stores the client/server offset in
+  * clock_drift->clock_compensation and schedules the next poll; failed
+  * polls are retried sooner (FAST_CLOCK_UPDATE_INTERVAL). Returns TRUE
+  * when a valid time was obtained, or when no poll was due yet. */
+ static gboolean
+ gst_dash_demux_poll_clock_drift (GstDashDemux * demux)
+ {
+ GstDashDemuxClockDrift *clock_drift;
+ GDateTime *start = NULL, *end;
+ GstBuffer *buffer = NULL;
+ GstDateTime *value = NULL;
+ gboolean ret = FALSE;
+ gint64 now;
+ GstMPDUTCTimingType method;
+ gchar **urls;
+
+ g_return_val_if_fail (demux != NULL, FALSE);
+ g_return_val_if_fail (demux->clock_drift != NULL, FALSE);
+ clock_drift = demux->clock_drift;
+ now =
+ GST_TIME_AS_USECONDS (gst_adaptive_demux_get_monotonic_time
+ (GST_ADAPTIVE_DEMUX_CAST (demux)));
+ if (now < clock_drift->next_update) {
+ /*TODO: If a fragment fails to download in adaptivedemux, it waits
+ for a manifest reload before another attempt to fetch a fragment.
+ Section 10.8.6 of the DVB-DASH standard states that the DASH client
+ shall refresh the manifest and resynchronise to one of the time sources.
+
+ Currently the fact that the manifest refresh follows a download failure
+ does not make it into dashdemux. */
+ return TRUE;
+ }
+ /* NOTE(review): urls appears to be owned by the MPD client (it is never
+ freed here) — confirm against gst_mpd_client_get_utc_timing_sources. */
+ urls = gst_mpd_client_get_utc_timing_sources (demux->client,
+ SUPPORTED_CLOCK_FORMATS, &method);
+ if (!urls) {
+ return FALSE;
+ }
+ /* Update selected_url just in case the number of URLs in the UTCTiming
+ element has shrunk since the last poll */
+ clock_drift->selected_url = clock_drift->selected_url % g_strv_length (urls);
+ g_mutex_lock (&clock_drift->clock_lock);
+
+ if (method == GST_MPD_UTCTIMING_TYPE_NTP) {
+ value = gst_dash_demux_poll_ntp_server (clock_drift, urls);
+ if (!value) {
+ GST_ERROR_OBJECT (demux, "Failed to fetch time from NTP server %s",
+ urls[clock_drift->selected_url]);
+ g_mutex_unlock (&clock_drift->clock_lock);
+ goto quit;
+ }
+ }
+ /* "start" and "end" (below) bracket the download; their midpoint is used
+ as the client-side sampling instant of the server clock. */
+ start =
+ gst_adaptive_demux_get_client_now_utc (GST_ADAPTIVE_DEMUX_CAST (demux));
+ if (!value) {
+ GstFragment *download;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ GST_DEBUG_OBJECT (demux, "Fetching current time from %s",
++ urls[clock_drift->selected_url]);
++ download = gst_uri_downloader_fetch_uri (GST_ADAPTIVE_DEMUX_CAST
++ (demux)->downloader, urls[clock_drift->selected_url], NULL, NULL, NULL,
++ DEFAULT_ADAPTIVE_RETRY, DEFAULT_ADAPTIVE_TIMEOUT, TRUE, FALSE, TRUE,
++ NULL);
++#else
+ gint64 range_start = 0, range_end = -1;
+ GST_DEBUG_OBJECT (demux, "Fetching current time from %s",
+ urls[clock_drift->selected_url]);
+ if (method == GST_MPD_UTCTIMING_TYPE_HTTP_HEAD) {
+ range_start = -1;
+ }
+ download =
+ gst_uri_downloader_fetch_uri_with_range (GST_ADAPTIVE_DEMUX_CAST
+ (demux)->downloader, urls[clock_drift->selected_url], NULL, TRUE, TRUE,
+ TRUE, range_start, range_end, NULL);
++#endif
+ if (download) {
+ if (method == GST_MPD_UTCTIMING_TYPE_HTTP_HEAD && download->headers) {
+ value = gst_dash_demux_parse_http_head (clock_drift, download);
+ } else {
+ buffer = gst_fragment_get_buffer (download);
+ }
+ g_object_unref (download);
+ }
+ }
+ g_mutex_unlock (&clock_drift->clock_lock);
+ if (!value && !buffer) {
+ GST_ERROR_OBJECT (demux, "Failed to fetch time from %s",
+ urls[clock_drift->selected_url]);
+ goto quit;
+ }
+ end = gst_adaptive_demux_get_client_now_utc (GST_ADAPTIVE_DEMUX_CAST (demux));
+ if (!value && method == GST_MPD_UTCTIMING_TYPE_HTTP_NTP) {
+ value = gst_dash_demux_parse_http_ntp (clock_drift, buffer);
+ } else if (!value) {
+ /* GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE or GST_MPD_UTCTIMING_TYPE_HTTP_ISO */
+ value = gst_dash_demux_parse_http_xsdate (clock_drift, buffer);
+ }
+ if (buffer)
+ gst_buffer_unref (buffer);
+ if (value) {
+ GTimeSpan download_duration = g_date_time_difference (end, start);
+ GDateTime *client_now, *server_now;
+ /* We don't know when the server sampled its clock, but we know
+ it must have been before "end" and probably after "start".
+ A reasonable estimate is to use (start+end)/2
+ */
+ client_now = g_date_time_add (start, download_duration / 2);
+ server_now = gst_date_time_to_g_date_time (value);
+ /* If gst_date_time_new_from_iso8601_string is given an unsupported
+ ISO 8601 format, it can return a GstDateTime that is not valid,
+ which causes gst_date_time_to_g_date_time to return NULL */
+ if (server_now) {
+ g_mutex_lock (&clock_drift->clock_lock);
+ clock_drift->clock_compensation =
+ g_date_time_difference (server_now, client_now);
+ g_mutex_unlock (&clock_drift->clock_lock);
+ GST_DEBUG_OBJECT (demux,
+ "Difference between client and server clocks is %lfs",
+ ((double) clock_drift->clock_compensation) / 1000000.0);
+ g_date_time_unref (server_now);
+ ret = TRUE;
+ } else {
+ GST_ERROR_OBJECT (demux, "Failed to parse DateTime from server");
+ }
+ g_date_time_unref (client_now);
+ gst_date_time_unref (value);
+ } else {
+ GST_ERROR_OBJECT (demux, "Failed to parse DateTime from server");
+ }
+ g_date_time_unref (end);
+ quit:
+ if (start)
+ g_date_time_unref (start);
+ /* if multiple URLs were specified, use a simple round-robin to
+ poll each server */
+ g_mutex_lock (&clock_drift->clock_lock);
+ if (method == GST_MPD_UTCTIMING_TYPE_NTP) {
+ clock_drift->next_update = now + FAST_CLOCK_UPDATE_INTERVAL;
+ } else {
+ clock_drift->selected_url =
+ (1 + clock_drift->selected_url) % g_strv_length (urls);
+ if (ret) {
+ clock_drift->next_update = now + SLOW_CLOCK_UPDATE_INTERVAL;
+ } else {
+ clock_drift->next_update = now + FAST_CLOCK_UPDATE_INTERVAL;
+ }
+ }
+ g_mutex_unlock (&clock_drift->clock_lock);
+ return ret;
+ }
+
+ /* Thread-safe accessor for the last computed client/server clock offset
+  * (microseconds); returns 0 when no drift tracker exists. */
+ static GTimeSpan
+ gst_dash_demux_get_clock_compensation (GstDashDemux * demux)
+ {
+ GTimeSpan rv = 0;
+ if (demux->clock_drift) {
+ g_mutex_lock (&demux->clock_drift->clock_lock);
+ rv = demux->clock_drift->clock_compensation;
+ g_mutex_unlock (&demux->clock_drift->clock_lock);
+ }
+ GST_LOG_OBJECT (demux, "Clock drift %" GST_STIME_FORMAT, GST_STIME_ARGS (rv));
+ return rv;
+ }
+
+ /* Estimate the server's current UTC time: local wall-clock plus the
+  * measured clock compensation. Caller owns the returned GDateTime. */
+ static GDateTime *
+ gst_dash_demux_get_server_now_utc (GstDashDemux * demux)
+ {
+ GDateTime *client_now;
+ GDateTime *server_now;
+
+ client_now =
+ gst_adaptive_demux_get_client_now_utc (GST_ADAPTIVE_DEMUX_CAST (demux));
+ server_now =
+ g_date_time_add (client_now,
+ gst_dash_demux_get_clock_compensation (demux));
+ g_date_time_unref (client_now);
+ return server_now;
+ }
--- /dev/null
-
+ /* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ * Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+ #include "gstmpdclient.h"
+ #include "gstmpdparser.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_dash_mpd_client_debug);
+ #undef GST_CAT_DEFAULT
+ #define GST_CAT_DEFAULT gst_dash_mpd_client_debug
+
+ G_DEFINE_TYPE (GstMPDClient, gst_mpd_client, GST_TYPE_OBJECT);
+
+ static GstMPDSegmentBaseNode *gst_mpd_client_get_segment_base (GstMPDPeriodNode
+ * Period, GstMPDAdaptationSetNode * AdaptationSet,
+ GstMPDRepresentationNode * Representation);
+ static GstMPDSegmentListNode *gst_mpd_client_get_segment_list (GstMPDClient *
+ client, GstMPDPeriodNode * Period, GstMPDAdaptationSetNode * AdaptationSet,
+ GstMPDRepresentationNode * Representation);
+ /* Segments */
+ static guint gst_mpd_client_get_segments_counts (GstMPDClient * client,
+ GstActiveStream * stream);
+
+ static GList *gst_mpd_client_fetch_external_periods (GstMPDClient * client,
+ GstMPDPeriodNode * period_node);
+ static GList *gst_mpd_client_fetch_external_adaptation_set (GstMPDClient *
+ client, GstMPDPeriodNode * period, GstMPDAdaptationSetNode * adapt_set);
+
+ static GstMPDRepresentationNode *gst_mpd_client_get_lowest_representation (GList
+ * Representations);
+ static GstStreamPeriod *gst_mpd_client_get_stream_period (GstMPDClient *
+ client);
+
+ typedef GstMPDNode *(*MpdClientStringIDFilter) (GList * list, gchar * data);
+ typedef GstMPDNode *(*MpdClientIDFilter) (GList * list, guint data);
+
+ /* Find the Period whose id matches @period_id (g_strcmp0, so two NULL
+  * ids also match). Returns the node or NULL when absent. */
+ static GstMPDNode *
+ gst_mpd_client_get_period_with_id (GList * periods, gchar * period_id)
+ {
+ GstMPDPeriodNode *period;
+ GList *list = NULL;
+
+ for (list = g_list_first (periods); list; list = g_list_next (list)) {
+ period = (GstMPDPeriodNode *) list->data;
+ if (!g_strcmp0 (period->id, period_id))
+ return GST_MPD_NODE (period);
+ }
+ return NULL;
+ }
+
+ /* Find the AdaptationSet with numeric id @id, or NULL when absent. */
+ static GstMPDNode *
+ gst_mpd_client_get_adaptation_set_with_id (GList * adaptation_sets, guint id)
+ {
+ GstMPDAdaptationSetNode *adaptation_set;
+ GList *list = NULL;
+
+ for (list = g_list_first (adaptation_sets); list; list = g_list_next (list)) {
+ adaptation_set = (GstMPDAdaptationSetNode *) list->data;
+ if (adaptation_set->id == id)
+ return GST_MPD_NODE (adaptation_set);
+ }
+ return NULL;
+ }
+
+ /* Find the Representation whose id matches @rep_id, or NULL when absent. */
+ static GstMPDNode *
+ gst_mpd_client_get_representation_with_id (GList * representations,
+ gchar * rep_id)
+ {
+ GstMPDRepresentationNode *representation;
+ GList *list = NULL;
+
+ for (list = g_list_first (representations); list; list = g_list_next (list)) {
+ representation = (GstMPDRepresentationNode *) list->data;
+ if (!g_strcmp0 (representation->id, rep_id))
+ return GST_MPD_NODE (representation);
+ }
+ return NULL;
+ }
+
+ /* Generate the first id formatted from @tuple (a printf template with one
+  * integer conversion, e.g. "%u") that @filter does not find in @list.
+  * Caller owns the returned string.
+  * NOTE(review): @tuple is used as a non-literal printf format — callers
+  * must pass a trusted template with exactly one integer specifier. */
+ static gchar *
+ _generate_new_string_id (GList * list, const gchar * tuple,
+ MpdClientStringIDFilter filter)
+ {
+ guint i = 0;
+ gchar *id = NULL;
+ GstMPDNode *node;
+ do {
+ g_free (id);
+ id = g_strdup_printf (tuple, i);
+ node = filter (list, id);
+ i++;
+ } while (node);
+
+ return id;
+ }
+
+ /* Return the smallest numeric id (starting at 0) that @filter does not
+  * find in @list. Note: returns the first *free* id plus the post-increment,
+  * i.e. the loop exits with id already advanced past the free value. */
+ static guint
+ _generate_new_id (GList * list, MpdClientIDFilter filter)
+ {
+ guint id = 0;
+ GstMPDNode *node;
+ do {
+ node = filter (list, id);
+ id++;
+ } while (node);
+
+ return id;
+ }
+
+ /* Return the Representation with the smallest bandwidth attribute, or
+  * NULL when the list is empty. */
+ static GstMPDRepresentationNode *
+ gst_mpd_client_get_lowest_representation (GList * Representations)
+ {
+ GList *list = NULL;
+ GstMPDRepresentationNode *rep = NULL;
+ GstMPDRepresentationNode *lowest = NULL;
+
+ if (Representations == NULL)
+ return NULL;
+
+ for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+ rep = (GstMPDRepresentationNode *) list->data;
+ if (rep && (!lowest || rep->bandwidth < lowest->bandwidth)) {
+ lowest = rep;
+ }
+ }
+
+ return lowest;
+ }
+
+ /* Disabled representation-selection helpers, kept for reference only
+  * (compiled out via #if 0). */
+ #if 0
+ /* Return the last Representation in the list (the list is assumed to be
+  * ordered so that the last entry is the highest). */
+ static GstMPDRepresentationNode *
+ gst_mpdparser_get_highest_representation (GList * Representations)
+ {
+ GList *list = NULL;
+
+ if (Representations == NULL)
+ return NULL;
+
+ list = g_list_last (Representations);
+
+ return list ? (GstMPDRepresentationNode *) list->data : NULL;
+ }
+
+ /* Return the last Representation whose bandwidth does not exceed
+  * @max_bandwidth; 0 or negative selects the highest available. */
+ static GstMPDRepresentationNode *
+ gst_mpdparser_get_representation_with_max_bandwidth (GList * Representations,
+ gint max_bandwidth)
+ {
+ GList *list = NULL;
+ GstMPDRepresentationNode *representation, *best_rep = NULL;
+
+ if (Representations == NULL)
+ return NULL;
+
+ if (max_bandwidth <= 0) /* 0 => get highest representation available */
+ return gst_mpdparser_get_highest_representation (Representations);
+
+ for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+ representation = (GstMPDRepresentationNode *) list->data;
+ if (representation && representation->bandwidth <= max_bandwidth) {
+ best_rep = representation;
+ }
+ }
+
+ return best_rep;
+ }
+ #endif
+
+ /* Resolve an xlink:href on a SegmentList: build the absolute URI from the
+  * BaseURL hierarchy (MPD -> Period -> AdaptationSet -> Representation),
+  * download the referenced XML, and parse it into a new SegmentList node
+  * with @parent as its inheritance parent. Returns NULL for the special
+  * "resolve-to-zero" URN, on download failure, or without a downloader. */
+ static GstMPDSegmentListNode *
+ gst_mpd_client_fetch_external_segment_list (GstMPDClient * client,
+ GstMPDPeriodNode * Period,
+ GstMPDAdaptationSetNode * AdaptationSet,
+ GstMPDRepresentationNode * Representation,
+ GstMPDSegmentListNode * parent, GstMPDSegmentListNode * segment_list)
+ {
+ GstFragment *download;
+ GstBuffer *segment_list_buffer = NULL;
+ GstMapInfo map;
+ GError *err = NULL;
+
+ GstUri *base_uri, *uri;
+ gchar *query = NULL;
+ gchar *uri_string;
+ GstMPDSegmentListNode *new_segment_list = NULL;
+
+ /* ISO/IEC 23009-1:2014 5.5.3 4)
+ * Remove nodes that resolve to nothing when resolving
+ */
+ if (strcmp (segment_list->xlink_href,
+ "urn:mpeg:dash:resolve-to-zero:2013") == 0) {
+ return NULL;
+ }
+
+ if (!client->downloader) {
+ return NULL;
+ }
+
+ /* Build absolute URI */
+
+ /* Get base URI at the MPD level */
+ base_uri =
+ gst_uri_from_string (client->mpd_base_uri ? client->
+ mpd_base_uri : client->mpd_uri);
+
+ /* combine a BaseURL at the MPD level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, client->mpd_root_node->BaseURLs,
+ &query, 0);
+
+ /* combine a BaseURL at the Period level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, Period->BaseURLs, &query, 0);
+
+ if (AdaptationSet) {
+ /* combine a BaseURL at the AdaptationSet level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, AdaptationSet->BaseURLs, &query,
+ 0);
+
+ if (Representation) {
+ /* combine a BaseURL at the Representation level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, Representation->BaseURLs,
+ &query, 0);
+ }
+ }
+
+ uri = gst_uri_from_string_with_base (base_uri, segment_list->xlink_href);
+ if (query)
+ gst_uri_set_query_string (uri, query);
+ g_free (query);
+ uri_string = gst_uri_to_string (uri);
+ gst_uri_unref (base_uri);
+ gst_uri_unref (uri);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ download =
++ gst_uri_downloader_fetch_uri (client->downloader,
++ uri_string, client->mpd_uri, NULL, NULL, DEFAULT_ADAPTIVE_RETRY,
++ DEFAULT_ADAPTIVE_TIMEOUT, TRUE, FALSE, TRUE, &err);
++#else
+ download =
+ gst_uri_downloader_fetch_uri (client->downloader,
+ uri_string, client->mpd_uri, TRUE, FALSE, TRUE, &err);
++#endif
+ g_free (uri_string);
+
+ if (!download) {
+ GST_ERROR ("Failed to download external SegmentList node at '%s': %s",
+ segment_list->xlink_href, err->message);
+ g_clear_error (&err);
+ return NULL;
+ }
+
+ segment_list_buffer = gst_fragment_get_buffer (download);
+ g_object_unref (download);
+
+ /* NOTE(review): the buffer is assumed non-NULL here and the return value
+ of gst_buffer_map() is unchecked — the NULL guard below only protects
+ the unmap/unref, not the parse. Confirm upstream behaviour. */
+ gst_buffer_map (segment_list_buffer, &map, GST_MAP_READ);
+
+ new_segment_list =
+ gst_mpdparser_get_external_segment_list ((const gchar *) map.data,
+ map.size, parent);
+
+ if (segment_list_buffer) {
+ gst_buffer_unmap (segment_list_buffer, &map);
+ gst_buffer_unref (segment_list_buffer);
+ }
+
+ return new_segment_list;
+ }
+
+ /* Resolve the effective SegmentBase by walking the MPD hierarchy from the
+  * most specific level (Representation) up to the Period, falling back to
+  * the SegmentBase embedded in a SegmentList at each level. */
+ static GstMPDSegmentBaseNode *
+ gst_mpd_client_get_segment_base (GstMPDPeriodNode * Period,
+ GstMPDAdaptationSetNode * AdaptationSet,
+ GstMPDRepresentationNode * Representation)
+ {
+ GstMPDSegmentBaseNode *SegmentBase = NULL;
+
+ if (Representation && Representation->SegmentBase) {
+ SegmentBase = Representation->SegmentBase;
+ } else if (AdaptationSet && AdaptationSet->SegmentBase) {
+ SegmentBase = AdaptationSet->SegmentBase;
+ } else if (Period && Period->SegmentBase) {
+ SegmentBase = Period->SegmentBase;
+ }
+ /* the SegmentBase element could be encoded also inside a SegmentList element */
+ if (SegmentBase == NULL) {
+ if (Representation && Representation->SegmentList
+ && GST_MPD_MULT_SEGMENT_BASE_NODE (Representation->SegmentList)
+ && GST_MPD_MULT_SEGMENT_BASE_NODE (Representation->
+ SegmentList)->SegmentBase) {
+ SegmentBase =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (Representation->
+ SegmentList)->SegmentBase;
+ } else if (AdaptationSet && AdaptationSet->SegmentList
+ && GST_MPD_MULT_SEGMENT_BASE_NODE (AdaptationSet->SegmentList)
+ && GST_MPD_MULT_SEGMENT_BASE_NODE (AdaptationSet->
+ SegmentList)->SegmentBase) {
+ SegmentBase =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (AdaptationSet->
+ SegmentList)->SegmentBase;
+ } else if (Period && Period->SegmentList
+ && GST_MPD_MULT_SEGMENT_BASE_NODE (Period->SegmentList)
+ && GST_MPD_MULT_SEGMENT_BASE_NODE (Period->SegmentList)->SegmentBase) {
+ SegmentBase =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (Period->SegmentList)->SegmentBase;
+ }
+ }
+
+ return SegmentBase;
+ }
+
+ /* Resolve the effective SegmentList from the most specific level that has
+  * one (Representation > AdaptationSet > Period), fetching and replacing
+  * external (xlink) SegmentLists in place.
+  * NOTE(review): assumes a non-NULL Representation implies a non-NULL
+  * AdaptationSet (and AdaptationSet implies Period) — the Parent lookups
+  * below would otherwise dereference NULL; confirm callers guarantee this. */
+ static GstMPDSegmentListNode *
+ gst_mpd_client_get_segment_list (GstMPDClient * client,
+ GstMPDPeriodNode * Period, GstMPDAdaptationSetNode * AdaptationSet,
+ GstMPDRepresentationNode * Representation)
+ {
+ GstMPDSegmentListNode **SegmentList;
+ GstMPDSegmentListNode *ParentSegmentList = NULL;
+
+ if (Representation && Representation->SegmentList) {
+ SegmentList = &Representation->SegmentList;
+ ParentSegmentList = AdaptationSet->SegmentList;
+ } else if (AdaptationSet && AdaptationSet->SegmentList) {
+ SegmentList = &AdaptationSet->SegmentList;
+ ParentSegmentList = Period->SegmentList;
+ Representation = NULL;
+ } else {
+ Representation = NULL;
+ AdaptationSet = NULL;
+ SegmentList = &Period->SegmentList;
+ }
+
+ /* Resolve external segment list here. */
+ if (*SegmentList && (*SegmentList)->xlink_href) {
+ GstMPDSegmentListNode *new_segment_list;
+
+ /* TODO: Use SegmentList of parent if
+ * - Parent has its own SegmentList
+ * - Fail to get SegmentList from external xml
+ */
+ new_segment_list =
+ gst_mpd_client_fetch_external_segment_list (client, Period,
+ AdaptationSet, Representation, ParentSegmentList, *SegmentList);
+
+ gst_mpd_segment_list_node_free (*SegmentList);
+ *SegmentList = new_segment_list;
+ }
+
+ return *SegmentList;
+ }
+
+ /* Compute the nominal duration of one media segment for @stream, in
+  * nanoseconds. Falls back to the whole period duration when no
+  * multi-segment base with a timescale is available (single-segment case).
+  * @scale_dur optionally receives the duration before timescale division. */
+ static GstClockTime
+ gst_mpd_client_get_segment_duration (GstMPDClient * client,
+ GstActiveStream * stream, guint64 * scale_dur)
+ {
+ GstStreamPeriod *stream_period;
+ GstMPDMultSegmentBaseNode *base = NULL;
+ GstClockTime duration = 0;
+
+ g_return_val_if_fail (stream != NULL, GST_CLOCK_TIME_NONE);
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, GST_CLOCK_TIME_NONE);
+
+ if (stream->cur_segment_list) {
+ base = GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_segment_list);
+ } else if (stream->cur_seg_template) {
+ base = GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_seg_template);
+ }
+
+ if (base == NULL || base->SegmentBase == NULL) {
+ /* this may happen when we have a single segment */
+ duration = stream_period->duration;
+ if (scale_dur)
+ *scale_dur = duration;
+ } else {
+ /* duration is guint so this cannot overflow */
+ duration = base->duration * GST_SECOND;
+ if (scale_dur)
+ *scale_dur = duration;
+ duration /= base->SegmentBase->timescale;
+ }
+
+ return duration;
+ }
+
+ /* Free every GstActiveStream held by the client and reset the list. */
+ void
+ gst_mpd_client_active_streams_free (GstMPDClient * client)
+ {
+ if (client->active_streams) {
+ g_list_foreach (client->active_streams,
+ (GFunc) gst_mpdparser_free_active_stream, NULL);
+ g_list_free (client->active_streams);
+ client->active_streams = NULL;
+ }
+ }
+
+ /* GObject finalize: release the parsed MPD tree, stream periods, active
+  * streams, URI strings and the downloader reference, then chain up. */
+ static void
+ gst_mpd_client_finalize (GObject * object)
+ {
+ GstMPDClient *client = GST_MPD_CLIENT (object);
+
+ if (client->mpd_root_node)
+ gst_mpd_root_node_free (client->mpd_root_node);
+
+ if (client->periods) {
+ g_list_free_full (client->periods,
+ (GDestroyNotify) gst_mpdparser_free_stream_period);
+ }
+
+ gst_mpd_client_active_streams_free (client);
+
+ g_free (client->mpd_uri);
+ client->mpd_uri = NULL;
+ g_free (client->mpd_base_uri);
+ client->mpd_base_uri = NULL;
+
+ if (client->downloader)
+ gst_object_unref (client->downloader);
+ client->downloader = NULL;
+
+ G_OBJECT_CLASS (gst_mpd_client_parent_class)->finalize (object);
+ }
+
+ /* GObject class init: only hooks up finalize. */
+ static void
+ gst_mpd_client_class_init (GstMPDClientClass * klass)
+ {
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ object_class->finalize = gst_mpd_client_finalize;
+ }
+
+ /* GObject instance init: nothing to do, all fields start zeroed. */
+ static void
+ gst_mpd_client_init (GstMPDClient * client)
+ {
+ }
+
+ /* Create a new, empty GstMPDClient. Also initializes the debug category
+  * on first use. */
+ GstMPDClient *
+ gst_mpd_client_new (void)
+ {
+ GST_DEBUG_CATEGORY_INIT (gst_dash_mpd_client_debug, "dashmpdclient", 0,
+ "DashMpdClient");
+ return g_object_new (GST_TYPE_MPD_CLIENT, NULL);
+ }
+
+ /* Create a client pre-populated with a minimal static-profile MPD root
+  * node (isoff-main profile, 1500 ms minBufferTime), for building an MPD
+  * programmatically rather than parsing one. */
+ GstMPDClient *
+ gst_mpd_client_new_static (void)
+ {
+ GstMPDClient *client = gst_mpd_client_new ();
+
+ client->mpd_root_node = gst_mpd_root_node_new ();
+ client->mpd_root_node->default_namespace =
+ g_strdup ("urn:mpeg:dash:schema:mpd:2011");
+ client->mpd_root_node->profiles =
+ g_strdup ("urn:mpeg:dash:profile:isoff-main:2011");
+ client->mpd_root_node->type = GST_MPD_FILE_TYPE_STATIC;
+ client->mpd_root_node->minBufferTime = 1500;
+
+ return client;
+ }
+
+ /* NULL-safe convenience wrapper: drop one reference on @client. */
+ void
+ gst_mpd_client_free (GstMPDClient * client)
+ {
+ if (client)
+ gst_object_unref (client);
+ }
+
+ /* Parse @size bytes of MPD XML into client->mpd_root_node. On success,
+  * also records the detected profiles and resolves all xlink resources
+  * marked actuate="onLoad". Returns TRUE on successful parse. */
+ gboolean
+ gst_mpd_client_parse (GstMPDClient * client, const gchar * data, gint size)
+ {
+ gboolean ret = FALSE;
+
+
+ ret = gst_mpdparser_get_mpd_root_node (&client->mpd_root_node, data, size);
+
+ if (ret) {
+ gst_mpd_client_check_profiles (client);
+ gst_mpd_client_fetch_on_load_external_resources (client);
+ }
+
+ return ret;
+ }
+
+
+ /* Serializes the client's MPD tree back to XML. On success *@data and
+  * *@size receive the buffer and its length (ownership per
+  * gst_mpd_node_get_xml_buffer — presumably caller-owned; confirm).
+  * Requires a parsed/constructed root node; returns FALSE otherwise. */
+ gboolean
+ gst_mpd_client_get_xml_content (GstMPDClient * client, gchar ** data,
+ gint * size)
+ {
+ gboolean ret = FALSE;
+
+ g_return_val_if_fail (client != NULL, ret);
+ g_return_val_if_fail (client->mpd_root_node != NULL, ret);
+
+ ret = gst_mpd_node_get_xml_buffer (GST_MPD_NODE (client->mpd_root_node),
+ data, (int *) size);
+
+ return ret;
+ }
+
+ /* Returns a new reference to the MPD's availabilityStartTime, or NULL if
+  * the client is NULL or the attribute is unset. Caller must unref. */
+ GstDateTime *
+ gst_mpd_client_get_availability_start_time (GstMPDClient * client)
+ {
+ GstDateTime *start_time;
+
+ if (client == NULL)
+ return (GstDateTime *) NULL;
+
+ start_time = client->mpd_root_node->availabilityStartTime;
+ if (start_time)
+ gst_date_time_ref (start_time);
+ return start_time;
+ }
+
+ /* Replaces the client's URI downloader, dropping any previous one and
+  * taking a new reference on @downloader. */
+ void
+ gst_mpd_client_set_uri_downloader (GstMPDClient * client,
+ GstUriDownloader * downloader)
+ {
+ if (client->downloader)
+ gst_object_unref (client->downloader);
+ client->downloader = gst_object_ref (downloader);
+ }
+
+ /* Scans the MPD @profiles attribute and latches feature flags on the
+  * client. Currently only detects the ISOFF on-demand 2011 profile via a
+  * substring match. */
+ void
+ gst_mpd_client_check_profiles (GstMPDClient * client)
+ {
+ GST_DEBUG ("Profiles: %s",
+ client->mpd_root_node->profiles ? client->mpd_root_node->
+ profiles : "<none>");
+
+ if (!client->mpd_root_node->profiles)
+ return;
+
+ if (g_strstr_len (client->mpd_root_node->profiles, -1,
+ "urn:mpeg:dash:profile:isoff-on-demand:2011")) {
+ client->profile_isoff_ondemand = TRUE;
+ GST_DEBUG ("Found ISOFF on demand profile (2011)");
+ }
+ }
+
+ /* Resolves all xlink references with actuate="onLoad" in the MPD tree:
+  * external Periods, external SegmentLists (at Period, AdaptationSet and
+  * Representation level) and external AdaptationSets. Each resolved node
+  * is downloaded, the placeholder node is freed and removed from its list,
+  * and the fetched replacement nodes are spliced in at the same position.
+  * The outer loops advance their iterators manually because the list links
+  * they walk can be deleted/replaced mid-iteration. */
+ void
+ gst_mpd_client_fetch_on_load_external_resources (GstMPDClient * client)
+ {
+ GList *l;
+
+ for (l = client->mpd_root_node->Periods; l; /* explicitly advanced below */ ) {
+ GstMPDPeriodNode *period = l->data;
+ GList *m;
+
+ if (period->xlink_href && period->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+ GList *new_periods, *prev, *next;
+
+ new_periods = gst_mpd_client_fetch_external_periods (client, period);
+
+ /* remember the neighbour before unlinking so we can re-anchor */
+ prev = l->prev;
+ client->mpd_root_node->Periods =
+ g_list_delete_link (client->mpd_root_node->Periods, l);
+ gst_mpd_period_node_free (period);
+ period = NULL;
+
+ /* Get new next node, we will insert before this */
+ if (prev)
+ next = prev->next;
+ else
+ next = client->mpd_root_node->Periods;
+
+ while (new_periods) {
+ client->mpd_root_node->Periods =
+ g_list_insert_before (client->mpd_root_node->Periods, next,
+ new_periods->data);
+ new_periods = g_list_delete_link (new_periods, new_periods);
+ }
+ next = NULL;
+
+ /* Update our iterator to the first new period if any, or the next */
+ if (prev)
+ l = prev->next;
+ else
+ l = client->mpd_root_node->Periods;
+
+ continue;
+ }
+
+ if (period->SegmentList && period->SegmentList->xlink_href
+ && period->SegmentList->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+ GstMPDSegmentListNode *new_segment_list;
+
+ /* NOTE(review): on download failure this yields NULL and the old
+  * SegmentList is still freed — the Period ends up without one */
+ new_segment_list =
+ gst_mpd_client_fetch_external_segment_list (client, period, NULL,
+ NULL, NULL, period->SegmentList);
+
+ gst_mpd_segment_list_node_free (period->SegmentList);
+ period->SegmentList = new_segment_list;
+ }
+
+ for (m = period->AdaptationSets; m; /* explicitly advanced below */ ) {
+ GstMPDAdaptationSetNode *adapt_set = m->data;
+ GList *n;
+
+ if (adapt_set->xlink_href
+ && adapt_set->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+ GList *new_adapt_sets, *prev, *next;
+
+ new_adapt_sets =
+ gst_mpd_client_fetch_external_adaptation_set (client, period,
+ adapt_set);
+
+ prev = m->prev;
+ period->AdaptationSets = g_list_delete_link (period->AdaptationSets, m);
+ gst_mpd_adaptation_set_node_free (adapt_set);
+ adapt_set = NULL;
+
+ /* Get new next node, we will insert before this */
+ if (prev)
+ next = prev->next;
+ else
+ next = period->AdaptationSets;
+
+ while (new_adapt_sets) {
+ period->AdaptationSets =
+ g_list_insert_before (period->AdaptationSets, next,
+ new_adapt_sets->data);
+ new_adapt_sets = g_list_delete_link (new_adapt_sets, new_adapt_sets);
+ }
+ next = NULL;
+
+ /* Update our iterator to the first new adapt_set if any, or the next */
+ if (prev)
+ m = prev->next;
+ else
+ m = period->AdaptationSets;
+
+ continue;
+ }
+
+ if (adapt_set->SegmentList && adapt_set->SegmentList->xlink_href
+ && adapt_set->SegmentList->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+ GstMPDSegmentListNode *new_segment_list;
+
+ new_segment_list =
+ gst_mpd_client_fetch_external_segment_list (client, period,
+ adapt_set, NULL, period->SegmentList, adapt_set->SegmentList);
+
+ gst_mpd_segment_list_node_free (adapt_set->SegmentList);
+ adapt_set->SegmentList = new_segment_list;
+ }
+
+ for (n = adapt_set->Representations; n; n = n->next) {
+ GstMPDRepresentationNode *representation = n->data;
+
+ if (representation->SegmentList
+ && representation->SegmentList->xlink_href
+ && representation->SegmentList->actuate ==
+ GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+
+ GstMPDSegmentListNode *new_segment_list;
+
+ new_segment_list =
+ gst_mpd_client_fetch_external_segment_list (client, period,
+ adapt_set, representation, adapt_set->SegmentList,
+ representation->SegmentList);
+
+ gst_mpd_segment_list_node_free (representation->SegmentList);
+ representation->SegmentList = new_segment_list;
+
+ }
+ }
+
+ m = m->next;
+ }
+
+ l = l->next;
+ }
+ }
+
+
+ /* Returns the currently selected GstStreamPeriod (client->period_idx),
+  * or NULL if the index is out of range or the period list is empty. */
+ static GstStreamPeriod *
+ gst_mpd_client_get_stream_period (GstMPDClient * client)
+ {
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (client->periods != NULL, NULL);
+
+ return g_list_nth_data (client->periods, client->period_idx);
+ }
+
+ /* Returns the resolved baseURL string of active stream @indexStream
+  * (borrowed pointer, owned by the stream), or NULL on invalid index. */
+ const gchar *
+ gst_mpd_client_get_baseURL (GstMPDClient * client, guint indexStream)
+ {
+ GstActiveStream *stream;
+
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (client->active_streams != NULL, NULL);
+ stream = g_list_nth_data (client->active_streams, indexStream);
+ g_return_val_if_fail (stream != NULL, NULL);
+
+ return stream->baseURL;
+ }
+
+ /* Resolves the absolute base URL for @stream by layering BaseURL elements
+  * from the MPD, Period, AdaptationSet and Representation levels (in that
+  * order) on top of the manifest URI. Returns a newly allocated string the
+  * caller must free; on precondition failure returns g_strdup(""). If
+  * @query is non-NULL it receives any query string extracted along the way
+  * (caller-owned). */
+ gchar *
+ gst_mpd_client_parse_baseURL (GstMPDClient * client, GstActiveStream * stream,
+ gchar ** query)
+ {
+ GstStreamPeriod *stream_period;
+ static const gchar empty[] = "";
+ gchar *ret = NULL;
+ GstUri *abs_url;
+
+ g_return_val_if_fail (stream != NULL, g_strdup (empty));
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, g_strdup (empty));
+ g_return_val_if_fail (stream_period->period != NULL, g_strdup (empty));
+
+ /* NULLify query return before we start */
+ if (query)
+ *query = NULL;
+
+ /* initialise base url: prefer the redirect target over the request URI */
+ abs_url =
+ gst_uri_from_string (client->mpd_base_uri ? client->
+ mpd_base_uri : client->mpd_uri);
+
+ /* combine a BaseURL at the MPD level with the current base url */
+ abs_url =
+ gst_mpd_helper_combine_urls (abs_url, client->mpd_root_node->BaseURLs,
+ query, stream->baseURL_idx);
+
+ /* combine a BaseURL at the Period level with the current base url */
+ abs_url =
+ gst_mpd_helper_combine_urls (abs_url, stream_period->period->BaseURLs,
+ query, stream->baseURL_idx);
+
+ GST_DEBUG ("Current adaptation set id %i (%s)", stream->cur_adapt_set->id,
+ stream->cur_adapt_set->contentType);
+ /* combine a BaseURL at the AdaptationSet level with the current base url */
+ abs_url =
+ gst_mpd_helper_combine_urls (abs_url, stream->cur_adapt_set->BaseURLs,
+ query, stream->baseURL_idx);
+
+ /* combine a BaseURL at the Representation level with the current base url */
+ abs_url =
+ gst_mpd_helper_combine_urls (abs_url,
+ stream->cur_representation->BaseURLs, query, stream->baseURL_idx);
+
+ ret = gst_uri_to_string (abs_url);
+ gst_uri_unref (abs_url);
+
+ return ret;
+ }
+
+ /* Computes the end timestamp of @segment (at @index in @segments).
+  * For a concrete repeat count the end is start + (repeat+1)*duration;
+  * for a negative repeat ("repeat until next entry") the end is the next
+  * segment's start, or the period end for the last segment. */
+ static GstClockTime
+ gst_mpd_client_get_segment_end_time (GstMPDClient * client,
+ GPtrArray * segments, const GstMediaSegment * segment, gint index)
+ {
+ const GstStreamPeriod *stream_period;
+ GstClockTime end;
+
+ if (segment->repeat >= 0)
+ return segment->start + (segment->repeat + 1) * segment->duration;
+
+ /* NOTE(review): segments->len is unsigned; this comparison relies on the
+  * array being non-empty (len >= 1), which holds since @segment is in it */
+ if (index < segments->len - 1) {
+ const GstMediaSegment *next_segment =
+ g_ptr_array_index (segments, index + 1);
+ end = next_segment->start;
+ } else {
+ stream_period = gst_mpd_client_get_stream_period (client);
+ end = stream_period->start + stream_period->duration;
+ }
+ return end;
+ }
+
+ /* Appends one GstMediaSegment to @stream->segments. @url_node is stored by
+  * pointer (not copied); @scale_start/@scale_duration are in timescale
+  * units while @start/@duration are in GstClockTime. Requires the segment
+  * array to be initialised; always returns TRUE otherwise. */
+ static gboolean
+ gst_mpd_client_add_media_segment (GstActiveStream * stream,
+ GstMPDSegmentURLNode * url_node, guint number, gint repeat,
+ guint64 scale_start, guint64 scale_duration,
+ GstClockTime start, GstClockTime duration)
+ {
+ GstMediaSegment *media_segment;
+
+ g_return_val_if_fail (stream->segments != NULL, FALSE);
+
+ media_segment = g_slice_new0 (GstMediaSegment);
+
+ media_segment->SegmentURL = url_node;
+ media_segment->number = number;
+ media_segment->scale_start = scale_start;
+ media_segment->scale_duration = scale_duration;
+ media_segment->start = start;
+ media_segment->duration = duration;
+ media_segment->repeat = repeat;
+
+ g_ptr_array_add (stream->segments, media_segment);
+ GST_LOG ("Added new segment: number %d, repeat %d, "
+ "ts: %" GST_TIME_FORMAT ", dur: %"
+ GST_TIME_FORMAT, number, repeat,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (duration));
+
+ return TRUE;
+ }
+
+ /* Recomputes stream->presentationTimeOffset (in GstClockTime) from the
+  * SegmentBase that is actually in effect, probed in priority order:
+  * SegmentList, then SegmentTemplate, then a bare SegmentBase. The raw
+  * offset is scaled from timescale units to nanoseconds; without any
+  * segbase the offset is zero. */
+ static void
+ gst_mpd_client_stream_update_presentation_time_offset (GstMPDClient * client,
+ GstActiveStream * stream)
+ {
+ GstMPDSegmentBaseNode *segbase = NULL;
+
+ /* Find the used segbase */
+ if (stream->cur_segment_list) {
+ segbase =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_segment_list)->SegmentBase;
+ } else if (stream->cur_seg_template) {
+ segbase =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_seg_template)->SegmentBase;
+ } else if (stream->cur_segment_base) {
+ segbase = stream->cur_segment_base;
+ }
+
+ if (segbase) {
+ /* Avoid overflows */
+ stream->presentationTimeOffset =
+ gst_util_uint64_scale (segbase->presentationTimeOffset, GST_SECOND,
+ segbase->timescale);
+ } else {
+ stream->presentationTimeOffset = 0;
+ }
+
+ GST_LOG ("Setting stream's presentation time offset to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->presentationTimeOffset));
+ }
+
+ /* Activates @representation on @stream and (re)builds the stream's media
+  * segment list for the current period. Three layouts are handled:
+  *  - SegmentBase/SegmentList: an explicit list of segments, either driven
+  *    by a SegmentTimeline or by a constant segment duration;
+  *  - SegmentTemplate: segments generated on demand (a list is only built
+  *    when the template carries a SegmentTimeline);
+  *  - neither: a single segment spanning the whole period (baseURL case).
+  * Afterwards segment durations are clipped at the period end, the
+  * stream's baseURL/queryURL are re-resolved and the presentation time
+  * offset is refreshed. Returns FALSE on malformed MPD data.
+  * NOTE(review): statement order matters here (segment list construction
+  * feeds the clipping pass) — kept byte-identical. */
+ gboolean
+ gst_mpd_client_setup_representation (GstMPDClient * client,
+ GstActiveStream * stream, GstMPDRepresentationNode * representation)
+ {
+ GstStreamPeriod *stream_period;
+ GList *rep_list;
+ GstClockTime PeriodStart, PeriodEnd, start_time, duration;
+ guint i;
+ guint64 start;
+
+ if (stream->cur_adapt_set == NULL) {
+ GST_WARNING ("No valid AdaptationSet node in the MPD file, aborting...");
+ return FALSE;
+ }
+
+ rep_list = stream->cur_adapt_set->Representations;
+ stream->cur_representation = representation;
+ stream->representation_idx = g_list_index (rep_list, representation);
+
+ /* clean the old segment list, if any */
+ if (stream->segments) {
+ g_ptr_array_unref (stream->segments);
+ stream->segments = NULL;
+ }
+
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, FALSE);
+ g_return_val_if_fail (stream_period->period != NULL, FALSE);
+
+ PeriodStart = stream_period->start;
+ if (GST_CLOCK_TIME_IS_VALID (stream_period->duration))
+ PeriodEnd = stream_period->start + stream_period->duration;
+ else
+ PeriodEnd = GST_CLOCK_TIME_NONE;
+
+ GST_LOG ("Building segment list for Period from %" GST_TIME_FORMAT " to %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (PeriodStart), GST_TIME_ARGS (PeriodEnd));
+
+ if (representation->SegmentBase != NULL
+ || representation->SegmentList != NULL) {
+ GList *SegmentURL;
+
+ /* We have a fixed list of segments for any of the cases here,
+ * init the segments list */
+ gst_mpdparser_init_active_stream_segments (stream);
+
+ /* get the first segment_base of the selected representation */
+ if ((stream->cur_segment_base =
+ gst_mpd_client_get_segment_base (stream_period->period,
+ stream->cur_adapt_set, representation)) == NULL) {
+ GST_DEBUG ("No useful SegmentBase node for the current Representation");
+ }
+
+ /* get the first segment_list of the selected representation */
+ if ((stream->cur_segment_list =
+ gst_mpd_client_get_segment_list (client, stream_period->period,
+ stream->cur_adapt_set, representation)) == NULL) {
+ GST_DEBUG ("No useful SegmentList node for the current Representation");
+ /* here we should have a single segment for each representation, whose URL is encoded in the baseURL element */
+ if (!gst_mpd_client_add_media_segment (stream, NULL, 1, 0, 0,
+ PeriodEnd - PeriodStart, PeriodStart, PeriodEnd - PeriodStart)) {
+ return FALSE;
+ }
+ } else {
+ /* build the list of GstMediaSegment nodes from the SegmentList node */
+ SegmentURL = stream->cur_segment_list->SegmentURL;
+ if (SegmentURL == NULL) {
+ GST_WARNING
+ ("No valid list of SegmentURL nodes in the MPD file, aborting...");
+ return FALSE;
+ }
+
+ /* build segment list */
+ i = GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_segment_list)->startNumber;
+ start = 0;
+ start_time = PeriodStart;
+
+ GST_LOG ("Building media segment list using a SegmentList node");
+ if (GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_segment_list)->SegmentTimeline) {
+ GstMPDSegmentTimelineNode *timeline;
+ GstMPDSNode *S;
+ GList *list;
+ GstClockTime presentationTimeOffset;
+ GstMPDSegmentBaseNode *segbase;
+
+ segbase =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_segment_list)->SegmentBase;
+ presentationTimeOffset =
+ gst_util_uint64_scale (segbase->presentationTimeOffset, GST_SECOND,
+ segbase->timescale);
+ GST_LOG ("presentationTimeOffset = %" G_GUINT64_FORMAT,
+ presentationTimeOffset);
+
+ timeline =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_segment_list)->SegmentTimeline;
+ for (list = g_queue_peek_head_link (&timeline->S); list;
+ list = g_list_next (list)) {
+ guint timescale;
+
+ S = (GstMPDSNode *) list->data;
+ GST_LOG ("Processing S node: d=%" G_GUINT64_FORMAT " r=%d t=%"
+ G_GUINT64_FORMAT, S->d, S->r, S->t);
+ timescale =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_segment_list)->SegmentBase->timescale;
+ duration = gst_util_uint64_scale (S->d, GST_SECOND, timescale);
+
+ /* an explicit S@t re-anchors both the scale and clock-time cursors */
+ if (S->t > 0) {
+ start = S->t;
+ start_time = gst_util_uint64_scale (S->t, GST_SECOND, timescale)
+ + PeriodStart - presentationTimeOffset;
+ }
+
+ if (!SegmentURL) {
+ GST_WARNING
+ ("SegmentTimeline does not have a matching SegmentURL, aborting...");
+ return FALSE;
+ }
+
+ if (!gst_mpd_client_add_media_segment (stream, SegmentURL->data, i,
+ S->r, start, S->d, start_time, duration)) {
+ return FALSE;
+ }
+ i += S->r + 1;
+ start_time += duration * (S->r + 1);
+ start += S->d * (S->r + 1);
+ SegmentURL = g_list_next (SegmentURL);
+ }
+ } else {
+ guint64 scale_dur;
+
+ duration =
+ gst_mpd_client_get_segment_duration (client, stream, &scale_dur);
+ if (!GST_CLOCK_TIME_IS_VALID (duration))
+ return FALSE;
+
+ while (SegmentURL) {
+ if (!gst_mpd_client_add_media_segment (stream, SegmentURL->data, i,
+ 0, start, scale_dur, start_time, duration)) {
+ return FALSE;
+ }
+ i++;
+ start += scale_dur;
+ start_time += duration;
+ SegmentURL = g_list_next (SegmentURL);
+ }
+ }
+ }
+ } else {
+ /* pick the most specific SegmentTemplate: Representation, then
+  * AdaptationSet, then Period */
+ if (representation->SegmentTemplate != NULL) {
+ stream->cur_seg_template = representation->SegmentTemplate;
+ } else if (stream->cur_adapt_set->SegmentTemplate != NULL) {
+ stream->cur_seg_template = stream->cur_adapt_set->SegmentTemplate;
+ } else if (stream_period->period->SegmentTemplate != NULL) {
+ stream->cur_seg_template = stream_period->period->SegmentTemplate;
+ }
+
+ if (stream->cur_seg_template == NULL) {
+
+ gst_mpdparser_init_active_stream_segments (stream);
+ /* here we should have a single segment for each representation, whose URL is encoded in the baseURL element */
+ if (!gst_mpd_client_add_media_segment (stream, NULL, 1, 0, 0,
+ PeriodEnd - PeriodStart, 0, PeriodEnd - PeriodStart)) {
+ return FALSE;
+ }
+ } else {
+ GstClockTime presentationTimeOffset;
+ GstMPDMultSegmentBaseNode *mult_seg =
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_seg_template);
+ presentationTimeOffset =
+ gst_util_uint64_scale (mult_seg->SegmentBase->presentationTimeOffset,
+ GST_SECOND, mult_seg->SegmentBase->timescale);
+ GST_LOG ("presentationTimeOffset = %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (presentationTimeOffset));
+ /* build segment list */
+ i = mult_seg->startNumber;
+ start = 0;
+ start_time = 0;
+
+ GST_LOG ("Building media segment list using this template: %s",
+ stream->cur_seg_template->media);
+
+ if (mult_seg->SegmentTimeline) {
+ GstMPDSegmentTimelineNode *timeline;
+ GstMPDSNode *S;
+ GList *list;
+
+ timeline = mult_seg->SegmentTimeline;
+ gst_mpdparser_init_active_stream_segments (stream);
+ for (list = g_queue_peek_head_link (&timeline->S); list;
+ list = g_list_next (list)) {
+ guint timescale;
+
+ S = (GstMPDSNode *) list->data;
+ GST_LOG ("Processing S node: d=%" G_GUINT64_FORMAT " r=%u t=%"
+ G_GUINT64_FORMAT, S->d, S->r, S->t);
+ timescale = mult_seg->SegmentBase->timescale;
+ duration = gst_util_uint64_scale (S->d, GST_SECOND, timescale);
+ if (S->t > 0) {
+ start = S->t;
+ start_time = gst_util_uint64_scale (S->t, GST_SECOND, timescale)
+ + PeriodStart - presentationTimeOffset;
+ }
+
+ if (!gst_mpd_client_add_media_segment (stream, NULL, i, S->r, start,
+ S->d, start_time, duration)) {
+ return FALSE;
+ }
+ i += S->r + 1;
+ start += S->d * (S->r + 1);
+ start_time += duration * (S->r + 1);
+ }
+ } else {
+ /* NOP - The segment is created on demand with the template, no need
+ * to build a list */
+ }
+ }
+ }
+
+ /* clip duration of segments to stop at period end */
+ if (stream->segments && stream->segments->len) {
+ if (GST_CLOCK_TIME_IS_VALID (PeriodEnd)) {
+ guint n;
+
+ for (n = 0; n < stream->segments->len; ++n) {
+ GstMediaSegment *media_segment =
+ g_ptr_array_index (stream->segments, n);
+ if (media_segment) {
+ if (media_segment->start + media_segment->duration > PeriodEnd) {
+ GstClockTime stop = PeriodEnd;
+ if (n < stream->segments->len - 1) {
+ GstMediaSegment *next_segment =
+ g_ptr_array_index (stream->segments, n + 1);
+ if (next_segment && next_segment->start < PeriodEnd)
+ stop = next_segment->start;
+ }
+ media_segment->duration =
+ media_segment->start > stop ? 0 : stop - media_segment->start;
+ GST_LOG ("Fixed duration of segment %u: %" GST_TIME_FORMAT, n,
+ GST_TIME_ARGS (media_segment->duration));
+
+ /* If the segment was clipped entirely, we discard it and all
+ * subsequent ones */
+ if (media_segment->duration == 0) {
+ GST_WARNING ("Discarding %u segments outside period",
+ stream->segments->len - n);
+ /* _set_size should properly unref elements */
+ g_ptr_array_set_size (stream->segments, n);
+ break;
+ }
+ }
+ }
+ }
+ }
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (stream->segments->len > 0) {
+ GstMediaSegment *last_media_segment =
+ g_ptr_array_index (stream->segments, stream->segments->len - 1);
+ GST_LOG ("Built a list of %d segments", last_media_segment->number);
+ } else {
+ GST_LOG ("All media segments were clipped");
+ }
+ #endif
+ }
+
+ g_free (stream->baseURL);
+ g_free (stream->queryURL);
+ stream->baseURL =
+ gst_mpd_client_parse_baseURL (client, stream, &stream->queryURL);
+
+ gst_mpd_client_stream_update_presentation_time_offset (client, stream);
+
+ return TRUE;
+ }
+
+ #define CUSTOM_WRAPPER_START "<custom_wrapper>"
+ #define CUSTOM_WRAPPER_END "</custom_wrapper>"
+
+ /* Downloads and parses an external Period referenced by @period_node's
+  * xlink:href. The href is resolved against the MPD base URI (plus any
+  * MPD-level BaseURL); the downloaded body is wrapped in a synthetic root
+  * element so that multiple sibling Periods parse as valid XML. Returns a
+  * list of newly parsed GstMPDPeriodNode, or NULL on resolve-to-zero,
+  * missing downloader, or download failure. */
+ static GList *
+ gst_mpd_client_fetch_external_periods (GstMPDClient * client,
+ GstMPDPeriodNode * period_node)
+ {
+ GstFragment *download;
+ GstAdapter *adapter;
+ GstBuffer *period_buffer;
+ GError *err = NULL;
+
+ GstUri *base_uri, *uri;
+ gchar *query = NULL;
+ gchar *uri_string, *wrapper;
+ GList *new_periods = NULL;
+ const gchar *data;
+
+ /* ISO/IEC 23009-1:2014 5.5.3 4)
+ * Remove nodes that resolve to nothing when resolving
+ */
+ if (strcmp (period_node->xlink_href,
+ "urn:mpeg:dash:resolve-to-zero:2013") == 0) {
+ return NULL;
+ }
+
+ if (!client->downloader) {
+ return NULL;
+ }
+
+ /* Build absolute URI */
+
+ /* Get base URI at the MPD level */
+ base_uri =
+ gst_uri_from_string (client->mpd_base_uri ? client->
+ mpd_base_uri : client->mpd_uri);
+
+ /* combine a BaseURL at the MPD level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, client->mpd_root_node->BaseURLs,
+ &query, 0);
+ uri = gst_uri_from_string_with_base (base_uri, period_node->xlink_href);
+ if (query)
+ gst_uri_set_query_string (uri, query);
+ g_free (query);
+ uri_string = gst_uri_to_string (uri);
+ gst_uri_unref (base_uri);
+ gst_uri_unref (uri);
+
+#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
+ download =
+ gst_uri_downloader_fetch_uri (client->downloader,
+ uri_string, client->mpd_uri, NULL, NULL, DEFAULT_ADAPTIVE_RETRY,
+ DEFAULT_ADAPTIVE_TIMEOUT, TRUE, FALSE, TRUE, &err);
+#else
+ download =
+ gst_uri_downloader_fetch_uri (client->downloader,
+ uri_string, client->mpd_uri, TRUE, FALSE, TRUE, &err);
+#endif
+ g_free (uri_string);
+
+ if (!download) {
+ /* NOTE(review): assumes the downloader always sets @err on failure;
+  * err->message would crash otherwise — confirm against the API */
+ GST_ERROR ("Failed to download external Period node at '%s': %s",
+ period_node->xlink_href, err->message);
+ g_clear_error (&err);
+ return NULL;
+ }
+
+ period_buffer = gst_fragment_get_buffer (download);
+ g_object_unref (download);
+
+ /* external xml could have multiple period without root xmlNode.
+ * To avoid xml parsing error caused by no root node, wrapping it with
+ * custom root node */
+ adapter = gst_adapter_new ();
+
+ /* buffer is exactly strlen() bytes, no NUL — the wrapped GstBuffer
+  * carries the explicit length, so none is needed here */
+ wrapper = g_new (gchar, strlen (CUSTOM_WRAPPER_START));
+ memcpy (wrapper, CUSTOM_WRAPPER_START, strlen (CUSTOM_WRAPPER_START));
+ gst_adapter_push (adapter,
+ gst_buffer_new_wrapped (wrapper, strlen (CUSTOM_WRAPPER_START)));
+
+ gst_adapter_push (adapter, period_buffer);
+
+ /* the closing tag is pushed with its NUL (+1) so the mapped adapter
+  * data ends in a terminator */
+ wrapper = g_strdup (CUSTOM_WRAPPER_END);
+ gst_adapter_push (adapter,
+ gst_buffer_new_wrapped (wrapper, strlen (CUSTOM_WRAPPER_END) + 1));
+
+ data = gst_adapter_map (adapter, gst_adapter_available (adapter));
+
+ new_periods =
+ gst_mpdparser_get_external_periods (data,
+ gst_adapter_available (adapter));
+
+ gst_adapter_unmap (adapter);
+ gst_adapter_clear (adapter);
+ gst_object_unref (adapter);
+
+ return new_periods;
+ }
+
+ /* Builds client->periods (the list of GstStreamPeriod) from the MPD's
+  * Period nodes, computing each period's start and duration per
+  * ISO/IEC 23009-1 5.3.2.1: explicit Period@start wins, otherwise the
+  * start is inferred from the previous period's end; the duration comes
+  * from the next period's start, Period@duration, or
+  * MPD@mediaPresentationDuration for the last period. External (xlink)
+  * periods are resolved on the fly and spliced into the node list.
+  * Construction stops early once the period containing @time, @period_idx
+  * or @period_id has been added. Returns TRUE if at least one valid
+  * period was set up. */
+ gboolean
+ gst_mpd_client_setup_media_presentation (GstMPDClient * client,
+ GstClockTime time, gint period_idx, const gchar * period_id)
+ {
+ GstStreamPeriod *stream_period;
+ GstClockTime start, duration;
+ GList *list, *next;
+ guint idx;
+ gboolean ret = FALSE;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+ /* Check if we set up the media presentation far enough already */
+ for (list = client->periods; list; list = list->next) {
+ GstStreamPeriod *stream_period = list->data;
+
+ if ((time != GST_CLOCK_TIME_NONE
+ && stream_period->duration != GST_CLOCK_TIME_NONE
+ && stream_period->start + stream_period->duration >= time)
+ || (time != GST_CLOCK_TIME_NONE && stream_period->start >= time))
+ return TRUE;
+
+ if (period_idx != -1 && stream_period->number >= period_idx)
+ return TRUE;
+
+ if (period_id != NULL && stream_period->period->id != NULL
+ && strcmp (stream_period->period->id, period_id) == 0)
+ return TRUE;
+
+ }
+
+ GST_DEBUG ("Building the list of Periods in the Media Presentation");
+ /* clean the old period list, if any */
+ /* TODO: In theory we could reuse the ones we have so far but that
+ * seems more complicated than the overhead caused here
+ */
+ if (client->periods) {
+ g_list_foreach (client->periods,
+ (GFunc) gst_mpdparser_free_stream_period, NULL);
+ g_list_free (client->periods);
+ client->periods = NULL;
+ }
+
+ idx = 0;
+ start = 0;
+ duration = GST_CLOCK_TIME_NONE;
+
+ /* -1 means "unset"; anything else non-positive is malformed */
+ if (client->mpd_root_node->mediaPresentationDuration <= 0 &&
+ client->mpd_root_node->mediaPresentationDuration != -1) {
+ /* Invalid MPD file: MPD duration is negative or zero */
+ goto syntax_error;
+ }
+
+ for (list = client->mpd_root_node->Periods; list;
+ /* explicitly advanced below */ ) {
+ GstMPDPeriodNode *period_node = list->data;
+ GstMPDPeriodNode *next_period_node = NULL;
+
+ /* Download external period */
+ if (period_node->xlink_href) {
+ GList *new_periods;
+ GList *prev;
+
+ new_periods = gst_mpd_client_fetch_external_periods (client, period_node);
+
+ prev = list->prev;
+ client->mpd_root_node->Periods =
+ g_list_delete_link (client->mpd_root_node->Periods, list);
+ gst_mpd_period_node_free (period_node);
+ period_node = NULL;
+
+ /* Get new next node, we will insert before this */
+ if (prev)
+ next = prev->next;
+ else
+ next = client->mpd_root_node->Periods;
+
+ while (new_periods) {
+ client->mpd_root_node->Periods =
+ g_list_insert_before (client->mpd_root_node->Periods, next,
+ new_periods->data);
+ new_periods = g_list_delete_link (new_periods, new_periods);
+ }
+ next = NULL;
+
+ /* Update our iterator to the first new period if any, or the next */
+ if (prev)
+ list = prev->next;
+ else
+ list = client->mpd_root_node->Periods;
+
+ /* And try again */
+ continue;
+ }
+
+ if (period_node->start != -1) {
+ /* we have a regular period */
+ /* start cannot be smaller than previous start */
+ if (list != g_list_first (client->mpd_root_node->Periods)
+ && start >= period_node->start * GST_MSECOND) {
+ /* Invalid MPD file: duration would be negative or zero */
+ goto syntax_error;
+ }
+ start = period_node->start * GST_MSECOND;
+ } else if (duration != GST_CLOCK_TIME_NONE) {
+ /* start time inferred from previous period, this is still a regular period */
+ start += duration;
+ } else if (idx == 0
+ && client->mpd_root_node->type == GST_MPD_FILE_TYPE_STATIC) {
+ /* first period of a static MPD file, start time is 0 */
+ start = 0;
+ } else if (client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC) {
+ /* this should be a live stream, let this pass */
+ } else {
+ /* this is an 'Early Available Period' */
+ goto early;
+ }
+
+ /* compute duration.
+ If there is a start time for the next period, or this is the last period
+ and mediaPresentationDuration was set, those values will take precedence
+ over a configured period duration in computing this period's duration
+
+ ISO/IEC 23009-1:2014(E), chapter 5.3.2.1
+ "The Period extends until the PeriodStart of the next Period, or until
+ the end of the Media Presentation in the case of the last Period."
+ */
+
+ while ((next = g_list_next (list)) != NULL) {
+ /* try to infer this period duration from the start time of the next period */
+ next_period_node = next->data;
+
+ if (next_period_node->xlink_href) {
+ GList *new_periods;
+
+ new_periods =
+ gst_mpd_client_fetch_external_periods (client, next_period_node);
+
+ client->mpd_root_node->Periods =
+ g_list_delete_link (client->mpd_root_node->Periods, next);
+ gst_mpd_period_node_free (next_period_node);
+ next_period_node = NULL;
+ /* Get new next node, we will insert before this */
+ next = g_list_next (list);
+ while (new_periods) {
+ client->mpd_root_node->Periods =
+ g_list_insert_before (client->mpd_root_node->Periods, next,
+ new_periods->data);
+ new_periods = g_list_delete_link (new_periods, new_periods);
+ }
+
+ /* And try again, getting the next list element which is now our newly
+ * inserted nodes. If any */
+ } else {
+ /* Got the next period and it doesn't have to be downloaded first */
+ break;
+ }
+ }
+
+ if (next_period_node) {
+ if (next_period_node->start != -1) {
+ if (start >= next_period_node->start * GST_MSECOND) {
+ /* Invalid MPD file: duration would be negative or zero */
+ goto syntax_error;
+ }
+ duration = next_period_node->start * GST_MSECOND - start;
+ } else if (period_node->duration != -1) {
+ if (period_node->duration <= 0) {
+ /* Invalid MPD file: duration would be negative or zero */
+ goto syntax_error;
+ }
+ duration = period_node->duration * GST_MSECOND;
+ } else if (client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC) {
+ /* might be a live file, ignore unspecified duration */
+ } else {
+ /* Invalid MPD file! */
+ goto syntax_error;
+ }
+ } else if (client->mpd_root_node->mediaPresentationDuration != -1) {
+ /* last Period of the Media Presentation */
+ if (client->mpd_root_node->mediaPresentationDuration * GST_MSECOND <=
+ start) {
+ /* Invalid MPD file: duration would be negative or zero */
+ goto syntax_error;
+ }
+ duration =
+ client->mpd_root_node->mediaPresentationDuration * GST_MSECOND -
+ start;
+ } else if (period_node->duration != -1) {
+ duration = period_node->duration * GST_MSECOND;
+ } else if (client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC) {
+ /* might be a live file, ignore unspecified duration */
+ } else {
+ /* Invalid MPD file! */
+ GST_ERROR
+ ("Invalid MPD file. The MPD is static without a valid duration");
+ goto syntax_error;
+ }
+
+ stream_period = g_slice_new0 (GstStreamPeriod);
+ client->periods = g_list_append (client->periods, stream_period);
+ stream_period->period = period_node;
+ stream_period->number = idx++;
+ stream_period->start = start;
+ stream_period->duration = duration;
+ ret = TRUE;
+ GST_LOG (" - added Period %d start=%" GST_TIME_FORMAT " duration=%"
+ GST_TIME_FORMAT, idx, GST_TIME_ARGS (start), GST_TIME_ARGS (duration));
+
+ /* stop as soon as we have covered the requested time / index / id */
+ if ((time != GST_CLOCK_TIME_NONE
+ && stream_period->duration != GST_CLOCK_TIME_NONE
+ && stream_period->start + stream_period->duration >= time)
+ || (time != GST_CLOCK_TIME_NONE && stream_period->start >= time))
+ break;
+
+ if (period_idx != -1 && stream_period->number >= period_idx)
+ break;
+
+ if (period_id != NULL && stream_period->period->id != NULL
+ && strcmp (stream_period->period->id, period_id) == 0)
+ break;
+
+ list = list->next;
+ }
+
+ GST_DEBUG
+ ("Found a total of %d valid Periods in the Media Presentation up to this point",
+ idx);
+ return ret;
+
+ early:
+ GST_WARNING
+ ("Found an Early Available Period, skipping the rest of the Media Presentation");
+ return ret;
+
+ syntax_error:
+ GST_WARNING
+ ("Cannot get the duration of the Period %d, skipping the rest of the Media Presentation",
+ idx);
+ return ret;
+ }
+
+ /* Downloads and parses an external AdaptationSet referenced by
+  * @adapt_set's xlink:href, resolving the href against the MPD base URI
+  * plus MPD- and Period-level BaseURLs. Returns the list of newly parsed
+  * GstMPDAdaptationSetNode, or NULL on resolve-to-zero, missing
+  * downloader, or download failure. */
+ static GList *
+ gst_mpd_client_fetch_external_adaptation_set (GstMPDClient * client,
+ GstMPDPeriodNode * period, GstMPDAdaptationSetNode * adapt_set)
+ {
+ GstFragment *download;
+ GstBuffer *adapt_set_buffer;
+ GstMapInfo map;
+ GError *err = NULL;
+ GstUri *base_uri, *uri;
+ gchar *query = NULL;
+ gchar *uri_string;
+ GList *new_adapt_sets = NULL;
+
+ /* ISO/IEC 23009-1:2014 5.5.3 4)
+ * Remove nodes that resolve to nothing when resolving
+ */
+ if (strcmp (adapt_set->xlink_href, "urn:mpeg:dash:resolve-to-zero:2013") == 0) {
+ return NULL;
+ }
+
+ if (!client->downloader) {
+ return NULL;
+ }
+
+ /* Build absolute URI */
+
+ /* Get base URI at the MPD level */
+ base_uri =
+ gst_uri_from_string (client->mpd_base_uri ? client->
+ mpd_base_uri : client->mpd_uri);
+
+ /* combine a BaseURL at the MPD level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, client->mpd_root_node->BaseURLs,
+ &query, 0);
+
+ /* combine a BaseURL at the Period level with the current base url */
+ base_uri =
+ gst_mpd_helper_combine_urls (base_uri, period->BaseURLs, &query, 0);
+
+ uri = gst_uri_from_string_with_base (base_uri, adapt_set->xlink_href);
+ if (query)
+ gst_uri_set_query_string (uri, query);
+ g_free (query);
+ uri_string = gst_uri_to_string (uri);
+ gst_uri_unref (base_uri);
+ gst_uri_unref (uri);
+
+#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
+ download =
+ gst_uri_downloader_fetch_uri (client->downloader,
+ uri_string, client->mpd_uri, NULL, NULL, DEFAULT_ADAPTIVE_RETRY,
+ DEFAULT_ADAPTIVE_TIMEOUT, TRUE, FALSE, TRUE, &err);
+#else
+ download =
+ gst_uri_downloader_fetch_uri (client->downloader,
+ uri_string, client->mpd_uri, TRUE, FALSE, TRUE, &err);
+#endif
+ g_free (uri_string);
+
+ if (!download) {
+ /* NOTE(review): assumes @err is always set on failure — confirm */
+ GST_ERROR ("Failed to download external AdaptationSet node at '%s': %s",
+ adapt_set->xlink_href, err->message);
+ g_clear_error (&err);
+ return NULL;
+ }
+
+ adapt_set_buffer = gst_fragment_get_buffer (download);
+ g_object_unref (download);
+
+ gst_buffer_map (adapt_set_buffer, &map, GST_MAP_READ);
+
+ new_adapt_sets =
+ gst_mpdparser_get_external_adaptation_sets ((const gchar *) map.data,
+ map.size, period);
+
+ gst_buffer_unmap (adapt_set_buffer, &map);
+ gst_buffer_unref (adapt_set_buffer);
+
+ return new_adapt_sets;
+ }
+
+ /* Resolve all xlink:href ("external") AdaptationSet nodes of @period in
+  * place, replacing each external node with the nodes fetched for it, and
+  * return the period's resulting AdaptationSets list. The returned list is
+  * owned by the period, not by the caller. */
+ static GList *
+ gst_mpd_client_get_adaptation_sets_for_period (GstMPDClient * client,
+ GstStreamPeriod * period)
+ {
+ GList *list;
+
+ g_return_val_if_fail (period != NULL, NULL);
+
+ /* Resolve all external adaptation sets of this period. Every user of
+ * the adaptation sets would need to know the content of all adaptation sets
+ * to decide which one to use, so we have to resolve them all here
+ */
+ for (list = period->period->AdaptationSets; list;
+ /* advanced explicitly below */ ) {
+ GstMPDAdaptationSetNode *adapt_set = (GstMPDAdaptationSetNode *) list->data;
+ GList *new_adapt_sets = NULL, *prev, *next;
+
+ /* Plain (non-external) nodes are kept as-is */
+ if (!adapt_set->xlink_href) {
+ list = list->next;
+ continue;
+ }
+
+ new_adapt_sets =
+ gst_mpd_client_fetch_external_adaptation_set (client, period->period,
+ adapt_set);
+
+ /* Unlink and free the external node itself; it is replaced below by
+ * the fetched nodes (if any were returned) */
+ prev = list->prev;
+ period->period->AdaptationSets =
+ g_list_delete_link (period->period->AdaptationSets, list);
+ gst_mpd_adaptation_set_node_free (adapt_set);
+ adapt_set = NULL;
+
+ /* Get new next node, we will insert before this */
+ if (prev)
+ next = prev->next;
+ else
+ next = period->period->AdaptationSets;
+
+ while (new_adapt_sets) {
+ period->period->AdaptationSets =
+ g_list_insert_before (period->period->AdaptationSets, next,
+ new_adapt_sets->data);
+ new_adapt_sets = g_list_delete_link (new_adapt_sets, new_adapt_sets);
+ }
+
+ /* Update our iterator to the first new adaptation set if any, or the next */
+ if (prev)
+ list = prev->next;
+ else
+ list = period->period->AdaptationSets;
+ }
+
+ return period->period->AdaptationSets;
+ }
+
+ /* Return the AdaptationSets of the currently selected stream period (with
+  * external xlink sets resolved), or NULL when no period is available. */
+ GList *
+ gst_mpd_client_get_adaptation_sets (GstMPDClient * client)
+ {
+ GstStreamPeriod *stream_period;
+
+ stream_period = gst_mpd_client_get_stream_period (client);
+ if (stream_period == NULL || stream_period->period == NULL) {
+ GST_DEBUG ("No more Period nodes in the MPD file, terminating...");
+ return NULL;
+ }
+
+ return gst_mpd_client_get_adaptation_sets_for_period (client, stream_period);
+ }
+
+ /* Create a GstActiveStream for @adapt_set, pick an initial representation
+  * (lowest bandwidth, i.e. "slow start") and set it up for download.
+  * On success the new stream is appended to client->active_streams. */
+ gboolean
+ gst_mpd_client_setup_streaming (GstMPDClient * client,
+ GstMPDAdaptationSetNode * adapt_set)
+ {
+ GstMPDRepresentationNode *representation;
+ GList *rep_list = NULL;
+ GstActiveStream *stream;
+
+ rep_list = adapt_set->Representations;
+ if (!rep_list) {
+ GST_WARNING ("Can not retrieve any representation, aborting...");
+ return FALSE;
+ }
+
+ stream = g_slice_new0 (GstActiveStream);
+ gst_mpdparser_init_active_stream_segments (stream);
+
+ stream->baseURL_idx = 0;
+ stream->cur_adapt_set = adapt_set;
+
+ GST_DEBUG ("0. Current stream %p", stream);
+
+ #if 0
+ /* fast start */
+ representation =
+ gst_mpdparser_get_representation_with_max_bandwidth (rep_list,
+ stream->max_bandwidth);
+
+ if (!representation) {
+ GST_WARNING
+ ("Can not retrieve a representation with the requested bandwidth");
+ representation = gst_mpd_client_get_lowest_representation (rep_list);
+ }
+ #else
+ /* slow start */
+ representation = gst_mpd_client_get_lowest_representation (rep_list);
+ #endif
+
+ if (!representation) {
+ GST_WARNING ("No valid representation in the MPD file, aborting...");
+ gst_mpdparser_free_active_stream (stream);
+ return FALSE;
+ }
+ stream->mimeType =
+ gst_mpdparser_representation_get_mimetype (adapt_set, representation);
+ if (stream->mimeType == GST_STREAM_UNKNOWN) {
+ GST_WARNING ("Unknown mime type in the representation, aborting...");
+ gst_mpdparser_free_active_stream (stream);
+ return FALSE;
+ }
+
+ /* NOTE(review): the stream is appended before setup_representation, so on
+ * the failure path below it remains in active_streams and is not freed
+ * here — presumably cleaned up with the client; confirm. */
+ client->active_streams = g_list_append (client->active_streams, stream);
+ if (!gst_mpd_client_setup_representation (client, stream, representation)) {
+ GST_WARNING ("Failed to setup the representation, aborting...");
+ return FALSE;
+ }
+
+ GST_INFO ("Successfully setup the download pipeline for mimeType %d",
+ stream->mimeType);
+
+ return TRUE;
+ }
+
+ /* Seek @stream to the segment containing @ts, honouring the
+  * GST_SEEK_FLAG_SNAP_* flags. Two cases: an explicit segment list
+  * (stream->segments, possibly with per-segment repeat counts) or a
+  * fixed-duration SegmentTemplate without a SegmentTimeline. On success
+  * updates segment_index/segment_repeat_index and stores the selected
+  * segment's start time in @final_ts (if non-NULL). Returns FALSE when the
+  * requested position falls after the last segment. */
+ gboolean
+ gst_mpd_client_stream_seek (GstMPDClient * client, GstActiveStream * stream,
+ gboolean forward, GstSeekFlags flags, GstClockTime ts,
+ GstClockTime * final_ts)
+ {
+ gint index = 0;
+ gint repeat_index = 0;
+ GstMediaSegment *selectedChunk = NULL;
+
+ g_return_val_if_fail (stream != NULL, 0);
+
+ if (stream->segments) {
+ /* Linear scan for the first segment whose end time covers @ts */
+ for (index = 0; index < stream->segments->len; index++) {
+ gboolean in_segment = FALSE;
+ GstMediaSegment *segment = g_ptr_array_index (stream->segments, index);
+ GstClockTime end_time;
+
+ GST_DEBUG ("Looking at fragment sequence chunk %d / %d", index,
+ stream->segments->len);
+
+ end_time =
+ gst_mpd_client_get_segment_end_time (client, stream->segments,
+ segment, index);
+
+ /* avoid downloading another fragment just for 1ns in reverse mode */
+ if (forward)
+ in_segment = ts < end_time;
+ else
+ in_segment = ts <= end_time;
+
+ if (in_segment) {
+ GstClockTime chunk_time;
+
+ selectedChunk = segment;
+ /* Position inside a repeated segment run */
+ repeat_index = (ts - segment->start) / segment->duration;
+
+ chunk_time = segment->start + segment->duration * repeat_index;
+
+ /* At the end of a segment in reverse mode, start from the previous fragment */
+ if (!forward && repeat_index > 0
+ && ((ts - segment->start) % segment->duration == 0))
+ repeat_index--;
+
+ if ((flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST) {
+ /* Snap to whichever neighbouring chunk start is closer to @ts */
+ if (repeat_index + 1 < segment->repeat) {
+ if (ts - chunk_time > chunk_time + segment->duration - ts)
+ repeat_index++;
+ } else if (index + 1 < stream->segments->len) {
+ GstMediaSegment *next_segment =
+ g_ptr_array_index (stream->segments, index + 1);
+
+ if (ts - chunk_time > next_segment->start - ts) {
+ repeat_index = 0;
+ selectedChunk = next_segment;
+ index++;
+ }
+ }
+ } else if (((forward && flags & GST_SEEK_FLAG_SNAP_AFTER) ||
+ (!forward && flags & GST_SEEK_FLAG_SNAP_BEFORE)) &&
+ ts != chunk_time) {
+
+ /* Snap past @ts in the direction of playback */
+ if (repeat_index + 1 < segment->repeat) {
+ repeat_index++;
+ } else {
+ repeat_index = 0;
+ if (index + 1 >= stream->segments->len) {
+ selectedChunk = NULL;
+ } else {
+ selectedChunk = g_ptr_array_index (stream->segments, ++index);
+ }
+ }
+ }
+ break;
+ }
+ }
+
+ if (selectedChunk == NULL) {
+ stream->segment_index = stream->segments->len;
+ stream->segment_repeat_index = 0;
+ GST_DEBUG ("Seek to after last segment");
+ return FALSE;
+ }
+
+ if (final_ts)
+ *final_ts = selectedChunk->start + selectedChunk->duration * repeat_index;
+ } else {
+ /* SegmentTemplate with a constant duration: the index is computed
+ * arithmetically instead of scanning a list */
+ GstClockTime duration =
+ gst_mpd_client_get_segment_duration (client, stream, NULL);
+ GstStreamPeriod *stream_period = gst_mpd_client_get_stream_period (client);
+ guint segments_count = gst_mpd_client_get_segments_counts (client, stream);
+ GstClockTime index_time;
+
+ g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+ (stream->cur_seg_template)->SegmentTimeline == NULL, FALSE);
+ if (!GST_CLOCK_TIME_IS_VALID (duration)) {
+ return FALSE;
+ }
+
+ /* @ts is absolute; make it relative to the period start */
+ if (ts > stream_period->start)
+ ts -= stream_period->start;
+ else
+ ts = 0;
+
+ index = ts / duration;
+
+ /* At the end of a segment in reverse mode, start from the previous fragment */
+ if (!forward && index > 0 && ts % duration == 0)
+ index--;
+
+ index_time = index * duration;
+
+ if ((flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST) {
+ if (ts - index_time > index_time + duration - ts)
+ index++;
+ } else if (((forward && flags & GST_SEEK_FLAG_SNAP_AFTER) ||
+ (!forward && flags & GST_SEEK_FLAG_SNAP_BEFORE))
+ && ts != index_time) {
+ index++;
+ }
+
+ if (segments_count > 0 && index >= segments_count) {
+ stream->segment_index = segments_count;
+ stream->segment_repeat_index = 0;
+ GST_DEBUG ("Seek to after last segment");
+ return FALSE;
+ }
+ if (final_ts)
+ *final_ts = index * duration;
+ }
+
+ stream->segment_repeat_index = repeat_index;
+ stream->segment_index = index;
+
+ return TRUE;
+ }
+
+ /* Return (t2 - t1) in GStreamer clock-time units (nanoseconds); negative
+  * when @t2 precedes @t1. g_date_time_difference() yields microseconds,
+  * hence the GST_USECOND scaling. */
+ gint64
+ gst_mpd_client_calculate_time_difference (const GstDateTime * t1,
+ const GstDateTime * t2)
+ {
+ GDateTime *gdt1, *gdt2;
+ GTimeSpan diff;
+
+ g_assert (t1 != NULL && t2 != NULL);
+ gdt1 = gst_date_time_to_g_date_time ((GstDateTime *) t1);
+ gdt2 = gst_date_time_to_g_date_time ((GstDateTime *) t2);
+ diff = g_date_time_difference (gdt2, gdt1);
+ g_date_time_unref (gdt1);
+ g_date_time_unref (gdt2);
+ return diff * GST_USECOND;
+ }
+
+ /* Return a newly allocated GstDateTime equal to @t1 shifted by @usecs
+  * microseconds (g_date_time_add takes a GTimeSpan in microseconds).
+  * Caller owns the returned value. */
+ GstDateTime *
+ gst_mpd_client_add_time_difference (GstDateTime * t1, gint64 usecs)
+ {
+ GDateTime *gdt;
+ GDateTime *gdt2;
+ GstDateTime *rv;
+
+ g_assert (t1 != NULL);
+ gdt = gst_date_time_to_g_date_time (t1);
+ g_assert (gdt != NULL);
+ gdt2 = g_date_time_add (gdt, usecs);
+ g_assert (gdt2 != NULL);
+ g_date_time_unref (gdt);
+ rv = gst_date_time_new_from_g_date_time (gdt2);
+
+ /* Don't g_date_time_unref(gdt2) because gst_date_time_new_from_g_date_time takes
+ * ownership of the GDateTime pointer.
+ */
+
+ return rv;
+ }
+
+ /* Store in @ts the end timestamp of the last fragment of stream
+  * @stream_idx: either derived from the last entry of the segment list (and
+  * its repeat count) or, when no list exists or the repeat is negative,
+  * from the period's start + duration. */
+ gboolean
+ gst_mpd_client_get_last_fragment_timestamp_end (GstMPDClient * client,
+ guint stream_idx, GstClockTime * ts)
+ {
+ GstActiveStream *stream;
+ gint segment_idx;
+ GstMediaSegment *currentChunk;
+ GstStreamPeriod *stream_period;
+
+ GST_DEBUG ("Stream index: %i", stream_idx);
+ stream = g_list_nth_data (client->active_streams, stream_idx);
+ g_return_val_if_fail (stream != NULL, 0);
+
+ if (!stream->segments) {
+ stream_period = gst_mpd_client_get_stream_period (client);
+ *ts = stream_period->start + stream_period->duration;
+ } else {
+ segment_idx = gst_mpd_client_get_segments_counts (client, stream) - 1;
+ if (segment_idx >= stream->segments->len) {
+ GST_WARNING ("Segment index %d is outside of segment list of length %d",
+ segment_idx, stream->segments->len);
+ return FALSE;
+ }
+ currentChunk = g_ptr_array_index (stream->segments, segment_idx);
+
+ if (currentChunk->repeat >= 0) {
+ /* End of the last repeat of the last segment */
+ *ts =
+ currentChunk->start + (currentChunk->duration * (1 +
+ currentChunk->repeat));
+ } else {
+ /* 5.3.9.6.1: negative repeat means repeat till the end of the
+ * period, or the next update of the MPD (which I think is
+ * implicit, as this will all get deleted/recreated), or the
+ * start of the next segment, if any. */
+ stream_period = gst_mpd_client_get_stream_period (client);
+ *ts = stream_period->start + stream_period->duration;
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* Store in @ts the start timestamp of the next fragment to download for
+  * stream @stream_idx. Returns FALSE when the stream is already past its
+  * last segment. */
+ gboolean
+ gst_mpd_client_get_next_fragment_timestamp (GstMPDClient * client,
+ guint stream_idx, GstClockTime * ts)
+ {
+ GstActiveStream *stream;
+ GstMediaSegment *currentChunk;
+
+ GST_DEBUG ("Stream index: %i", stream_idx);
+ stream = g_list_nth_data (client->active_streams, stream_idx);
+ g_return_val_if_fail (stream != NULL, 0);
+
+ if (stream->segments) {
+ GST_DEBUG ("Looking for fragment sequence chunk %d / %d",
+ stream->segment_index, stream->segments->len);
+ if (stream->segment_index >= stream->segments->len)
+ return FALSE;
+ currentChunk = g_ptr_array_index (stream->segments, stream->segment_index);
+
+ /* Account for the position inside a repeated segment run */
+ *ts =
+ currentChunk->start +
+ (currentChunk->duration * stream->segment_repeat_index);
+ } else {
+ /* Fixed-duration SegmentTemplate (no SegmentTimeline) */
+ GstClockTime duration =
+ gst_mpd_client_get_segment_duration (client, stream, NULL);
+ guint segments_count = gst_mpd_client_get_segments_counts (client, stream);
+
+ g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+ (stream->cur_seg_template)->SegmentTimeline == NULL, FALSE);
+ if (!GST_CLOCK_TIME_IS_VALID (duration) || (segments_count > 0
+ && stream->segment_index >= segments_count)) {
+ return FALSE;
+ }
+ *ts = stream->segment_index * duration;
+ }
+
+ return TRUE;
+ }
+
+ /* Return the presentationTimeOffset of active stream @stream_idx, or 0 on
+  * invalid arguments. */
+ GstClockTime
+ gst_mpd_client_get_stream_presentation_offset (GstMPDClient * client,
+ guint stream_idx)
+ {
+ GstActiveStream *stream = NULL;
+
+ g_return_val_if_fail (client != NULL, 0);
+ g_return_val_if_fail (client->active_streams != NULL, 0);
+ stream = g_list_nth_data (client->active_streams, stream_idx);
+ g_return_val_if_fail (stream != NULL, 0);
+
+ return stream->presentationTimeOffset;
+ }
+
+ /* Return the start time of the currently selected stream period, or 0 when
+  * no period is available. */
+ GstClockTime
+ gst_mpd_client_get_period_start_time (GstMPDClient * client)
+ {
+ GstStreamPeriod *stream_period = NULL;
+
+ g_return_val_if_fail (client != NULL, 0);
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, 0);
+
+ return stream_period->start;
+ }
+
+ /**
+ * gst_mpd_client_get_utc_timing_sources:
+ * @client: #GstMPDClient to check for UTCTiming elements
+ * @methods: A bit mask of #GstMPDUTCTimingType that specifies the methods
+ * to search for.
+ * @selected_method: (nullable): The selected method
+ * Returns: (transfer none): A NULL terminated array of URLs of servers
+ * that use the selected method to provide a realtime clock.
+ *
+ * Searches the UTCTiming elements found in the manifest for an element
+ * that uses one of the UTC timing methods specified in @methods.
+ * If multiple UTCTiming elements are present that support one of the
+ * methods specified in @methods, the first one is returned.
+ *
+ * Since: 1.6
+ */
+ gchar **
+ gst_mpd_client_get_utc_timing_sources (GstMPDClient * client,
+ guint methods, GstMPDUTCTimingType * selected_method)
+ {
+ GList *list;
+
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (client->mpd_root_node != NULL, NULL);
+ for (list = g_list_first (client->mpd_root_node->UTCTimings); list;
+ list = g_list_next (list)) {
+ const GstMPDUTCTimingNode *node = (const GstMPDUTCTimingNode *) list->data;
+ /* First node whose method intersects the requested mask wins */
+ if (node->method & methods) {
+ if (selected_method) {
+ *selected_method = node->method;
+ }
+ return node->urls;
+ }
+ }
+ return NULL;
+ }
+
+
+ /* Fill @fragment with everything needed to download the next fragment of
+  * stream @indexStream: absolute media URI (built from baseURL + either a
+  * SegmentURL or a SegmentTemplate expansion), optional index URI, byte
+  * ranges, timestamp and duration. Returns FALSE when there is no further
+  * fragment or no way to construct a URL. */
+ gboolean
+ gst_mpd_client_get_next_fragment (GstMPDClient * client,
+ guint indexStream, GstMediaFragmentInfo * fragment)
+ {
+ GstActiveStream *stream = NULL;
+ GstMediaSegment *currentChunk;
+ gchar *mediaURL = NULL;
+ gchar *indexURL = NULL;
+ GstUri *base_url, *frag_url;
+
+ /* select stream */
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->active_streams != NULL, FALSE);
+ stream = g_list_nth_data (client->active_streams, indexStream);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (stream->cur_representation != NULL, FALSE);
+
+ if (stream->segments) {
+ GST_DEBUG ("Looking for fragment sequence chunk %d / %d",
+ stream->segment_index, stream->segments->len);
+ if (stream->segment_index >= stream->segments->len)
+ return FALSE;
+ } else {
+ /* Fixed-duration SegmentTemplate (no SegmentTimeline) */
+ GstClockTime duration = gst_mpd_client_get_segment_duration (client,
+ stream, NULL);
+ guint segments_count = gst_mpd_client_get_segments_counts (client, stream);
+
+ g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+ (stream->cur_seg_template)->SegmentTimeline == NULL, FALSE);
+ if (!GST_CLOCK_TIME_IS_VALID (duration) || (segments_count > 0
+ && stream->segment_index >= segments_count)) {
+ return FALSE;
+ }
+ fragment->duration = duration;
+ }
+
+ /* FIXME rework discont checking */
+ /* fragment->discontinuity = segment_idx != currentChunk.number; */
+ fragment->range_start = 0;
+ fragment->range_end = -1;
+ fragment->index_uri = NULL;
+ fragment->index_range_start = 0;
+ fragment->index_range_end = -1;
+
+ if (stream->segments) {
+ currentChunk = g_ptr_array_index (stream->segments, stream->segment_index);
+
+ GST_DEBUG ("currentChunk->SegmentURL = %p", currentChunk->SegmentURL);
+ if (currentChunk->SegmentURL != NULL) {
+ /* Explicit SegmentList entry: take URLs directly */
+ mediaURL =
+ g_strdup (gst_mpdparser_get_mediaURL (stream,
+ currentChunk->SegmentURL));
+ indexURL = g_strdup (currentChunk->SegmentURL->index);
+ } else if (stream->cur_seg_template != NULL) {
+ /* Expand $Number$/$Time$/$Bandwidth$ etc. from the template,
+ * offset by the position inside a repeated segment run */
+ mediaURL =
+ gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+ media, stream->cur_representation->id,
+ currentChunk->number + stream->segment_repeat_index,
+ stream->cur_representation->bandwidth,
+ currentChunk->scale_start +
+ stream->segment_repeat_index * currentChunk->scale_duration);
+ if (stream->cur_seg_template->index) {
+ indexURL =
+ gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+ index, stream->cur_representation->id,
+ currentChunk->number + stream->segment_repeat_index,
+ stream->cur_representation->bandwidth,
+ currentChunk->scale_start +
+ stream->segment_repeat_index * currentChunk->scale_duration);
+ }
+ }
+ GST_DEBUG ("mediaURL = %s", mediaURL);
+ GST_DEBUG ("indexURL = %s", indexURL);
+
+ fragment->timestamp =
+ currentChunk->start +
+ stream->segment_repeat_index * currentChunk->duration;
+ fragment->duration = currentChunk->duration;
+ if (currentChunk->SegmentURL) {
+ if (currentChunk->SegmentURL->mediaRange) {
+ fragment->range_start =
+ currentChunk->SegmentURL->mediaRange->first_byte_pos;
+ fragment->range_end =
+ currentChunk->SegmentURL->mediaRange->last_byte_pos;
+ }
+ if (currentChunk->SegmentURL->indexRange) {
+ fragment->index_range_start =
+ currentChunk->SegmentURL->indexRange->first_byte_pos;
+ fragment->index_range_end =
+ currentChunk->SegmentURL->indexRange->last_byte_pos;
+ }
+ }
+ } else {
+ if (stream->cur_seg_template != NULL) {
+ mediaURL =
+ gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+ media, stream->cur_representation->id,
+ stream->segment_index +
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_seg_template)->startNumber,
+ stream->cur_representation->bandwidth,
+ stream->segment_index * fragment->duration);
+ if (stream->cur_seg_template->index) {
+ indexURL =
+ gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+ index, stream->cur_representation->id,
+ stream->segment_index +
+ GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+ cur_seg_template)->startNumber,
+ stream->cur_representation->bandwidth,
+ stream->segment_index * fragment->duration);
+ }
+ } else {
+ /* Neither a segment list nor a template: no URL can be built */
+ return FALSE;
+ }
+
+ GST_DEBUG ("mediaURL = %s", mediaURL);
+ GST_DEBUG ("indexURL = %s", indexURL);
+
+ fragment->timestamp = stream->segment_index * fragment->duration;
+ }
+
+ /* Resolve the (possibly relative) media URL against the stream baseURL */
+ base_url = gst_uri_from_string (stream->baseURL);
+ frag_url = gst_uri_from_string_with_base (base_url, mediaURL);
+ g_free (mediaURL);
+ if (stream->queryURL) {
+ frag_url = gst_uri_make_writable (frag_url);
+ gst_uri_set_query_string (frag_url, stream->queryURL);
+ }
+ fragment->uri = gst_uri_to_string (frag_url);
+ gst_uri_unref (frag_url);
+
+ if (indexURL != NULL) {
+ frag_url = gst_uri_make_writable (gst_uri_from_string_with_base (base_url,
+ indexURL));
+ gst_uri_set_query_string (frag_url, stream->queryURL);
+ fragment->index_uri = gst_uri_to_string (frag_url);
+ gst_uri_unref (frag_url);
+ g_free (indexURL);
+ } else if (indexURL == NULL && (fragment->index_range_start
+ || fragment->index_range_end != -1)) {
+ /* index has no specific URL but has a range, we should only use this if
+ * the media also has a range, otherwise we are serving some data twice
+ * (in the media fragment and again in the index) */
+ if (!(fragment->range_start || fragment->range_end != -1)) {
+ GST_WARNING ("Ignoring index ranges because there isn't a media range "
+ "and URIs would be the same");
+ /* removing index information */
+ fragment->index_range_start = 0;
+ fragment->index_range_end = -1;
+ }
+ }
+
+ gst_uri_unref (base_url);
+
+ GST_DEBUG ("Loading chunk with URL %s", fragment->uri);
+
+ return TRUE;
+ }
+
+ /* Return TRUE if @stream has at least one more segment in the given
+  * playback direction; takes the repeat count of the last segment into
+  * account when moving forward. */
+ gboolean
+ gst_mpd_client_has_next_segment (GstMPDClient * client,
+ GstActiveStream * stream, gboolean forward)
+ {
+ if (forward) {
+ guint segments_count = gst_mpd_client_get_segments_counts (client, stream);
+
+ /* On the last list entry, remaining repeats may still be available */
+ if (segments_count > 0 && stream->segments
+ && stream->segment_index + 1 == segments_count) {
+ GstMediaSegment *segment;
+
+ segment = g_ptr_array_index (stream->segments, stream->segment_index);
+ if (segment->repeat >= 0
+ && stream->segment_repeat_index >= segment->repeat)
+ return FALSE;
+ } else if (segments_count > 0
+ && stream->segment_index + 1 >= segments_count) {
+ return FALSE;
+ }
+ } else {
+ if (stream->segment_index < 0)
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /* Move @stream one fragment forward or backward, stepping through repeat
+  * indices before segment indices. Returns GST_FLOW_EOS when the stream
+  * runs out of segments in the requested direction, GST_FLOW_OK otherwise. */
+ GstFlowReturn
+ gst_mpd_client_advance_segment (GstMPDClient * client, GstActiveStream * stream,
+ gboolean forward)
+ {
+ GstMediaSegment *segment;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint segments_count = gst_mpd_client_get_segments_counts (client, stream);
+
+ GST_DEBUG ("Advancing segment. Current: %d / %d r:%d", stream->segment_index,
+ segments_count, stream->segment_repeat_index);
+
+ /* handle special cases first */
+ if (forward) {
+ if (segments_count > 0 && stream->segment_index >= segments_count) {
+ ret = GST_FLOW_EOS;
+ goto done;
+ }
+
+ /* Template-only streams have no segment list: plain index arithmetic */
+ if (stream->segments == NULL) {
+ if (stream->segment_index < 0) {
+ stream->segment_index = 0;
+ } else {
+ stream->segment_index++;
+ if (segments_count > 0 && stream->segment_index >= segments_count) {
+ ret = GST_FLOW_EOS;
+ }
+ }
+ goto done;
+ }
+
+ /* special case for when playback direction is reverted right at *
+ * the end of the segment list */
+ if (stream->segment_index < 0) {
+ stream->segment_index = 0;
+ goto done;
+ }
+ } else {
+ if (stream->segments == NULL)
+ stream->segment_index--;
+ if (stream->segment_index < 0) {
+ stream->segment_index = -1;
+ ret = GST_FLOW_EOS;
+ goto done;
+ }
+ if (stream->segments == NULL)
+ goto done;
+
+ /* special case for when playback direction is reverted right at *
+ * the end of the segment list */
+ if (stream->segment_index >= segments_count) {
+ stream->segment_index = segments_count - 1;
+ segment = g_ptr_array_index (stream->segments, stream->segment_index);
+ if (segment->repeat >= 0) {
+ stream->segment_repeat_index = segment->repeat;
+ } else {
+ /* Negative repeat: derive the repeat count from the segment's
+ * time span */
+ GstClockTime start = segment->start;
+ GstClockTime end =
+ gst_mpd_client_get_segment_end_time (client, stream->segments,
+ segment,
+ stream->segment_index);
+ stream->segment_repeat_index =
+ (guint) (end - start) / segment->duration;
+ }
+ goto done;
+ }
+ }
+
+ /* for the normal cases we can get the segment safely here */
+ segment = g_ptr_array_index (stream->segments, stream->segment_index);
+ if (forward) {
+ if (segment->repeat >= 0 && stream->segment_repeat_index >= segment->repeat) {
+ stream->segment_repeat_index = 0;
+ stream->segment_index++;
+ if (segments_count > 0 && stream->segment_index >= segments_count) {
+ ret = GST_FLOW_EOS;
+ goto done;
+ }
+ } else {
+ stream->segment_repeat_index++;
+ }
+ } else {
+ if (stream->segment_repeat_index == 0) {
+ stream->segment_index--;
+ if (stream->segment_index < 0) {
+ ret = GST_FLOW_EOS;
+ goto done;
+ }
+
+ segment = g_ptr_array_index (stream->segments, stream->segment_index);
+ /* negative repeats only seem to make sense at the end of a list,
+ * so this one will probably not be. Needs some sanity checking
+ * when loading the XML data. */
+ if (segment->repeat >= 0) {
+ stream->segment_repeat_index = segment->repeat;
+ } else {
+ GstClockTime start = segment->start;
+ GstClockTime end =
+ gst_mpd_client_get_segment_end_time (client, stream->segments,
+ segment,
+ stream->segment_index);
+ stream->segment_repeat_index =
+ (guint) (end - start) / segment->duration;
+ }
+ } else {
+ stream->segment_repeat_index--;
+ }
+ }
+
+ done:
+ GST_DEBUG ("Advanced to segment: %d / %d r:%d (ret: %s)",
+ stream->segment_index, segments_count,
+ stream->segment_repeat_index, gst_flow_get_name (ret));
+ return ret;
+ }
+
+ /* Produce the URI (and optional byte range) of the initialization header
+  * for stream @stream_idx, from SegmentBase Initialization, from the
+  * SegmentBase indexRange (everything before the index), or from the
+  * SegmentTemplate initialization attribute. Returns FALSE when no header
+  * URI can be determined; caller owns *uri. */
+ gboolean
+ gst_mpd_client_get_next_header (GstMPDClient * client, gchar ** uri,
+ guint stream_idx, gint64 * range_start, gint64 * range_end)
+ {
+ GstActiveStream *stream;
+ GstStreamPeriod *stream_period;
+
+ stream = gst_mpd_client_get_active_stream_by_index (client, stream_idx);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (stream->cur_representation != NULL, FALSE);
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, FALSE);
+ g_return_val_if_fail (stream_period->period != NULL, FALSE);
+
+ *range_start = 0;
+ *range_end = -1;
+
+ GST_DEBUG ("Looking for current representation header");
+ *uri = NULL;
+ if (stream->cur_segment_base) {
+ if (stream->cur_segment_base->Initialization) {
+ *uri =
+ g_strdup (gst_mpdparser_get_initializationURL (stream,
+ stream->cur_segment_base->Initialization));
+ if (stream->cur_segment_base->Initialization->range) {
+ *range_start =
+ stream->cur_segment_base->Initialization->range->first_byte_pos;
+ *range_end =
+ stream->cur_segment_base->Initialization->range->last_byte_pos;
+ }
+ } else if (stream->cur_segment_base->indexRange) {
+ /* NOTE(review): Initialization is NULL in this branch; presumably
+ * gst_mpdparser_get_initializationURL falls back to the base URL
+ * then — confirm. The header is everything up to the index range. */
+ *uri =
+ g_strdup (gst_mpdparser_get_initializationURL (stream,
+ stream->cur_segment_base->Initialization));
+ *range_start = 0;
+ *range_end = stream->cur_segment_base->indexRange->first_byte_pos - 1;
+ }
+ } else if (stream->cur_seg_template
+ && stream->cur_seg_template->initialization) {
+ *uri =
+ gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+ initialization, stream->cur_representation->id, 0,
+ stream->cur_representation->bandwidth, 0);
+ }
+
+ return *uri == NULL ? FALSE : TRUE;
+ }
+
+ /* Produce the URI (and byte range) of the segment index for stream
+  * @stream_idx, from SegmentBase RepresentationIndex/indexRange or from the
+  * SegmentTemplate index attribute. Returns FALSE when no index URI can be
+  * determined; caller owns *uri. */
+ gboolean
+ gst_mpd_client_get_next_header_index (GstMPDClient * client, gchar ** uri,
+ guint stream_idx, gint64 * range_start, gint64 * range_end)
+ {
+ GstActiveStream *stream;
+ GstStreamPeriod *stream_period;
+
+ stream = gst_mpd_client_get_active_stream_by_index (client, stream_idx);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (stream->cur_representation != NULL, FALSE);
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, FALSE);
+ g_return_val_if_fail (stream_period->period != NULL, FALSE);
+
+ *range_start = 0;
+ *range_end = -1;
+
+ GST_DEBUG ("Looking for current representation index");
+ *uri = NULL;
+ if (stream->cur_segment_base && stream->cur_segment_base->indexRange) {
+ *uri =
+ g_strdup (gst_mpdparser_get_initializationURL (stream,
+ stream->cur_segment_base->RepresentationIndex));
+ *range_start = stream->cur_segment_base->indexRange->first_byte_pos;
+ *range_end = stream->cur_segment_base->indexRange->last_byte_pos;
+ } else if (stream->cur_seg_template && stream->cur_seg_template->index) {
+ *uri =
+ gst_mpdparser_build_URL_from_template (stream->cur_seg_template->index,
+ stream->cur_representation->id, 0,
+ stream->cur_representation->bandwidth, 0);
+ }
+
+ return *uri == NULL ? FALSE : TRUE;
+ }
+
+ /* Return the duration of the fragment at the stream's current segment
+  * index, or 0 when the index is out of range / the duration is unknown. */
+ GstClockTime
+ gst_mpd_client_get_next_fragment_duration (GstMPDClient * client,
+ GstActiveStream * stream)
+ {
+ GstMediaSegment *media_segment = NULL;
+ gint seg_idx;
+
+ g_return_val_if_fail (stream != NULL, 0);
+
+ seg_idx = stream->segment_index;
+
+ if (stream->segments) {
+ if (seg_idx < stream->segments->len && seg_idx >= 0)
+ media_segment = g_ptr_array_index (stream->segments, seg_idx);
+
+ return media_segment == NULL ? 0 : media_segment->duration;
+ } else {
+ /* Fixed-duration SegmentTemplate (no SegmentTimeline) */
+ GstClockTime duration =
+ gst_mpd_client_get_segment_duration (client, stream, NULL);
+ guint segments_count = gst_mpd_client_get_segments_counts (client, stream);
+
+ g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+ (stream->cur_seg_template)->SegmentTimeline == NULL, 0);
+
+ if (!GST_CLOCK_TIME_IS_VALID (duration) || (segments_count > 0
+ && seg_idx >= segments_count)) {
+ return 0;
+ }
+ return duration;
+ }
+ }
+
+ /* Return the MPD's mediaPresentationDuration converted from milliseconds
+  * to GstClockTime, or GST_CLOCK_TIME_NONE when the manifest does not
+  * declare one (e.g. live streams). */
+ GstClockTime
+ gst_mpd_client_get_media_presentation_duration (GstMPDClient * client)
+ {
+ GstClockTime duration;
+
+ g_return_val_if_fail (client != NULL, GST_CLOCK_TIME_NONE);
+
+ if (client->mpd_root_node->mediaPresentationDuration != -1) {
+ /* Attribute is stored in milliseconds */
+ duration = client->mpd_root_node->mediaPresentationDuration * GST_MSECOND;
+ } else {
+ /* We can only get the duration for on-demand streams */
+ duration = GST_CLOCK_TIME_NONE;
+ }
+
+ return duration;
+ }
+
+ /* Select the stream period whose Period@id equals @period_id, (re)building
+  * the media presentation up to it first. Returns FALSE if setup fails or
+  * no period with that id exists. */
+ gboolean
+ gst_mpd_client_set_period_id (GstMPDClient * client, const gchar * period_id)
+ {
+ GstStreamPeriod *next_stream_period;
+ gboolean ret = FALSE;
+ GList *iter;
+ guint period_idx;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->periods != NULL, FALSE);
+ g_return_val_if_fail (period_id != NULL, FALSE);
+
+ if (!gst_mpd_client_setup_media_presentation (client, GST_CLOCK_TIME_NONE, -1,
+ period_id))
+ return FALSE;
+
+ for (period_idx = 0, iter = client->periods; iter;
+ period_idx++, iter = g_list_next (iter)) {
+ next_stream_period = iter->data;
+
+ if (next_stream_period->period->id
+ && strcmp (next_stream_period->period->id, period_id) == 0) {
+ ret = TRUE;
+ client->period_idx = period_idx;
+ break;
+ }
+ }
+
+ return ret;
+ }
+
+ /* Select the stream period at @period_idx, (re)building the media
+  * presentation up to it first. Returns FALSE if setup fails or the index
+  * does not exist. */
+ gboolean
+ gst_mpd_client_set_period_index (GstMPDClient * client, guint period_idx)
+ {
+ GstStreamPeriod *next_stream_period;
+ gboolean ret = FALSE;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->periods != NULL, FALSE);
+
+ if (!gst_mpd_client_setup_media_presentation (client, -1, period_idx, NULL))
+ return FALSE;
+
+ next_stream_period = g_list_nth_data (client->periods, period_idx);
+ if (next_stream_period != NULL) {
+ client->period_idx = period_idx;
+ ret = TRUE;
+ }
+
+ return ret;
+ }
+
+ /* Return the index of the currently selected stream period. */
+ guint
+ gst_mpd_client_get_period_index (GstMPDClient * client)
+ {
+ guint period_idx;
+
+ g_return_val_if_fail (client != NULL, 0);
+ period_idx = client->period_idx;
+
+ return period_idx;
+ }
+
+ /* Return the Period@id of the currently selected period, or NULL when it
+  * has none. The string is owned by the period node — do not free. */
+ const gchar *
+ gst_mpd_client_get_period_id (GstMPDClient * client)
+ {
+ GstStreamPeriod *period;
+ gchar *period_id = NULL;
+
+ g_return_val_if_fail (client != NULL, 0);
+ period = g_list_nth_data (client->periods, client->period_idx);
+ if (period && period->period)
+ period_id = period->period->id;
+
+ return period_id;
+ }
+
+ /* Return TRUE if a stream period exists after the current one; triggers a
+  * media-presentation setup up to that period first. */
+ gboolean
+ gst_mpd_client_has_next_period (GstMPDClient * client)
+ {
+ GList *next_stream_period;
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->periods != NULL, FALSE);
+
+ if (!gst_mpd_client_setup_media_presentation (client, GST_CLOCK_TIME_NONE,
+ client->period_idx + 1, NULL))
+ return FALSE;
+
+ next_stream_period =
+ g_list_nth_data (client->periods, client->period_idx + 1);
+ return next_stream_period != NULL;
+ }
+
+ /* Return TRUE if a stream period exists before the current one; triggers a
+  * media-presentation setup up to that period first. */
+ gboolean
+ gst_mpd_client_has_previous_period (GstMPDClient * client)
+ {
+ GList *next_stream_period;
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->periods != NULL, FALSE);
+
+ if (!gst_mpd_client_setup_media_presentation (client, GST_CLOCK_TIME_NONE,
+ client->period_idx - 1, NULL))
+ return FALSE;
+
+ next_stream_period =
+ g_list_nth_data (client->periods, client->period_idx - 1);
+
+ return next_stream_period != NULL;
+ }
+
+ /* Return the list position of the Representation with the smallest
+  * bandwidth, or -1 when the list is empty / contains no valid entry. */
+ gint
+ gst_mpd_client_get_rep_idx_with_min_bandwidth (GList * Representations)
+ {
+ GList *list = NULL, *lowest = NULL;
+ GstMPDRepresentationNode *rep = NULL;
+ gint lowest_bandwidth = -1;
+
+ if (Representations == NULL)
+ return -1;
+
+ /* "!lowest" makes the first valid entry seed lowest_bandwidth */
+ for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+ rep = (GstMPDRepresentationNode *) list->data;
+ if (rep && (!lowest || rep->bandwidth < lowest_bandwidth)) {
+ lowest = list;
+ lowest_bandwidth = rep->bandwidth;
+ }
+ }
+
+ return lowest ? g_list_position (Representations, lowest) : -1;
+ }
+
+ /* Return the list position of the highest-bandwidth Representation that
+  * does not exceed @max_bandwidth nor the given width/height/framerate
+  * caps (a cap <= 0 disables that constraint). @max_bandwidth <= 0 selects
+  * the lowest representation instead. Returns -1 when nothing fits. */
+ gint
+ gst_mpd_client_get_rep_idx_with_max_bandwidth (GList * Representations,
+ gint64 max_bandwidth, gint max_video_width, gint max_video_height, gint
+ max_video_framerate_n, gint max_video_framerate_d)
+ {
+ GList *list = NULL, *best = NULL;
+ GstMPDRepresentationNode *representation;
+ gint best_bandwidth = 0;
+
+ GST_DEBUG ("max_bandwidth = %" G_GINT64_FORMAT, max_bandwidth);
+
+ if (Representations == NULL)
+ return -1;
+
+ if (max_bandwidth <= 0) /* 0 => get lowest representation available */
+ return gst_mpd_client_get_rep_idx_with_min_bandwidth (Representations);
+
+ for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+ GstXMLFrameRate *framerate = NULL;
+
+ representation = (GstMPDRepresentationNode *) list->data;
+
+ /* FIXME: Really? */
+ if (!representation)
+ continue;
+
+ /* frameRate takes precedence over maxFrameRate when both exist */
+ framerate = GST_MPD_REPRESENTATION_BASE_NODE (representation)->frameRate;
+ if (!framerate)
+ framerate =
+ GST_MPD_REPRESENTATION_BASE_NODE (representation)->maxFrameRate;
+
+ if (framerate && max_video_framerate_n > 0) {
+ if (gst_util_fraction_compare (framerate->num, framerate->den,
+ max_video_framerate_n, max_video_framerate_d) > 0)
+ continue;
+ }
+
+ if (max_video_width > 0
+ && GST_MPD_REPRESENTATION_BASE_NODE (representation)->width >
+ max_video_width)
+ continue;
+ if (max_video_height > 0
+ && GST_MPD_REPRESENTATION_BASE_NODE (representation)->height >
+ max_video_height)
+ continue;
+
+ if (representation->bandwidth <= max_bandwidth &&
+ representation->bandwidth > best_bandwidth) {
+ best = list;
+ best_bandwidth = representation->bandwidth;
+ }
+ }
+
+ return best ? g_list_position (Representations, best) : -1;
+ }
+
+ /* Rewinds every active stream of @client to its first media segment by
+  * resetting both the segment index and the repeat sub-index. */
+ void
+ gst_mpd_client_seek_to_first_segment (GstMPDClient * client)
+ {
+ GList *list;
+
+ g_return_if_fail (client != NULL);
+ g_return_if_fail (client->active_streams != NULL);
+
+ for (list = g_list_first (client->active_streams); list;
+ list = g_list_next (list)) {
+ GstActiveStream *stream = (GstActiveStream *) list->data;
+ if (stream) {
+ stream->segment_index = 0;
+ stream->segment_repeat_index = 0;
+ }
+ }
+ }
+
+ /* Returns the number of media segments for @stream: the explicit segment
+  * array length when one exists, otherwise the count derived from the period
+  * duration divided by the (template) segment duration. Returns 0 when the
+  * count cannot be determined. Only valid for templates without a
+  * SegmentTimeline (enforced below). */
+ static guint
+ gst_mpd_client_get_segments_counts (GstMPDClient * client,
+ GstActiveStream * stream)
+ {
+ GstStreamPeriod *stream_period;
+
+ g_return_val_if_fail (stream != NULL, 0);
+
+ if (stream->segments)
+ return stream->segments->len;
+ g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+ (stream->cur_seg_template)->SegmentTimeline == NULL, 0);
+
+ stream_period = gst_mpd_client_get_stream_period (client);
+ /* duration is a GstClockTime; -1 converts to the "none" sentinel */
+ if (stream_period->duration != -1)
+ return gst_util_uint64_scale_ceil (stream_period->duration, 1,
+ gst_mpd_client_get_segment_duration (client, stream, NULL));
+
+ return 0;
+ }
+
+ /* TRUE when the parsed MPD declares type="dynamic", i.e. a live stream. */
+ gboolean
+ gst_mpd_client_is_live (GstMPDClient * client)
+ {
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+ return client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC;
+ }
+
+ /* Returns the number of currently active streams (0 if none). */
+ guint
+ gst_mpd_client_get_nb_active_stream (GstMPDClient * client)
+ {
+ g_return_val_if_fail (client != NULL, 0);
+
+ return g_list_length (client->active_streams);
+ }
+
+ /* Returns the number of AdaptationSets in the client's current period,
+  * or 0 when no period is selected. */
+ guint
+ gst_mpd_client_get_nb_adaptationSet (GstMPDClient * client)
+ {
+ GstStreamPeriod *stream_period;
+
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, 0);
+ g_return_val_if_fail (stream_period->period != NULL, 0);
+
+ return g_list_length (stream_period->period->AdaptationSets);
+ }
+
+ /* Returns the active stream at @stream_idx (borrowed pointer, owned by
+  * @client), or NULL if the index is out of range. */
+ GstActiveStream *
+ gst_mpd_client_get_active_stream_by_index (GstMPDClient * client,
+ guint stream_idx)
+ {
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (client->active_streams != NULL, NULL);
+
+ return g_list_nth_data (client->active_streams, stream_idx);
+ }
+
+ /* TRUE when the stream carries subtitles: either a subtitle mimeType
+  * (TTML or WebVTT) or codecs starting with "stpp" (TTML-in-MP4).
+  * NOTE(review): assumes cur_representation and cur_adapt_set are set;
+  * callers must have selected a representation first. */
+ gboolean
+ gst_mpd_client_active_stream_contains_subtitles (GstActiveStream * stream)
+ {
+ const gchar *mimeType;
+ const gchar *adapt_set_codecs;
+ const gchar *rep_codecs;
+
+ /* representation-level mimeType overrides the adaptation-set one */
+ mimeType =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->mimeType;
+ if (!mimeType)
+ mimeType =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->mimeType;
+
+ if (g_strcmp0 (mimeType, "application/ttml+xml") == 0 ||
+ g_strcmp0 (mimeType, "text/vtt") == 0)
+ return TRUE;
+
+ adapt_set_codecs =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->codecs;
+ rep_codecs =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->codecs;
+
+ return (adapt_set_codecs && g_str_has_prefix (adapt_set_codecs, "stpp"))
+ || (rep_codecs && g_str_has_prefix (rep_codecs, "stpp"));
+ }
+
+ /* Builds GstCaps for @stream from its mimeType (representation first,
+  * adaptation set as fallback). Returns a new caps ref the caller must
+  * unref, or NULL if no mapping exists or the stream is not selected. */
+ GstCaps *
+ gst_mpd_client_get_stream_caps (GstActiveStream * stream)
+ {
+ const gchar *mimeType, *caps_string;
+ GstCaps *ret = NULL;
+
+ if (stream == NULL || stream->cur_adapt_set == NULL
+ || stream->cur_representation == NULL)
+ return NULL;
+
+ mimeType =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->mimeType;
+ if (mimeType == NULL) {
+ mimeType =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->mimeType;
+ }
+
+ caps_string = gst_mpd_helper_mimetype_to_caps (mimeType);
+
+ /* stpp subtitles are muxed in an ISO BMFF container; advertise it as
+  * quicktime so downstream plugs a qtdemux-capable parser */
+ if ((g_strcmp0 (caps_string, "application/mp4") == 0)
+ && gst_mpd_client_active_stream_contains_subtitles (stream))
+ caps_string = "video/quicktime";
+
+ if (caps_string)
+ ret = gst_caps_from_string (caps_string);
+
+ return ret;
+ }
+
+ /* Returns the bitstreamSwitching attribute of the stream's current
+  * adaptation set, FALSE when no adaptation set is selected. */
+ gboolean
+ gst_mpd_client_get_bitstream_switching_flag (GstActiveStream * stream)
+ {
+ if (stream == NULL || stream->cur_adapt_set == NULL)
+ return FALSE;
+
+ return stream->cur_adapt_set->bitstreamSwitching;
+ }
+
+ /* Returns the video width for @stream, preferring the representation's
+  * value and falling back to the adaptation set. 0 means unknown. */
+ guint
+ gst_mpd_client_get_video_stream_width (GstActiveStream * stream)
+ {
+ guint width;
+
+ if (stream == NULL || stream->cur_adapt_set == NULL
+ || stream->cur_representation == NULL)
+ return 0;
+
+ width = GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->width;
+ if (width == 0) {
+ width = GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->width;
+ }
+
+ return width;
+ }
+
+ /* Returns the video height for @stream, preferring the representation's
+  * value and falling back to the adaptation set. 0 means unknown. */
+ guint
+ gst_mpd_client_get_video_stream_height (GstActiveStream * stream)
+ {
+ guint height;
+
+ if (stream == NULL || stream->cur_adapt_set == NULL
+ || stream->cur_representation == NULL)
+ return 0;
+
+ height =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->height;
+ if (height == 0) {
+ height = GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->height;
+ }
+
+ return height;
+ }
+
+ /* Fetches the video framerate for @stream into @fps_num/@fps_den.
+  * Search order: adaptation-set frameRate, adaptation-set maxFrameRate,
+  * representation frameRate, representation maxFrameRate.
+  * Returns TRUE when a value was found and the out-params were written;
+  * FALSE leaves the out-params untouched. */
+ gboolean
+ gst_mpd_client_get_video_stream_framerate (GstActiveStream * stream,
+ gint * fps_num, gint * fps_den)
+ {
+ if (stream == NULL)
+ return FALSE;
+
+ if (stream->cur_adapt_set &&
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->frameRate !=
+ NULL) {
+ *fps_num =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+ frameRate->num;
+ *fps_den =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+ frameRate->den;
+ return TRUE;
+ }
+
+ if (stream->cur_adapt_set &&
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->maxFrameRate !=
+ NULL) {
+ *fps_num =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+ maxFrameRate->num;
+ *fps_den =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+ maxFrameRate->den;
+ return TRUE;
+ }
+
+ if (stream->cur_representation &&
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->frameRate != NULL) {
+ *fps_num =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->frameRate->num;
+ *fps_den =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->frameRate->den;
+ return TRUE;
+ }
+
+ if (stream->cur_representation &&
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->maxFrameRate != NULL) {
+ *fps_num =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->maxFrameRate->num;
+ *fps_den =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->maxFrameRate->den;
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ /* Returns the audio sampling rate for @stream (representation first,
+  * then adaptation set), or 0 when absent / not parseable.
+  * NOTE(review): atoi() silently yields 0 on malformed input, which here
+  * coincides with the "unknown" return value. */
+ guint
+ gst_mpd_client_get_audio_stream_rate (GstActiveStream * stream)
+ {
+ const gchar *rate;
+
+ if (stream == NULL || stream->cur_adapt_set == NULL
+ || stream->cur_representation == NULL)
+ return 0;
+
+ rate =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_representation)->audioSamplingRate;
+ if (rate == NULL) {
+ rate =
+ GST_MPD_REPRESENTATION_BASE_NODE (stream->
+ cur_adapt_set)->audioSamplingRate;
+ }
+
+ return rate ? atoi (rate) : 0;
+ }
+
+ /* Returns the audio channel count for @stream. Currently always 0
+  * (unknown) because AudioChannelConfiguration is not parsed yet. */
+ guint
+ gst_mpd_client_get_audio_stream_num_channels (GstActiveStream * stream)
+ {
+ if (stream == NULL || stream->cur_adapt_set == NULL
+ || stream->cur_representation == NULL)
+ return 0;
+ /* TODO: here we have to parse the AudioChannelConfiguration descriptors */
+ return 0;
+ }
+
+ /* Collects the @lang attribute of every audio adaptation set in the
+  * current period, appending the language strings to *@lang, and returns
+  * how many were found. The appended strings are borrowed from the MPD
+  * nodes — the caller must not free them. */
+ guint
+ gst_mpd_client_get_list_and_nb_of_audio_language (GstMPDClient * client,
+ GList ** lang)
+ {
+ GstStreamPeriod *stream_period;
+ GstMPDAdaptationSetNode *adapt_set;
+ GList *adaptation_sets, *list;
+ const gchar *this_mimeType = "audio";
+ gchar *mimeType = NULL;
+ guint nb_adaptation_set = 0;
+
+ stream_period = gst_mpd_client_get_stream_period (client);
+ g_return_val_if_fail (stream_period != NULL, 0);
+ g_return_val_if_fail (stream_period->period != NULL, 0);
+
+ adaptation_sets =
+ gst_mpd_client_get_adaptation_sets_for_period (client, stream_period);
+ for (list = adaptation_sets; list; list = g_list_next (list)) {
+ adapt_set = (GstMPDAdaptationSetNode *) list->data;
+ if (adapt_set && adapt_set->lang) {
+ gchar *this_lang = adapt_set->lang;
+ GstMPDRepresentationNode *rep;
+ rep =
+ gst_mpd_client_get_lowest_representation (adapt_set->Representations);
+ /* mimeType may live on the representation or on the adaptation set */
+ mimeType = NULL;
+ if (GST_MPD_REPRESENTATION_BASE_NODE (rep))
+ mimeType = GST_MPD_REPRESENTATION_BASE_NODE (rep)->mimeType;
+ if (!mimeType && GST_MPD_REPRESENTATION_BASE_NODE (adapt_set)) {
+ mimeType = GST_MPD_REPRESENTATION_BASE_NODE (adapt_set)->mimeType;
+ }
+
+ /* prefix match, e.g. "audio/mp4" matches "audio" */
+ if (gst_mpd_helper_strncmp_ext (mimeType, this_mimeType) == 0) {
+ nb_adaptation_set++;
+ *lang = g_list_append (*lang, this_lang);
+ }
+ }
+ }
+
+ return nb_adaptation_set;
+ }
+
+
+ /* Computes the wall-clock time at which the stream's next segment becomes
+  * available on a live service: the MPD availabilityStartTime offset by the
+  * end time of the current segment. Returns a new GstDateTime (caller
+  * unrefs) or NULL on failure. */
+ GstDateTime *
+ gst_mpd_client_get_next_segment_availability_start_time (GstMPDClient * client,
+ GstActiveStream * stream)
+ {
+ GstDateTime *availability_start_time, *rv;
+ gint seg_idx;
+ GstMediaSegment *segment;
+ GstClockTime segmentEndTime;
+ const GstStreamPeriod *stream_period;
+ GstClockTime period_start = 0;
+
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (stream != NULL, NULL);
+
+ stream_period = gst_mpd_client_get_stream_period (client);
+ if (stream_period && stream_period->period) {
+ period_start = stream_period->start;
+ }
+
+ seg_idx = stream->segment_index;
+
+ if (stream->segments) {
+ segment = g_ptr_array_index (stream->segments, seg_idx);
+
+ if (segment->repeat >= 0) {
+ /* bounded repeat: end = start + (repeats consumed so far + 1) * duration */
+ segmentEndTime = segment->start + (stream->segment_repeat_index + 1) *
+ segment->duration;
+ } else if (seg_idx < stream->segments->len - 1) {
+ /* open-ended repeat: the next segment's start bounds this one */
+ const GstMediaSegment *next_segment =
+ g_ptr_array_index (stream->segments, seg_idx + 1);
+ segmentEndTime = next_segment->start;
+ } else {
+ /* last segment of the period: bounded by the period end */
+ g_return_val_if_fail (stream_period != NULL, NULL);
+ segmentEndTime = period_start + stream_period->duration;
+ }
+ } else {
+ /* template-based addressing: constant segment duration */
+ GstClockTime seg_duration;
+ seg_duration = gst_mpd_client_get_segment_duration (client, stream, NULL);
+ if (seg_duration == 0)
+ return NULL;
+ segmentEndTime = period_start + (1 + seg_idx) * seg_duration;
+ }
+
+ availability_start_time = gst_mpd_client_get_availability_start_time (client);
+ if (availability_start_time == NULL) {
+ GST_WARNING_OBJECT (client, "Failed to get availability_start_time");
+ return NULL;
+ }
+
+ rv = gst_mpd_client_add_time_difference (availability_start_time,
+ segmentEndTime / GST_USECOND);
+ gst_date_time_unref (availability_start_time);
+ if (rv == NULL) {
+ GST_WARNING_OBJECT (client, "Failed to offset availability_start_time");
+ return NULL;
+ }
+
+ return rv;
+ }
+
+ /* Seeks every active stream of a live MPD to the wall-clock instant @time,
+  * expressed relative to the MPD availabilityStartTime. Times before the
+  * availability start are clamped to 0. Returns TRUE only if all streams
+  * seeked successfully. */
+ gboolean
+ gst_mpd_client_seek_to_time (GstMPDClient * client, GDateTime * time)
+ {
+ GDateTime *start;
+ GTimeSpan ts_microseconds;
+ GstClockTime ts;
+ gboolean ret = TRUE;
+ GList *stream;
+
+ g_return_val_if_fail (gst_mpd_client_is_live (client), FALSE);
+ g_return_val_if_fail (client->mpd_root_node->availabilityStartTime != NULL,
+ FALSE);
+
+ start =
+ gst_date_time_to_g_date_time (client->mpd_root_node->
+ availabilityStartTime);
+
+ ts_microseconds = g_date_time_difference (time, start);
+ g_date_time_unref (start);
+
+ /* Clamp to availability start time, otherwise calculations wrap around */
+ if (ts_microseconds < 0)
+ ts_microseconds = 0;
+
+ ts = ts_microseconds * GST_USECOND;
+ for (stream = client->active_streams; stream; stream = g_list_next (stream)) {
+ /* bitwise AND over TRUE(1)/FALSE(0): any failed stream makes ret FALSE */
+ ret =
+ ret & gst_mpd_client_stream_seek (client, stream->data, TRUE, 0, ts,
+ NULL);
+ }
+ return ret;
+ }
+
+ /* TRUE when the MPD advertised the ISOFF on-demand profile (cached at
+  * parse time in client->profile_isoff_ondemand). */
+ gboolean
+ gst_mpd_client_has_isoff_ondemand_profile (GstMPDClient * client)
+ {
+ return client->profile_isoff_ondemand;
+ }
+
+ /**
+  * gst_mpd_client_parse_default_presentation_delay:
+  * @client: #GstMPDClient that has a parsed manifest
+  * @default_presentation_delay: A string that specifies a time period
+  * in fragments (e.g. "5 f"), seconds ("12 s") or milliseconds
+  * ("12000 ms")
+  * Returns: the parsed string in milliseconds, or 0 on parse failure
+  *
+  * Since: 1.6
+  */
+ gint64
+ gst_mpd_client_parse_default_presentation_delay (GstMPDClient * client,
+ const gchar * default_presentation_delay)
+ {
+ gint64 value;
+ char *endptr = NULL;
+
+ g_return_val_if_fail (client != NULL, 0);
+ g_return_val_if_fail (default_presentation_delay != NULL, 0);
+ value = strtol (default_presentation_delay, &endptr, 10);
+ /* no digits consumed, or an explicit 0, both mean "no delay" */
+ if (endptr == default_presentation_delay || value == 0) {
+ return 0;
+ }
+ /* skip optional whitespace between the number and the unit */
+ while (*endptr == ' ')
+ endptr++;
+ if (*endptr == 's' || *endptr == 'S') {
+ value *= 1000; /* convert to ms */
+ } else if (*endptr == 'f' || *endptr == 'F') {
+ /* "fragments": multiply by the MPD's maxSegmentDuration (already ms) */
+ gint64 segment_duration;
+ g_assert (client->mpd_root_node != NULL);
+ segment_duration = client->mpd_root_node->maxSegmentDuration;
+ value *= segment_duration;
+ } else if (*endptr != 'm' && *endptr != 'M') {
+ /* anything other than s/f/ms is rejected */
+ GST_ERROR ("Unable to parse default presentation delay: %s",
+ default_presentation_delay);
+ value = 0;
+ }
+ return value;
+ }
+
+ /* Returns the maximum segment duration of the presentation as a
+  * GstClockTime: the MPD-level maxSegmentDuration when present, otherwise
+  * the largest per-stream segment duration. GST_CLOCK_TIME_NONE if
+  * nothing can be determined. */
+ GstClockTime
+ gst_mpd_client_get_maximum_segment_duration (GstMPDClient * client)
+ {
+ GstClockTime ret = GST_CLOCK_TIME_NONE, dur;
+ GList *stream;
+
+ g_return_val_if_fail (client != NULL, GST_CLOCK_TIME_NONE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, GST_CLOCK_TIME_NONE);
+
+ if (client->mpd_root_node->maxSegmentDuration != GST_MPD_DURATION_NONE) {
+ /* MPD stores the value in milliseconds */
+ return client->mpd_root_node->maxSegmentDuration * GST_MSECOND;
+ }
+
+ /* According to the DASH specification, if maxSegmentDuration is not present:
+ "If not present, then the maximum Segment duration shall be the maximum
+ duration of any Segment documented in this MPD"
+ */
+ for (stream = client->active_streams; stream; stream = g_list_next (stream)) {
+ dur = gst_mpd_client_get_segment_duration (client, stream->data, NULL);
+ if (dur != GST_CLOCK_TIME_NONE && (dur > ret || ret == GST_CLOCK_TIME_NONE)) {
+ ret = dur;
+ }
+ }
+ return ret;
+ }
+
+ /* Maps the wall-clock instant @time to the index of the period containing
+  * it. Returns 0 when the time precedes availability start or setup fails,
+  * and G_MAXUINT when no period covers the instant. */
+ guint
+ gst_mpd_client_get_period_index_at_time (GstMPDClient * client,
+ GstDateTime * time)
+ {
+ GList *iter;
+ guint period_idx = G_MAXUINT;
+ guint idx;
+ gint64 time_offset;
+ GstDateTime *avail_start =
+ gst_mpd_client_get_availability_start_time (client);
+ GstStreamPeriod *stream_period;
+
+ if (avail_start == NULL)
+ return 0;
+
+ time_offset = gst_mpd_client_calculate_time_difference (avail_start, time);
+ gst_date_time_unref (avail_start);
+
+ if (time_offset < 0)
+ return 0;
+
+ /* (re)build the period list up to time_offset before searching it */
+ if (!gst_mpd_client_setup_media_presentation (client, time_offset, -1, NULL))
+ return 0;
+
+ for (idx = 0, iter = client->periods; iter; idx++, iter = g_list_next (iter)) {
+ stream_period = iter->data;
+ /* a period with no valid duration is treated as extending forever */
+ if (stream_period->start <= time_offset
+ && (!GST_CLOCK_TIME_IS_VALID (stream_period->duration)
+ || stream_period->start + stream_period->duration > time_offset)) {
+ period_idx = idx;
+ break;
+ }
+ }
+
+ return period_idx;
+ }
+
+ /* add or set node methods */
+
+ /* Sets GObject properties on the MPD root node (creating it on first use).
+  * @property_name is followed by value pairs, NULL-terminated as with
+  * g_object_set(). Always returns TRUE once the client check passes. */
+ gboolean
+ gst_mpd_client_set_root_node (GstMPDClient * client,
+ const gchar * property_name, ...)
+ {
+ va_list myargs;
+ g_return_val_if_fail (client != NULL, FALSE);
+
+ if (!client->mpd_root_node)
+ client->mpd_root_node = gst_mpd_root_node_new ();
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (client->mpd_root_node), property_name, myargs);
+ va_end (myargs);
+
+ return TRUE;
+ }
+
+ /* Appends a new BaseURL node to the MPD root, configured from the given
+  * g_object_set()-style property/value pairs. Ownership of the new node
+  * passes to the root node's BaseURLs list. */
+ gboolean
+ gst_mpd_client_add_baseurl_node (GstMPDClient * client,
+ const gchar * property_name, ...)
+ {
+ GstMPDBaseURLNode *baseurl_node = NULL;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+ va_start (myargs, property_name);
+
+ baseurl_node = gst_mpd_baseurl_node_new ();
+ g_object_set_valist (G_OBJECT (baseurl_node), property_name, myargs);
+ client->mpd_root_node->BaseURLs =
+ g_list_append (client->mpd_root_node->BaseURLs, baseurl_node);
+
+ va_end (myargs);
+ return TRUE;
+ }
+
+ /* Looks up the period with @period_id, creating it (with the given id or
+  * a generated "period_NN" id) when absent, then applies the
+  * g_object_set()-style property/value pairs. Returns the period id
+  * (borrowed from the node — do not free). */
+ gchar *
+ gst_mpd_client_set_period_node (GstMPDClient * client,
+ gchar * period_id, const gchar * property_name, ...)
+ {
+ GstMPDPeriodNode *period_node = NULL;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (client->mpd_root_node != NULL, NULL);
+
+ period_node =
+ GST_MPD_PERIOD_NODE (gst_mpd_client_get_period_with_id
+ (client->mpd_root_node->Periods, period_id));
+ if (!period_node) {
+ period_node = gst_mpd_period_node_new ();
+ if (period_id)
+ period_node->id = g_strdup (period_id);
+ else
+ period_node->id =
+ _generate_new_string_id (client->mpd_root_node->Periods,
+ "period_%.2d", gst_mpd_client_get_period_with_id);
+ client->mpd_root_node->Periods =
+ g_list_append (client->mpd_root_node->Periods, period_node);
+ }
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (period_node), property_name, myargs);
+ va_end (myargs);
+
+ return period_node->id;
+ }
+
+ /* Looks up (or creates) the adaptation set with @adaptation_set_id inside
+  * the period @period_id, then applies the g_object_set()-style
+  * property/value pairs. An @adaptation_set_id of 0 requests a generated
+  * id. Returns the (possibly generated) adaptation set id, 0 on failure. */
+ guint
+ gst_mpd_client_set_adaptation_set_node (GstMPDClient * client,
+ gchar * period_id, guint adaptation_set_id, const gchar * property_name,
+ ...)
+ {
+ GstMPDAdaptationSetNode *adap_node = NULL;
+ GstMPDPeriodNode *period_node = NULL;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, 0);
+ g_return_val_if_fail (client->mpd_root_node != NULL, 0);
+
+ period_node =
+ GST_MPD_PERIOD_NODE (gst_mpd_client_get_period_with_id
+ (client->mpd_root_node->Periods, period_id));
+ g_return_val_if_fail (period_node != NULL, 0);
+ adap_node =
+ GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client_get_adaptation_set_with_id
+ (period_node->AdaptationSets, adaptation_set_id));
+ if (!adap_node) {
+ adap_node = gst_mpd_adaptation_set_node_new ();
+ if (adaptation_set_id)
+ adap_node->id = adaptation_set_id;
+ else
+ adap_node->id =
+ _generate_new_id (period_node->AdaptationSets,
+ gst_mpd_client_get_adaptation_set_with_id);
+ GST_DEBUG_OBJECT (client, "Add a new adaptation set with id %d",
+ adap_node->id);
+ period_node->AdaptationSets =
+ g_list_append (period_node->AdaptationSets, adap_node);
+ }
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (adap_node), property_name, myargs);
+ va_end (myargs);
+
+ return adap_node->id;
+ }
+
+ /* Looks up (or creates) the representation @representation_id inside the
+  * adaptation set @adaptation_set_id of period @period_id, then applies the
+  * g_object_set()-style property/value pairs. Returns the (possibly
+  * generated) representation id, borrowed from the node, or NULL when the
+  * period or adaptation set cannot be found. */
+ gchar *
+ gst_mpd_client_set_representation_node (GstMPDClient * client,
+ gchar * period_id, guint adaptation_set_id, gchar * representation_id,
+ const gchar * property_name, ...)
+ {
+ GstMPDRepresentationNode *rep_node = NULL;
+ GstMPDAdaptationSetNode *adap_set_node = NULL;
+ GstMPDPeriodNode *period_node = NULL;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, NULL);
+ g_return_val_if_fail (client->mpd_root_node != NULL, NULL);
+
+ period_node =
+ GST_MPD_PERIOD_NODE (gst_mpd_client_get_period_with_id
+ (client->mpd_root_node->Periods, period_id));
+ /* guard against an unknown period id before dereferencing, matching
+  * gst_mpd_client_set_adaptation_set_node() */
+ g_return_val_if_fail (period_node != NULL, NULL);
+ adap_set_node =
+ GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client_get_adaptation_set_with_id
+ (period_node->AdaptationSets, adaptation_set_id));
+ g_return_val_if_fail (adap_set_node != NULL, NULL);
+ rep_node =
+ GST_MPD_REPRESENTATION_NODE (gst_mpd_client_get_representation_with_id
+ (adap_set_node->Representations, representation_id));
+ if (!rep_node) {
+ rep_node = gst_mpd_representation_node_new ();
+ if (representation_id)
+ rep_node->id = g_strdup (representation_id);
+ else
+ rep_node->id =
+ _generate_new_string_id (adap_set_node->Representations,
+ "representation_%.2d", gst_mpd_client_get_representation_with_id);
+ GST_DEBUG_OBJECT (client, "Add a new representation with id %s",
+ rep_node->id);
+ adap_set_node->Representations =
+ g_list_append (adap_set_node->Representations, rep_node);
+ }
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (rep_node), property_name, myargs);
+ va_end (myargs);
+
+ return rep_node->id;
+ }
+
+ /* add/set a segment list node */
+ /* Ensures the representation (period_id / adap_set_id / rep_id) has a
+  * SegmentList node, creating one if needed, and applies the
+  * g_object_set()-style property/value pairs to it. Returns FALSE when the
+  * period, adaptation set or representation cannot be found. */
+ gboolean
+ gst_mpd_client_set_segment_list (GstMPDClient * client,
+ gchar * period_id, guint adap_set_id, gchar * rep_id,
+ const gchar * property_name, ...)
+ {
+ GstMPDRepresentationNode *representation = NULL;
+ GstMPDAdaptationSetNode *adaptation_set = NULL;
+ GstMPDPeriodNode *period = NULL;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+ period =
+ GST_MPD_PERIOD_NODE (gst_mpd_client_get_period_with_id
+ (client->mpd_root_node->Periods, period_id));
+ /* guard every lookup before dereferencing its result */
+ g_return_val_if_fail (period != NULL, FALSE);
+ adaptation_set =
+ GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client_get_adaptation_set_with_id
+ (period->AdaptationSets, adap_set_id));
+ g_return_val_if_fail (adaptation_set != NULL, FALSE);
+
+ representation =
+ GST_MPD_REPRESENTATION_NODE (gst_mpd_client_get_representation_with_id
+ (adaptation_set->Representations, rep_id));
+ g_return_val_if_fail (representation != NULL, FALSE);
+ if (!representation->SegmentList) {
+ representation->SegmentList = gst_mpd_segment_list_node_new ();
+ }
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (representation->SegmentList), property_name,
+ myargs);
+ va_end (myargs);
+
+ return TRUE;
+ }
+
+ /* add/set a segment template node */
+ /* Ensures the representation (period_id / adap_set_id / rep_id) has a
+  * SegmentTemplate node, creating one if needed, and applies the
+  * g_object_set()-style property/value pairs to it. Returns FALSE when the
+  * period, adaptation set or representation cannot be found. */
+ gboolean
+ gst_mpd_client_set_segment_template (GstMPDClient * client,
+ gchar * period_id, guint adap_set_id, gchar * rep_id,
+ const gchar * property_name, ...)
+ {
+ GstMPDRepresentationNode *representation = NULL;
+ GstMPDAdaptationSetNode *adaptation_set = NULL;
+ GstMPDPeriodNode *period = NULL;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+ period =
+ GST_MPD_PERIOD_NODE (gst_mpd_client_get_period_with_id
+ (client->mpd_root_node->Periods, period_id));
+ /* guard every lookup before dereferencing its result */
+ g_return_val_if_fail (period != NULL, FALSE);
+ adaptation_set =
+ GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client_get_adaptation_set_with_id
+ (period->AdaptationSets, adap_set_id));
+ g_return_val_if_fail (adaptation_set != NULL, FALSE);
+
+ representation =
+ GST_MPD_REPRESENTATION_NODE (gst_mpd_client_get_representation_with_id
+ (adaptation_set->Representations, rep_id));
+ g_return_val_if_fail (representation != NULL, FALSE);
+ if (!representation->SegmentTemplate) {
+ representation->SegmentTemplate = gst_mpd_segment_template_node_new ();
+ }
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (representation->SegmentTemplate),
+ property_name, myargs);
+ va_end (myargs);
+
+ return TRUE;
+ }
+
+ /* add a segmentURL node with to a SegmentList node */
+ /* Appends a new SegmentURL (configured from the g_object_set()-style
+  * property/value pairs) to the representation's SegmentList, creating the
+  * list if needed, and grows the MPD's media-presentation-duration by the
+  * list's per-segment duration. Returns FALSE when the period, adaptation
+  * set or representation cannot be found. */
+ gboolean
+ gst_mpd_client_add_segment_url (GstMPDClient * client,
+ gchar * period_id, guint adap_set_id, gchar * rep_id,
+ const gchar * property_name, ...)
+ {
+ GstMPDRepresentationNode *representation = NULL;
+ GstMPDAdaptationSetNode *adaptation_set = NULL;
+ GstMPDPeriodNode *period = NULL;
+ GstMPDSegmentURLNode *segment_url = NULL;
+ guint64 media_presentation_duration = 0;
+ va_list myargs;
+
+ g_return_val_if_fail (client != NULL, FALSE);
+ g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+ period =
+ GST_MPD_PERIOD_NODE (gst_mpd_client_get_period_with_id
+ (client->mpd_root_node->Periods, period_id));
+ /* guard every lookup before dereferencing its result */
+ g_return_val_if_fail (period != NULL, FALSE);
+ adaptation_set =
+ GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client_get_adaptation_set_with_id
+ (period->AdaptationSets, adap_set_id));
+ g_return_val_if_fail (adaptation_set != NULL, FALSE);
+
+ representation =
+ GST_MPD_REPRESENTATION_NODE (gst_mpd_client_get_representation_with_id
+ (adaptation_set->Representations, rep_id));
+ g_return_val_if_fail (representation != NULL, FALSE);
+
+ if (!representation->SegmentList) {
+ representation->SegmentList = gst_mpd_segment_list_node_new ();
+ }
+
+ segment_url = gst_mpd_segment_url_node_new ();
+
+ va_start (myargs, property_name);
+ g_object_set_valist (G_OBJECT (segment_url), property_name, myargs);
+ va_end (myargs);
+
+ gst_mpd_segment_list_node_add_segment (representation->SegmentList,
+ segment_url);
+
+ /* Set the media presentation time according to the new segment duration added */
+ g_object_get (client->mpd_root_node, "media-presentation-duration",
+ &media_presentation_duration, NULL);
+ media_presentation_duration +=
+ GST_MPD_MULT_SEGMENT_BASE_NODE (representation->SegmentList)->duration;
+ g_object_set (client->mpd_root_node, "media-presentation-duration",
+ media_presentation_duration, NULL);
+
+ return TRUE;
+ }
--- /dev/null
+ /*
+ * DASH MPD parsing library
+ *
+ * gstmpdparser.h
+ *
+ * Copyright (C) 2012 STMicroelectronics
+ *
+ * Authors:
+ * Gianluca Gennari <gennarone@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+ #ifndef __GST_MPDPARSER_H__
+ #define __GST_MPDPARSER_H__
+
+ #include <gst/gst.h>
+ #include <gst/uridownloader/gsturidownloader.h>
+ #include <gst/base/gstadapter.h>
+ #include "gstmpdhelper.h"
+ #include "gstxmlhelper.h"
+ #include "gstmpdrootnode.h"
+ #include "gstmpdbaseurlnode.h"
+ #include "gstmpdutctimingnode.h"
+ #include "gstmpdmetricsnode.h"
+ #include "gstmpdmetricsrangenode.h"
+ #include "gstmpdsnode.h"
+ #include "gstmpdsegmenttimelinenode.h"
+ #include "gstmpdsegmenttemplatenode.h"
+ #include "gstmpdsegmenturlnode.h"
+ #include "gstmpdsegmentlistnode.h"
+ #include "gstmpdsegmentbasenode.h"
+ #include "gstmpdperiodnode.h"
+ #include "gstmpdrepresentationnode.h"
+ #include "gstmpdsubrepresentationnode.h"
+ #include "gstmpdcontentcomponentnode.h"
+ #include "gstmpdadaptationsetnode.h"
+ #include "gstmpdsubsetnode.h"
+ #include "gstmpdprograminformationnode.h"
+ #include "gstmpdlocationnode.h"
+ #include "gstmpdreportingnode.h"
+ #include "gstmpdurltypenode.h"
+ #include "gstmpddescriptortypenode.h"
+ #include "gstmpdrepresentationbasenode.h"
+ #include "gstmpdmultsegmentbasenode.h"
+
+ G_BEGIN_DECLS
+
+ /* Forward declarations for the parser's core structures (defined below). */
+ typedef struct _GstActiveStream GstActiveStream;
+ typedef struct _GstStreamPeriod GstStreamPeriod;
+ typedef struct _GstMediaFragmentInfo GstMediaFragmentInfo;
+ typedef struct _GstMediaSegment GstMediaSegment;
+
+
+ /* Sentinel meaning "no duration available" (all-ones guint64). */
+ #define GST_MPD_DURATION_NONE ((guint64)-1)
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++#define DEFAULT_ADAPTIVE_RETRY -1
++#define DEFAULT_ADAPTIVE_TIMEOUT -1
++#endif
++
+ /* Coarse media category of an active stream, derived from its mimeType. */
+ typedef enum
+ {
+ GST_STREAM_UNKNOWN,
+ GST_STREAM_VIDEO, /* video stream (the main one) */
+ GST_STREAM_AUDIO, /* audio stream (optional) */
+ GST_STREAM_APPLICATION /* application stream (optional): for timed text/subtitles */
+ } GstStreamMimeType;
+
+ /**
+  * GstStreamPeriod:
+  *
+  * Stream period data structure: one MPD Period plus its resolved
+  * position on the presentation timeline.
+  */
+ struct _GstStreamPeriod
+ {
+ GstMPDPeriodNode *period; /* Stream period */
+ guint number; /* Period number */
+ GstClockTime start; /* Period start time */
+ GstClockTime duration; /* Period duration */
+ };
+
+ /**
+  * GstMediaSegment:
+  *
+  * Media segment data structure: one downloadable segment with both its
+  * raw timescale values and the derived GstClockTime values.
+  */
+ struct _GstMediaSegment
+ {
+ GstMPDSegmentURLNode *SegmentURL; /* this is NULL when using a SegmentTemplate */
+ guint number; /* segment number */
+ gint repeat; /* number of extra repetitions (0 = played only once) */
+ guint64 scale_start; /* start time in timescale units */
+ guint64 scale_duration; /* duration in timescale units */
+ GstClockTime start; /* segment start time */
+ GstClockTime duration; /* segment duration */
+ };
+
+ /**
+  * GstMediaFragmentInfo:
+  *
+  * Fully-resolved download information for one media fragment: the media
+  * URI with an optional byte range, an optional index URI/range, plus the
+  * fragment's timing. NOTE(review): range fields presumably use -1 for
+  * "no range" — confirm against the users of this struct.
+  */
+ struct _GstMediaFragmentInfo
+ {
+ gchar *uri;
+ gint64 range_start;
+ gint64 range_end;
+
+ gchar *index_uri;
+ gint64 index_range_start;
+ gint64 index_range_end;
+
+ gboolean discontinuity;
+ GstClockTime timestamp;
+ GstClockTime duration;
+ };
+
+ /**
+  * GstActiveStream:
+  *
+  * Active stream data structure: the currently selected adaptation
+  * set / representation / segment addressing for one elementary stream,
+  * plus its download position.
+  */
+ struct _GstActiveStream
+ {
+ GstStreamMimeType mimeType; /* video/audio/application */
+
+ guint baseURL_idx; /* index of the baseURL used for last request */
+ gchar *baseURL; /* active baseURL used for last request */
+ gchar *queryURL; /* active queryURL used for last request */
+ guint max_bandwidth; /* max bandwidth allowed for this mimeType */
+
+ GstMPDAdaptationSetNode *cur_adapt_set; /* active adaptation set */
+ gint representation_idx; /* index of current representation */
+ GstMPDRepresentationNode *cur_representation; /* active representation */
+ GstMPDSegmentBaseNode *cur_segment_base; /* active segment base */
+ GstMPDSegmentListNode *cur_segment_list; /* active segment list */
+ GstMPDSegmentTemplateNode *cur_seg_template; /* active segment template */
+ gint segment_index; /* index of next sequence chunk */
+ guint segment_repeat_index; /* index of the repeat count of a segment */
+ GPtrArray *segments; /* array of GstMediaSegment */
+ GstClockTime presentationTimeOffset; /* presentation time offset of the current segment */
+ };
+
+ /* MPD file parsing: entry points that turn raw XML data into node trees. */
+ gboolean gst_mpdparser_get_mpd_root_node (GstMPDRootNode ** mpd_root_node, const gchar * data, gint size);
+ GstMPDSegmentListNode * gst_mpdparser_get_external_segment_list (const gchar * data, gint size, GstMPDSegmentListNode * parent);
+ GList * gst_mpdparser_get_external_periods (const gchar * data, gint size);
+ GList * gst_mpdparser_get_external_adaptation_sets (const gchar * data, gint size, GstMPDPeriodNode* period);
+
+ /* navigation functions */
+ GstStreamMimeType gst_mpdparser_representation_get_mimetype (GstMPDAdaptationSetNode * adapt_set, GstMPDRepresentationNode * rep);
+
+ /* Memory management: destructors for the structures declared above. */
+ void gst_mpdparser_free_stream_period (GstStreamPeriod * stream_period);
+ void gst_mpdparser_free_media_segment (GstMediaSegment * media_segment);
+ void gst_mpdparser_free_active_stream (GstActiveStream * active_stream);
+ void gst_mpdparser_media_fragment_info_clear (GstMediaFragmentInfo * fragment);
+ /* Active stream methods*/
+ void gst_mpdparser_init_active_stream_segments (GstActiveStream * stream);
+ gchar *gst_mpdparser_get_mediaURL (GstActiveStream * stream, GstMPDSegmentURLNode * segmentURL);
+ const gchar *gst_mpdparser_get_initializationURL (GstActiveStream * stream, GstMPDURLTypeNode * InitializationURL);
+ gchar *gst_mpdparser_build_URL_from_template (const gchar * url_template, const gchar * id, guint number, guint bandwidth, guint64 time);
+
+ G_END_DECLS
+
+ #endif /* __GST_MPDPARSER_H__ */
+
--- /dev/null
-openssl_dep = dependency('openssl', version : '>= 1.0.1', required : get_option('dtls'))
-libcrypto_dep = dependency('libcrypto', required : get_option('dtls'))
+ # Sources for the DTLS/DTLS-SRTP plugin.
+ dtls_sources = [
+ 'gstdtlsagent.c',
+ 'gstdtlscertificate.c',
+ 'gstdtlsconnection.c',
+ 'gstdtlsdec.c',
+ 'gstdtlsenc.c',
+ 'gstdtlssrtpbin.c',
+ 'gstdtlssrtpdec.c',
+ 'gstdtlssrtpdemux.c',
+ 'gstdtlssrtpenc.c',
+ 'plugin.c',
+ 'gstdtlselement.c',
+ ]
+
+ # Tizen change: pin to the platform's openssl1.1/libcrypto1.1 pkg-config names.
++openssl_dep = dependency('openssl1.1', version : '>= 1.0.1', required : get_option('dtls'))
++libcrypto_dep = dependency('libcrypto1.1', required : get_option('dtls'))
+
+ # Plugin is built only when both OpenSSL libraries are available.
+ if openssl_dep.found() and libcrypto_dep.found()
+ gstdtls = library('gstdtls',
+ dtls_sources,
+ c_args : gst_plugins_bad_args,
+ include_directories : [configinc],
+ dependencies : [gst_dep, libcrypto_dep, openssl_dep] + winsock2,
+ install : true,
+ install_dir : plugins_install_dir,
+ )
+ pkgconfig.generate(gstdtls, install_dir : plugins_pkgconfig_install_dir)
+ plugins += [gstdtls]
+ endif
--- /dev/null
- media->mtype == GST_HLS_MEDIA_TYPE_VIDEO) {
+ /* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * Gsthlsdemux.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:element-hlsdemux
+ * @title: hlsdemux
+ *
+ * HTTP Live Streaming demuxer element.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 souphttpsrc location=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8 ! hlsdemux ! decodebin ! videoconvert ! videoscale ! autovideosink
+ * ]|
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <gst/base/gsttypefindhelper.h>
+ #include "gsthlselements.h"
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++#include <gst/tag/tag.h>
++#endif
+ #include "gsthlsdemux.h"
+
+ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-hls"));
+
+ GST_DEBUG_CATEGORY (gst_hls_demux_debug);
+ #define GST_CAT_DEFAULT gst_hls_demux_debug
+
+ #define GST_M3U8_CLIENT_LOCK(l) /* FIXME */
+ #define GST_M3U8_CLIENT_UNLOCK(l) /* FIXME */
+
++#ifdef TIZEN_FEATURE_HLSDEMUX_PROPERTY
++enum
++{
++ PROP_0,
++ PROP_IS_LIVE,
++ PROP_LIVE_START_TIME,
++ PROP_LIVE_END_TIME,
++ PROP_LAST
++};
++#endif
++
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT
++#define ABSDIFF(a,b) ((a) < (b) ? (b) - (a) : (a) - (b))
++#endif
++
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT_SEQUENCE
++#define DEFAULT_FAILED_COUNT 3
++#endif
++
+ /* GObject */
+ static void gst_hls_demux_finalize (GObject * obj);
++#ifdef TIZEN_FEATURE_HLSDEMUX_PROPERTY
++static void
++gst_hls_demux_get_property (GObject * object, guint prop_id,
++ GValue * value, GParamSpec * pspec);
++#endif
+
+ /* GstElement */
+ static GstStateChangeReturn
+ gst_hls_demux_change_state (GstElement * element, GstStateChange transition);
+
+ /* GstHLSDemux */
+ static gboolean gst_hls_demux_update_playlist (GstHLSDemux * demux,
+ gboolean update, GError ** err);
+ static gchar *gst_hls_src_buf_to_utf8_playlist (GstBuffer * buf);
+
+ /* FIXME: the return value is never used? */
+ static gboolean gst_hls_demux_change_playlist (GstHLSDemux * demux,
+ guint max_bitrate, gboolean * changed);
+ static GstBuffer *gst_hls_demux_decrypt_fragment (GstHLSDemux * demux,
+ GstHLSDemuxStream * stream, GstBuffer * encrypted_buffer, GError ** err);
+ static gboolean
+ gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+ const guint8 * key_data, const guint8 * iv_data);
+ static void gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream);
+
+ static gboolean gst_hls_demux_is_live (GstAdaptiveDemux * demux);
+ static GstClockTime gst_hls_demux_get_duration (GstAdaptiveDemux * demux);
+ static gint64 gst_hls_demux_get_manifest_update_interval (GstAdaptiveDemux *
+ demux);
+ static gboolean gst_hls_demux_process_manifest (GstAdaptiveDemux * demux,
+ GstBuffer * buf);
+ static GstFlowReturn gst_hls_demux_update_manifest (GstAdaptiveDemux * demux);
+ static gboolean gst_hls_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek);
+ static GstFlowReturn gst_hls_demux_stream_seek (GstAdaptiveDemuxStream *
+ stream, gboolean forward, GstSeekFlags flags, GstClockTime ts,
+ GstClockTime * final_ts);
+ static gboolean
+ gst_hls_demux_start_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn gst_hls_demux_finish_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn gst_hls_demux_data_received (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer);
+ static void gst_hls_demux_stream_free (GstAdaptiveDemuxStream * stream);
+ static gboolean gst_hls_demux_stream_has_next_fragment (GstAdaptiveDemuxStream *
+ stream);
+ static GstFlowReturn gst_hls_demux_advance_fragment (GstAdaptiveDemuxStream *
+ stream);
+ static GstFlowReturn gst_hls_demux_update_fragment_info (GstAdaptiveDemuxStream
+ * stream);
+ static gboolean gst_hls_demux_select_bitrate (GstAdaptiveDemuxStream * stream,
+ guint64 bitrate);
+ static void gst_hls_demux_reset (GstAdaptiveDemux * demux);
+ static gboolean gst_hls_demux_get_live_seek_range (GstAdaptiveDemux * demux,
+ gint64 * start, gint64 * stop);
+ static GstM3U8 *gst_hls_demux_stream_get_m3u8 (GstHLSDemuxStream * hls_stream);
+ static void gst_hls_demux_set_current_variant (GstHLSDemux * hlsdemux,
+ GstHLSVariantStream * variant);
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++static gboolean gst_hlsdemux_set_stream_event (GstAdaptiveDemuxStream * stream,
++ GstHLSMedia * media);
++static gboolean gst_hlsdemux_set_language_tags (GstAdaptiveDemuxStream * stream,
++ const gchar * language);
++#endif
+
+ #define gst_hls_demux_parent_class parent_class
+ G_DEFINE_TYPE (GstHLSDemux, gst_hls_demux, GST_TYPE_ADAPTIVE_DEMUX);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (hlsdemux, "hlsdemux", GST_RANK_PRIMARY,
+ GST_TYPE_HLS_DEMUX, hls_element_init (plugin));
+
+ static void
+ gst_hls_demux_finalize (GObject * obj)
+ {
+ GstHLSDemux *demux = GST_HLS_DEMUX (obj);
+
+ gst_hls_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+ g_mutex_clear (&demux->keys_lock);
+ if (demux->keys) {
+ g_hash_table_unref (demux->keys);
+ demux->keys = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+ }
+
+ static void
+ gst_hls_demux_class_init (GstHLSDemuxClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstAdaptiveDemuxClass *adaptivedemux_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ adaptivedemux_class = (GstAdaptiveDemuxClass *) klass;
+
++#ifdef TIZEN_FEATURE_HLSDEMUX_PROPERTY
++ gobject_class->get_property = gst_hls_demux_get_property;
++#endif
+ gobject_class->finalize = gst_hls_demux_finalize;
+
++#ifdef TIZEN_FEATURE_HLSDEMUX_PROPERTY
++ g_object_class_install_property (gobject_class, PROP_IS_LIVE,
++ g_param_spec_boolean ("is-live", "is-live", "Whether the source is live",
++ FALSE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
++
++ g_object_class_install_property (gobject_class, PROP_LIVE_START_TIME,
++ g_param_spec_uint64 ("live-start-time",
++ "start time of the first fragment",
++ "start time of the first fragment in the current media playlist in case of live",
++ 0, G_MAXUINT64, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
++
++ g_object_class_install_property (gobject_class, PROP_LIVE_END_TIME,
++ g_param_spec_uint64 ("live-end-time", "end time of the last fragment",
++ "end time of the last fragment in the current media playlist in case of live",
++ 0, G_MAXUINT64, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
++#endif
++
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_hls_demux_change_state);
+
+ gst_element_class_add_static_pad_template (element_class, &srctemplate);
+ gst_element_class_add_static_pad_template (element_class, &sinktemplate);
+
+ gst_element_class_set_static_metadata (element_class,
+ "HLS Demuxer",
+ "Codec/Demuxer/Adaptive",
+ "HTTP Live Streaming demuxer",
+ "Marc-Andre Lureau <marcandre.lureau@gmail.com>\n"
+ "Andoni Morales Alastruey <ylatuya@gmail.com>");
+
+ adaptivedemux_class->is_live = gst_hls_demux_is_live;
+ adaptivedemux_class->get_live_seek_range = gst_hls_demux_get_live_seek_range;
+ adaptivedemux_class->get_duration = gst_hls_demux_get_duration;
+ adaptivedemux_class->get_manifest_update_interval =
+ gst_hls_demux_get_manifest_update_interval;
+ adaptivedemux_class->process_manifest = gst_hls_demux_process_manifest;
+ adaptivedemux_class->update_manifest = gst_hls_demux_update_manifest;
+ adaptivedemux_class->reset = gst_hls_demux_reset;
+ adaptivedemux_class->seek = gst_hls_demux_seek;
+ adaptivedemux_class->stream_seek = gst_hls_demux_stream_seek;
+ adaptivedemux_class->stream_has_next_fragment =
+ gst_hls_demux_stream_has_next_fragment;
+ adaptivedemux_class->stream_advance_fragment = gst_hls_demux_advance_fragment;
+ adaptivedemux_class->stream_update_fragment_info =
+ gst_hls_demux_update_fragment_info;
+ adaptivedemux_class->stream_select_bitrate = gst_hls_demux_select_bitrate;
+ adaptivedemux_class->stream_free = gst_hls_demux_stream_free;
+
+ adaptivedemux_class->start_fragment = gst_hls_demux_start_fragment;
+ adaptivedemux_class->finish_fragment = gst_hls_demux_finish_fragment;
+ adaptivedemux_class->data_received = gst_hls_demux_data_received;
+
+ GST_DEBUG_CATEGORY_INIT (gst_hls_demux_debug, "hlsdemux", 0,
+ "hlsdemux element");
+ }
+
+ static void
+ gst_hls_demux_init (GstHLSDemux * demux)
+ {
+ gst_adaptive_demux_set_stream_struct_size (GST_ADAPTIVE_DEMUX_CAST (demux),
+ sizeof (GstHLSDemuxStream));
+
+ demux->keys = g_hash_table_new_full (g_str_hash, g_str_equal, g_free, g_free);
+ g_mutex_init (&demux->keys_lock);
+ }
+
++#ifdef TIZEN_FEATURE_HLSDEMUX_PROPERTY
++static void
++gst_hls_demux_get_property (GObject * object, guint prop_id,
++ GValue * value, GParamSpec * pspec)
++{
++ GstAdaptiveDemux *adaptivedemux = GST_ADAPTIVE_DEMUX (object);
++ GstHLSDemux *demux = GST_HLS_DEMUX (adaptivedemux);
++ GstHLSVariantStream *variant = demux->current_variant;
++ gboolean is_live = FALSE;
++
++ if (variant)
++ is_live = gst_hls_variant_stream_is_live (variant);
++
++ switch (prop_id) {
++ case PROP_IS_LIVE:
++ g_value_set_boolean (value, is_live);
++ break;
++ case PROP_LIVE_START_TIME:{
++ GstClockTime start_time = 0;
++ if (is_live)
++ start_time = variant->m3u8->first_file_start;
++ g_value_set_uint64 (value, start_time);
++ break;
++ }
++ case PROP_LIVE_END_TIME:{
++ GstClockTime end_time = 0;
++ if (is_live)
++ end_time = variant->m3u8->last_file_end;
++ g_value_set_uint64 (value, end_time);
++ break;
++ }
++ default:
++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
++ break;
++ }
++}
++#endif
++
+ static GstStateChangeReturn
+ gst_hls_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstHLSDemux *demux = GST_HLS_DEMUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_hls_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_hls_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+ g_hash_table_remove_all (demux->keys);
+ break;
+ default:
+ break;
+ }
+ return ret;
+ }
+
+ static GstPad *
+ gst_hls_demux_create_pad (GstHLSDemux * hlsdemux)
+ {
+ gchar *name;
+ GstPad *pad;
+
+ name = g_strdup_printf ("src_%u", hlsdemux->srcpad_counter++);
+ pad = gst_pad_new_from_static_template (&srctemplate, name);
+ g_free (name);
+
+ return pad;
+ }
+
+ static guint64
+ gst_hls_demux_get_bitrate (GstHLSDemux * hlsdemux)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (hlsdemux);
+
+ /* FIXME !!!
+ *
+ * No, there isn't a single output :D */
+
+ /* Valid because hlsdemux only has a single output */
+ if (demux->streams) {
+ GstAdaptiveDemuxStream *stream = demux->streams->data;
+ return stream->current_download_rate;
+ }
+
+ return 0;
+ }
+
+ static void
+ gst_hls_demux_stream_clear_pending_data (GstHLSDemuxStream * hls_stream)
+ {
+ if (hls_stream->pending_encrypted_data)
+ gst_adapter_clear (hls_stream->pending_encrypted_data);
+ gst_buffer_replace (&hls_stream->pending_decrypted_buffer, NULL);
+ gst_buffer_replace (&hls_stream->pending_typefind_buffer, NULL);
+ gst_buffer_replace (&hls_stream->pending_pcr_buffer, NULL);
+ hls_stream->current_offset = -1;
+ gst_hls_demux_stream_decrypt_end (hls_stream);
+ }
+
+ static void
+ gst_hls_demux_clear_all_pending_data (GstHLSDemux * hlsdemux)
+ {
+ GstAdaptiveDemux *demux = (GstAdaptiveDemux *) hlsdemux;
+ GList *walk;
+
+ for (walk = demux->streams; walk != NULL; walk = walk->next) {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (walk->data);
+ gst_hls_demux_stream_clear_pending_data (hls_stream);
+ }
+ }
+
+ #if 0
+ static void
+ gst_hls_demux_set_current (GstHLSDemux * self, GstM3U8 * m3u8)
+ {
+ GST_M3U8_CLIENT_LOCK (self);
+ if (m3u8 != self->current) {
+ self->current = m3u8;
+ self->current->duration = GST_CLOCK_TIME_NONE;
+ self->current->current_file = NULL;
+
+ #if 0
+ // FIXME: this makes no sense after we just set self->current=m3u8 above (tpm)
+ // also, these values don't necessarily align between different lists
+ m3u8->current_file_duration = self->current->current_file_duration;
+ m3u8->sequence = self->current->sequence;
+ m3u8->sequence_position = self->current->sequence_position;
+ m3u8->highest_sequence_number = self->current->highest_sequence_number;
+ m3u8->first_file_start = self->current->first_file_start;
+ m3u8->last_file_end = self->current->last_file_end;
+ #endif
+ }
+ GST_M3U8_CLIENT_UNLOCK (self);
+ }
+ #endif
+
+ #define SEEK_UPDATES_PLAY_POSITION(r, start_type, stop_type) \
+ ((r >= 0 && start_type != GST_SEEK_TYPE_NONE) || \
+ (r < 0 && stop_type != GST_SEEK_TYPE_NONE))
+
+ #define IS_SNAP_SEEK(f) (f & (GST_SEEK_FLAG_SNAP_BEFORE | \
+ GST_SEEK_FLAG_SNAP_AFTER | \
+ GST_SEEK_FLAG_SNAP_NEAREST | \
+ GST_SEEK_FLAG_TRICKMODE_KEY_UNITS | \
+ GST_SEEK_FLAG_KEY_UNIT))
+
+ static gboolean
+ gst_hls_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gdouble rate, old_rate;
+ GList *walk;
+ GstClockTime current_pos, target_pos, final_pos;
+ guint64 bitrate;
+
+ gst_event_parse_seek (seek, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (!SEEK_UPDATES_PLAY_POSITION (rate, start_type, stop_type)) {
+ /* nothing to do if we don't have to update the current position */
+ return TRUE;
+ }
+
+ old_rate = demux->segment.rate;
+
+ bitrate = gst_hls_demux_get_bitrate (hlsdemux);
+
+ /* Use I-frame variants for trick modes */
+ if (hlsdemux->master->iframe_variants != NULL
+ && rate < -1.0 && old_rate >= -1.0 && old_rate <= 1.0) {
+ GError *err = NULL;
+
+ /* Switch to I-frame variant */
+ gst_hls_demux_set_current_variant (hlsdemux,
+ hlsdemux->master->iframe_variants->data);
+ gst_uri_downloader_reset (demux->downloader);
+ if (!gst_hls_demux_update_playlist (hlsdemux, FALSE, &err)) {
+ GST_ELEMENT_ERROR_FROM_ERROR (hlsdemux, "Could not switch playlist", err);
+ return FALSE;
+ }
+ //hlsdemux->discont = TRUE;
+
+ gst_hls_demux_change_playlist (hlsdemux, bitrate / ABS (rate), NULL);
+ } else if (rate > -1.0 && rate <= 1.0 && (old_rate < -1.0 || old_rate > 1.0)) {
+ GError *err = NULL;
+ /* Switch to normal variant */
+ gst_hls_demux_set_current_variant (hlsdemux,
+ hlsdemux->master->variants->data);
+ gst_uri_downloader_reset (demux->downloader);
+ if (!gst_hls_demux_update_playlist (hlsdemux, FALSE, &err)) {
+ GST_ELEMENT_ERROR_FROM_ERROR (hlsdemux, "Could not switch playlist", err);
+ return FALSE;
+ }
+ //hlsdemux->discont = TRUE;
+ /* TODO why not continue using the same? that was being used up to now? */
+ gst_hls_demux_change_playlist (hlsdemux, bitrate, NULL);
+ }
+
+ target_pos = rate < 0 ? stop : start;
+ final_pos = target_pos;
+
+ /* properly cleanup pending decryption status */
+ if (flags & GST_SEEK_FLAG_FLUSH) {
+ gst_hls_demux_clear_all_pending_data (hlsdemux);
+ }
+
+ for (walk = demux->streams; walk; walk = g_list_next (walk)) {
+ GstAdaptiveDemuxStream *stream =
+ GST_ADAPTIVE_DEMUX_STREAM_CAST (walk->data);
+
+ gst_hls_demux_stream_seek (stream, rate >= 0, flags, target_pos,
+ ¤t_pos);
+
+ /* FIXME: use minimum position always ? */
++#ifdef TIZEN_FEATURE_HLSDEMUX_UPDATE_SEGMENT
++ if ((final_pos > current_pos) &&
++ (GST_HLS_DEMUX_STREAM_CAST (stream)->stream_type !=
++ GST_HLS_TSREADER_NONE))
++#else
+ if (final_pos > current_pos)
++#endif
+ final_pos = current_pos;
+ }
+
+ if (IS_SNAP_SEEK (flags)) {
+ if (rate >= 0)
+ gst_segment_do_seek (&demux->segment, rate, format, flags, start_type,
+ final_pos, stop_type, stop, NULL);
+ else
+ gst_segment_do_seek (&demux->segment, rate, format, flags, start_type,
+ start, stop_type, final_pos, NULL);
+ }
+
+ return TRUE;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_stream_seek (GstAdaptiveDemuxStream * stream, gboolean forward,
+ GstSeekFlags flags, GstClockTime ts, GstClockTime * final_ts)
+ {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+ GList *walk;
+ GstClockTime current_pos;
+ gint64 current_sequence;
+ gboolean snap_after, snap_nearest;
+ GstM3U8MediaFile *file = NULL;
+
+ current_sequence = 0;
+ current_pos = gst_m3u8_is_live (hls_stream->playlist) ?
+ hls_stream->playlist->first_file_start : 0;
+
+ /* Snap to segment boundary. Improves seek performance on slow machines. */
+ snap_nearest =
+ (flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST;
+ snap_after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+
+ GST_M3U8_CLIENT_LOCK (hlsdemux->client);
+ /* FIXME: Here we need proper discont handling */
+ for (walk = hls_stream->playlist->files; walk; walk = walk->next) {
+ file = walk->data;
+
+ current_sequence = file->sequence;
+ if ((forward && snap_after) || snap_nearest) {
+ if (current_pos >= ts)
+ break;
+ if (snap_nearest && ts - current_pos < file->duration / 2)
+ break;
+ } else if (!forward && snap_after) {
+ /* check if the next fragment is our target, in this case we want to
+ * start from the previous fragment */
+ GstClockTime next_pos = current_pos + file->duration;
+
+ if (next_pos <= ts && ts < next_pos + file->duration) {
+ break;
+ }
+ } else if (current_pos <= ts && ts < current_pos + file->duration) {
+ break;
+ }
+ current_pos += file->duration;
+ }
+
+ if (walk == NULL) {
+ GST_DEBUG_OBJECT (stream->pad, "seeking further than track duration");
+ current_sequence++;
+ }
+
+ GST_DEBUG_OBJECT (stream->pad, "seeking to sequence %u",
+ (guint) current_sequence);
+ hls_stream->reset_pts = TRUE;
+ hls_stream->playlist->sequence = current_sequence;
+ hls_stream->playlist->current_file = walk;
+ hls_stream->playlist->sequence_position = current_pos;
+ GST_M3U8_CLIENT_UNLOCK (hlsdemux->client);
+
+ /* Play from the end of the current selected segment */
+ if (file) {
+ if (!forward && IS_SNAP_SEEK (flags))
+ current_pos += file->duration;
+ }
+
+ /* update stream's segment position */
+ stream->segment.position = current_pos;
+
+ if (final_ts)
+ *final_ts = current_pos;
+
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_update_manifest (GstAdaptiveDemux * demux)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ if (!gst_hls_demux_update_playlist (hlsdemux, TRUE, NULL))
+ return GST_FLOW_ERROR;
+
+ return GST_FLOW_OK;
+ }
+
+ static void
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++create_stream_for_playlist (GstAdaptiveDemux * demux, GstM3U8 * playlist,
++ gboolean is_primary_playlist, gboolean selected, GstHLSMedia * media)
++#else
+ create_stream_for_playlist (GstAdaptiveDemux * demux, GstM3U8 * playlist,
+ gboolean is_primary_playlist, gboolean selected)
++#endif
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstHLSDemuxStream *hlsdemux_stream;
+ GstAdaptiveDemuxStream *stream;
+
+ if (!selected) {
+ /* FIXME: Later, create the stream but mark not-selected */
+ GST_LOG_OBJECT (demux, "Ignoring not-selected stream");
+ return;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "is_primary_playlist:%d selected:%d playlist name '%s'",
+ is_primary_playlist, selected, playlist->name);
+
+ stream = gst_adaptive_demux_stream_new (demux,
+ gst_hls_demux_create_pad (hlsdemux));
+
+ hlsdemux_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+
+ hlsdemux_stream->stream_type = GST_HLS_TSREADER_NONE;
+
+ hlsdemux_stream->playlist = gst_m3u8_ref (playlist);
+ hlsdemux_stream->is_primary_playlist = is_primary_playlist;
+
+ hlsdemux_stream->do_typefind = TRUE;
+ hlsdemux_stream->reset_pts = TRUE;
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT_SEQUENCE
++ hlsdemux_stream->failed_count = 0;
++#endif
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT
++ hlsdemux_stream->sequence_pos = GST_CLOCK_TIME_NONE;
++ hlsdemux_stream->last_pcr = GST_CLOCK_TIME_NONE;
++#endif
++
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++ if (media) {
++ gst_hlsdemux_set_stream_event (stream, media);
++ return;
++ }
++
++ for (GList * mlist =
++ hlsdemux->current_variant->media[GST_HLS_MEDIA_TYPE_AUDIO]; mlist;
++ mlist = g_list_next (mlist)) {
++ GstHLSMedia *media = mlist->data;
++ if (!media->uri && gst_hlsdemux_set_stream_event (stream, media))
++ return;
++ }
++#endif
+ }
+
+ static GstHLSDemuxStream *
+ find_adaptive_stream_for_playlist (GstAdaptiveDemux * demux, GstM3U8 * playlist)
+ {
+ GList *tmp;
+
+ GST_DEBUG_OBJECT (demux, "Looking for existing stream for '%s' %s",
+ playlist->name, playlist->uri);
+
+ for (tmp = demux->streams; tmp; tmp = tmp->next) {
+ GstHLSDemuxStream *hlsstream = (GstHLSDemuxStream *) tmp->data;
+ if (hlsstream->playlist == playlist)
+ return hlsstream;
+ }
+
+ return NULL;
+ }
+
+ /* Returns TRUE if the previous and current (to switch to) variant are compatible.
+ *
+ * That is:
+ * * They have the same number of streams
+ * * The streams are of the same type
+ */
+ static gboolean
+ new_variant_is_compatible (GstAdaptiveDemux * demux)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstHLSVariantStream *previous = hlsdemux->previous_variant;
+ GstHLSVariantStream *current = hlsdemux->current_variant;
+ gint i;
+
+ GST_DEBUG_OBJECT (demux,
+ "Checking whether new variant is compatible with previous");
+
+ for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+ GList *mlist = current->media[i];
+ if (g_list_length (previous->media[i]) != g_list_length (current->media[i])) {
+ GST_LOG_OBJECT (demux, "Number of medias for type %s don't match",
+ gst_hls_media_type_get_name (i));
+ return FALSE;
+ }
+
+ /* Check if all new media were present in previous (if not there are new ones) */
+ while (mlist != NULL) {
+ GstHLSMedia *media = mlist->data;
+ if (!gst_hls_variant_find_matching_media (previous, media)) {
+ GST_LOG_OBJECT (demux,
+ "New stream of type %s present. Variant not compatible",
+ gst_hls_media_type_get_name (i));
+ return FALSE;
+ }
+ mlist = mlist->next;
+ }
+
+ /* Check if all old media are present in current (if not some have gone) */
+ mlist = previous->media[i];
+ while (mlist != NULL) {
+ GstHLSMedia *media = mlist->data;
+ if (!gst_hls_variant_find_matching_media (current, media)) {
+ GST_LOG_OBJECT (demux,
+ "Old stream of type %s gone. Variant not compatible",
+ gst_hls_media_type_get_name (i));
+ return FALSE;
+ }
+ mlist = mlist->next;
+ }
+ }
+
+ GST_DEBUG_OBJECT (demux, "Variants are compatible");
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_hls_demux_setup_streams (GstAdaptiveDemux * demux)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstHLSVariantStream *playlist = hlsdemux->current_variant;
+ gint i;
+
+ if (playlist == NULL) {
+ GST_WARNING_OBJECT (demux, "Can't configure streams - no variant selected");
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Setting up streams");
+ if (hlsdemux->streams_aware && hlsdemux->previous_variant &&
+ new_variant_is_compatible (demux)) {
+ GstHLSDemuxStream *hlsstream;
+ GST_DEBUG_OBJECT (demux, "Have a previous variant, Re-using streams");
+
+ /* Carry over the main playlist */
+ hlsstream =
+ find_adaptive_stream_for_playlist (demux,
+ hlsdemux->previous_variant->m3u8);
+ if (G_UNLIKELY (hlsstream == NULL))
+ goto no_match_error;
+
+ gst_m3u8_unref (hlsstream->playlist);
+ hlsstream->playlist = gst_m3u8_ref (playlist->m3u8);
+
+ for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+ GList *mlist = playlist->media[i];
+ while (mlist != NULL) {
+ GstHLSMedia *media = mlist->data;
+ GstHLSMedia *old_media =
+ gst_hls_variant_find_matching_media (hlsdemux->previous_variant,
+ media);
+
+ if (G_UNLIKELY (old_media == NULL)) {
+ GST_FIXME_OBJECT (demux, "Handle new stream !");
+ goto no_match_error;
+ }
+ if (!g_strcmp0 (media->uri, old_media->uri))
+ GST_DEBUG_OBJECT (demux, "Identical stream !");
+ if (media->mtype == GST_HLS_MEDIA_TYPE_AUDIO ||
-
++ media->mtype == GST_HLS_MEDIA_TYPE_VIDEO ||
++ media->mtype == GST_HLS_MEDIA_TYPE_SUBTITLES) {
+ hlsstream =
+ find_adaptive_stream_for_playlist (demux, old_media->playlist);
+ if (!hlsstream)
+ goto no_match_error;
+
+ GST_DEBUG_OBJECT (demux, "Found matching stream");
+ gst_m3u8_unref (hlsstream->playlist);
+ hlsstream->playlist = gst_m3u8_ref (media->playlist);
+ } else {
+ GST_DEBUG_OBJECT (demux, "Skipping stream of type %s",
+ gst_hls_media_type_get_name (media->mtype));
+ }
+
+ mlist = mlist->next;
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* FIXME : This seems wrong and assumes there's only one stream :( */
+ gst_hls_demux_clear_all_pending_data (hlsdemux);
+
+ /* 1 output for the main playlist */
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++ create_stream_for_playlist (demux, playlist->m3u8, TRUE, TRUE, NULL);
++#else
+ create_stream_for_playlist (demux, playlist->m3u8, TRUE, TRUE);
- || media->mtype == GST_HLS_MEDIA_TYPE_AUDIO));
-
++#endif
+ for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+ GList *mlist = playlist->media[i];
+ while (mlist != NULL) {
+ GstHLSMedia *media = mlist->data;
+
+ if (media->uri == NULL /* || media->mtype != GST_HLS_MEDIA_TYPE_AUDIO */ ) {
+ /* No uri means this is a placeholder for a stream
+ * contained in another mux */
+ GST_LOG_OBJECT (demux, "Skipping stream %s type %s with no URI",
+ media->name, gst_hls_media_type_get_name (media->mtype));
+ mlist = mlist->next;
+ continue;
+ }
+ GST_LOG_OBJECT (demux, "media of type %s - %s, uri: %s",
+ gst_hls_media_type_get_name (i), media->name, media->uri);
+ create_stream_for_playlist (demux, media->playlist, FALSE,
+ (media->mtype == GST_HLS_MEDIA_TYPE_VIDEO
-
++ || media->mtype == GST_HLS_MEDIA_TYPE_AUDIO
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++ || media->mtype == GST_HLS_MEDIA_TYPE_SUBTITLES), media);
++#else
++ || media->mtype == GST_HLS_MEDIA_TYPE_SUBTITLES));
++#endif
+ mlist = mlist->next;
+ }
+ }
+
+ return TRUE;
+
+ no_match_error:
+ {
+ /* POST ERROR MESSAGE */
+ GST_ERROR_OBJECT (demux, "Should not happen ! Could not find old stream");
+ return FALSE;
+ }
+ }
+
+ static const gchar *
+ gst_adaptive_demux_get_manifest_ref_uri (GstAdaptiveDemux * d)
+ {
+ return d->manifest_base_uri ? d->manifest_base_uri : d->manifest_uri;
+ }
+
+ static void
+ gst_hls_demux_set_current_variant (GstHLSDemux * hlsdemux,
+ GstHLSVariantStream * variant)
+ {
+ if (hlsdemux->current_variant == variant || variant == NULL)
+ return;
+
++ GST_INFO_OBJECT (hlsdemux, "%s variant %d, %d x %d", variant->name,
++ variant->bandwidth, variant->width, variant->height);
+ if (hlsdemux->current_variant != NULL) {
+ gint i;
+
+ //#warning FIXME: Syncing fragments across variants
+ // should be done based on media timestamps, and
+ // discont-sequence-numbers not sequence numbers.
+ variant->m3u8->sequence_position =
+ hlsdemux->current_variant->m3u8->sequence_position;
+ variant->m3u8->sequence = hlsdemux->current_variant->m3u8->sequence;
+
+ GST_DEBUG_OBJECT (hlsdemux,
+ "Switching Variant. Copying over sequence %" G_GINT64_FORMAT
+ " and sequence_pos %" GST_TIME_FORMAT, variant->m3u8->sequence,
+ GST_TIME_ARGS (variant->m3u8->sequence_position));
+
+ for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+ GList *mlist = hlsdemux->current_variant->media[i];
+
+ while (mlist != NULL) {
+ GstHLSMedia *old_media = mlist->data;
+ GstHLSMedia *new_media =
+ gst_hls_variant_find_matching_media (variant, old_media);
+
+ if (new_media) {
+ GST_LOG_OBJECT (hlsdemux, "Found matching GstHLSMedia");
+ GST_LOG_OBJECT (hlsdemux, "old_media '%s' '%s'", old_media->name,
+ old_media->uri);
+ GST_LOG_OBJECT (hlsdemux, "new_media '%s' '%s'", new_media->name,
+ new_media->uri);
+ new_media->playlist->sequence = old_media->playlist->sequence;
+ new_media->playlist->sequence_position =
+ old_media->playlist->sequence_position;
+ } else {
+ GST_LOG_OBJECT (hlsdemux,
+ "Didn't find a matching variant for '%s' '%s'", old_media->name,
+ old_media->uri);
+ }
+ mlist = mlist->next;
+ }
+ }
+
+ if (hlsdemux->previous_variant)
+ gst_hls_variant_stream_unref (hlsdemux->previous_variant);
+ /* Steal the reference */
+ hlsdemux->previous_variant = hlsdemux->current_variant;
+ }
+
+ hlsdemux->current_variant = gst_hls_variant_stream_ref (variant);
+
+ }
+
+ static gboolean
+ gst_hls_demux_process_manifest (GstAdaptiveDemux * demux, GstBuffer * buf)
+ {
+ GstHLSVariantStream *variant;
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ gchar *playlist = NULL;
+
+ GST_INFO_OBJECT (demux, "Initial playlist location: %s (base uri: %s)",
+ demux->manifest_uri, demux->manifest_base_uri);
+
+ playlist = gst_hls_src_buf_to_utf8_playlist (buf);
+ if (playlist == NULL) {
+ GST_WARNING_OBJECT (demux, "Error validating initial playlist");
+ return FALSE;
+ }
+
+ GST_M3U8_CLIENT_LOCK (self);
+ hlsdemux->master = gst_hls_master_playlist_new_from_data (playlist,
+ gst_adaptive_demux_get_manifest_ref_uri (demux));
+
+ if (hlsdemux->master == NULL || hlsdemux->master->variants == NULL) {
+ /* In most cases, this will happen if we set a wrong url in the
+ * source element and we have received the 404 HTML response instead of
+ * the playlist */
+ GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."),
+ ("Could not parse playlist. Check if the URL is correct."));
+ GST_M3U8_CLIENT_UNLOCK (self);
+ return FALSE;
+ }
+
+ /* select the initial variant stream */
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ variant =
++ gst_hls_master_playlist_get_variant_for_bandwitdh_limit (hlsdemux->master,
++ NULL, demux->connection_speed, demux->start_bandwidth,
++ demux->min_bandwidth, demux->max_bandwidth, demux->max_width,
++ demux->max_height);
++
++ if (!variant)
++ variant = hlsdemux->master->default_variant;
++
++ if (variant) {
++ GST_INFO_OBJECT (hlsdemux, "selected %s, %d, %d x %d",
++ variant->name, variant->bandwidth, variant->width, variant->height);
++ gst_hls_demux_set_current_variant (hlsdemux, variant);
++ }
++#else
+ if (demux->connection_speed == 0) {
+ variant = hlsdemux->master->default_variant;
+ } else {
+ variant =
+ gst_hls_master_playlist_get_variant_for_bitrate (hlsdemux->master,
+ NULL, demux->connection_speed);
+ }
-
+ if (variant) {
+ GST_INFO_OBJECT (hlsdemux, "selected %s", variant->name);
+ gst_hls_demux_set_current_variant (hlsdemux, variant); // FIXME: inline?
+ }
++#endif
++
++#ifdef TIZEN_FEATURE_AD
++ if (variant) {
++ GST_DEBUG_OBJECT (hlsdemux, "post AD info");
++ gst_element_post_message (GST_ELEMENT_CAST (hlsdemux),
++ gst_message_new_element (GST_OBJECT_CAST (hlsdemux),
++ gst_structure_new ("adaptive-ad-info",
++ "ad-info", G_TYPE_POINTER, variant->m3u8->ad_info, NULL)));
++
++ GST_DEBUG_OBJECT (hlsdemux, "post current bandwidth info : %d",
++ variant->bandwidth);
++ gst_element_post_message (GST_ELEMENT_CAST (hlsdemux),
++ gst_message_new_element (GST_OBJECT_CAST (hlsdemux),
++ gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
++ "bitrate", G_TYPE_INT, variant->bandwidth, NULL)));
++ }
++#endif
+
+ /* get the selected media playlist (unless the initial list was one already) */
+ if (!hlsdemux->master->is_simple) {
+ GError *err = NULL;
+
+ if (!gst_hls_demux_update_playlist (hlsdemux, FALSE, &err)) {
+ GST_ELEMENT_ERROR_FROM_ERROR (demux, "Could not fetch media playlist",
+ err);
+ GST_M3U8_CLIENT_UNLOCK (self);
+ return FALSE;
+ }
+ }
+ GST_M3U8_CLIENT_UNLOCK (self);
+
+ return gst_hls_demux_setup_streams (demux);
+ }
+
+ static GstClockTime
+ gst_hls_demux_get_duration (GstAdaptiveDemux * demux)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+ if (hlsdemux->current_variant != NULL)
+ duration = gst_m3u8_get_duration (hlsdemux->current_variant->m3u8);
+
+ return duration;
+ }
+
+ static gboolean
+ gst_hls_demux_is_live (GstAdaptiveDemux * demux)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ gboolean is_live = FALSE;
+
+ if (hlsdemux->current_variant)
+ is_live = gst_hls_variant_stream_is_live (hlsdemux->current_variant);
+
+ return is_live;
+ }
+
+ static const GstHLSKey *
+ gst_hls_demux_get_key (GstHLSDemux * demux, const gchar * key_url,
+ const gchar * referer, gboolean allow_cache)
+ {
+ GstFragment *key_fragment;
+ GstBuffer *key_buffer;
+ GstHLSKey *key;
+ GError *err = NULL;
+
+ GST_LOG_OBJECT (demux, "Looking up key for key url %s", key_url);
+
+ g_mutex_lock (&demux->keys_lock);
+
+ key = g_hash_table_lookup (demux->keys, key_url);
+
+ if (key != NULL) {
+ GST_LOG_OBJECT (demux, "Found key for key url %s in key cache", key_url);
+ goto out;
+ }
+
+ GST_INFO_OBJECT (demux, "Fetching key %s", key_url);
- /* Won't need this mapping any more all paths return inside this if() */
- gst_buffer_unmap (buffer, &info);
-
- /* Only fail typefinding if we already a good amount of data
- * and we still don't know the type */
- if (buffer_size > (2 * 1024 * 1024) || at_eos) {
- GST_ELEMENT_ERROR (hlsdemux, STREAM, TYPE_NOT_FOUND,
- ("Could not determine type of stream"), (NULL));
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ key_fragment =
++ gst_uri_downloader_fetch_uri (GST_ADAPTIVE_DEMUX (demux)->downloader,
++ key_url, referer, GST_ADAPTIVE_DEMUX (demux)->user_agent,
++ GST_ADAPTIVE_DEMUX (demux)->cookies, DEFAULT_ADAPTIVE_RETRY,
++ DEFAULT_ADAPTIVE_TIMEOUT, FALSE, FALSE, allow_cache, &err);
++#else
+ key_fragment =
+ gst_uri_downloader_fetch_uri (GST_ADAPTIVE_DEMUX (demux)->downloader,
+ key_url, referer, FALSE, FALSE, allow_cache, &err);
++#endif
+
+ if (key_fragment == NULL) {
+ GST_WARNING_OBJECT (demux, "Failed to download key to decrypt data: %s",
+ err ? err->message : "error");
+ g_clear_error (&err);
+ goto out;
+ }
+
+ key_buffer = gst_fragment_get_buffer (key_fragment);
+
+ key = g_new0 (GstHLSKey, 1);
+ if (gst_buffer_extract (key_buffer, 0, key->data, 16) < 16)
+ GST_WARNING_OBJECT (demux, "Download decryption key is too short!");
+
+ g_hash_table_insert (demux->keys, g_strdup (key_url), key);
+
+ gst_buffer_unref (key_buffer);
+ g_object_unref (key_fragment);
+
+ out:
+
+ g_mutex_unlock (&demux->keys_lock);
+
+ if (key != NULL)
+ GST_MEMDUMP_OBJECT (demux, "Key", key->data, 16);
+
+ return key;
+ }
+
+ static gboolean
+ gst_hls_demux_start_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ const GstHLSKey *key;
+ GstM3U8 *m3u8;
+
+ gst_hls_demux_stream_clear_pending_data (hls_stream);
+
+ /* Init the timestamp reader for this fragment */
+ gst_hlsdemux_tsreader_init (&hls_stream->tsreader);
+ /* Reset the stream type if we already know it */
+ gst_hlsdemux_tsreader_set_type (&hls_stream->tsreader,
+ hls_stream->stream_type);
+
+ /* If no decryption is needed, there's nothing to be done here */
+ if (hls_stream->current_key == NULL)
+ return TRUE;
+
+ m3u8 = gst_hls_demux_stream_get_m3u8 (hls_stream);
+
+ key = gst_hls_demux_get_key (hlsdemux, hls_stream->current_key,
+ m3u8->uri, m3u8->allowcache);
+
+ if (key == NULL)
+ goto key_failed;
+
+ if (!gst_hls_demux_stream_decrypt_start (hls_stream, key->data,
+ hls_stream->current_iv))
+ goto decrypt_start_failed;
+
+ return TRUE;
+
+ key_failed:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DECRYPT_NOKEY,
+ ("Couldn't retrieve key for decryption"), (NULL));
+ GST_WARNING_OBJECT (demux, "Failed to decrypt data");
+ return FALSE;
+ }
+ decrypt_start_failed:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DECRYPT, ("Failed to start decrypt"),
+ ("Couldn't set key and IV or plugin was built without crypto library"));
+ return FALSE;
+ }
+ }
+
+ static GstHLSTSReaderType
+ caps_to_reader (const GstCaps * caps)
+ {
+ const GstStructure *s = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_has_name (s, "video/mpegts"))
+ return GST_HLS_TSREADER_MPEGTS;
+ if (gst_structure_has_name (s, "application/x-id3"))
+ return GST_HLS_TSREADER_ID3;
+
+ return GST_HLS_TSREADER_NONE;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_handle_buffer (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer, gboolean at_eos)
+ {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream); // FIXME: pass HlsStream into function
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstClockTime first_pcr, last_pcr;
+ GstTagList *tags;
+
+ if (buffer == NULL)
+ return GST_FLOW_OK;
+
+ if (G_UNLIKELY (hls_stream->do_typefind)) {
+ GstCaps *caps = NULL;
+ guint buffer_size;
+ GstTypeFindProbability prob = GST_TYPE_FIND_NONE;
+ GstMapInfo info;
+
+ if (hls_stream->pending_typefind_buffer)
+ buffer = gst_buffer_append (hls_stream->pending_typefind_buffer, buffer);
+ hls_stream->pending_typefind_buffer = NULL;
+
+ gst_buffer_map (buffer, &info, GST_MAP_READ);
+ buffer_size = info.size;
+
+ /* Typefind could miss if buffer is too small. In this case we
+ * will retry later */
+ if (buffer_size >= (2 * 1024) || at_eos) {
+ caps =
+ gst_type_find_helper_for_data (GST_OBJECT_CAST (hlsdemux), info.data,
+ info.size, &prob);
+ }
+
+ if (G_UNLIKELY (!caps)) {
- return GST_FLOW_NOT_NEGOTIATED;
- }
++#ifdef TIZEN_FEATURE_HLSDEMUX_EMPTY_VTT
++ if (at_eos && info.data
++ && g_strstr_len ((const gchar *) info.data, info.size, "WEBVTT")) {
++ gchar *dummy =
++ g_strdup ("WEBVTT\nX-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:0");
++ GstBuffer *dummy_buffer =
++ gst_buffer_new_wrapped (dummy, strlen (dummy));
++
++ gst_buffer_unmap (buffer, &info);
+ gst_buffer_unref (buffer);
- hls_stream->pending_typefind_buffer = buffer;
+
- return GST_FLOW_OK;
++ GST_WARNING_OBJECT (stream->pad,
++ "replace the empty VTT buffer with dummy");
+
-
++ buffer = dummy_buffer;
++ gst_buffer_map (buffer, &info, GST_MAP_READ);
++
++ caps = gst_caps_new_simple ("application/x-subtitle-vtt",
++ "parsed", G_TYPE_BOOLEAN, FALSE, NULL);
++ } else
++#endif
++ {
++ /* Won't need this mapping any more; all paths return inside this if() */
++ gst_buffer_unmap (buffer, &info);
++
++ /* Only fail typefinding if we already have a good amount of data
++ * and we still don't know the type */
++ if (buffer_size > (2 * 1024 * 1024) || at_eos) {
++ GST_ELEMENT_ERROR (hlsdemux, STREAM, TYPE_NOT_FOUND,
++ ("Could not determine type of stream"), (NULL));
++ gst_buffer_unref (buffer);
++ return GST_FLOW_NOT_NEGOTIATED;
++ }
++
++ hls_stream->pending_typefind_buffer = buffer;
++
++ return GST_FLOW_OK;
++ }
+ }
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Typefind result: %" GST_PTR_FORMAT " prob:%d", caps, prob);
+
+ hls_stream->stream_type = caps_to_reader (caps);
+ gst_hlsdemux_tsreader_set_type (&hls_stream->tsreader,
+ hls_stream->stream_type);
+
+ gst_adaptive_demux_stream_set_caps (stream, caps);
+
+ hls_stream->do_typefind = FALSE;
+
+ gst_buffer_unmap (buffer, &info);
+ }
+ g_assert (hls_stream->pending_typefind_buffer == NULL);
+
+ // Accumulate this buffer
+ if (hls_stream->pending_pcr_buffer) {
+ buffer = gst_buffer_append (hls_stream->pending_pcr_buffer, buffer);
+ hls_stream->pending_pcr_buffer = NULL;
+ }
+
+ if (!gst_hlsdemux_tsreader_find_pcrs (&hls_stream->tsreader, &buffer,
+ &first_pcr, &last_pcr, &tags)
+ && !at_eos) {
+ // Store this buffer for later
+ hls_stream->pending_pcr_buffer = buffer;
+ return GST_FLOW_OK;
+ }
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT
++ if (stream->first_fragment_buffer && !stream->discont
++ && GST_CLOCK_TIME_IS_VALID (hls_stream->last_pcr)
++ && GST_CLOCK_TIME_IS_VALID (last_pcr)) {
++ if (G_UNLIKELY (ABSDIFF (hls_stream->last_pcr, last_pcr) > 1 * GST_SECOND)
++ && (stream->fragment.timestamp != hls_stream->sequence_pos)) {
++ GST_DEBUG_OBJECT (stream->pad,
++ "Overwriting fragment timestamp [%" GST_TIME_FORMAT "] to [%"
++ GST_TIME_FORMAT "]", GST_TIME_ARGS (stream->fragment.timestamp),
++ GST_TIME_ARGS (hls_stream->sequence_pos));
++ stream->fragment.timestamp = hls_stream->sequence_pos;
++ stream->discont = TRUE;
++ }
++ }
++
++ if (GST_CLOCK_TIME_IS_VALID (last_pcr))
++ hls_stream->last_pcr = last_pcr;
++#endif
+
+ if (tags) {
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++ if (stream->pending_tags)
++ gst_tag_list_insert (tags, stream->pending_tags, GST_TAG_MERGE_APPEND);
++#endif
+ gst_adaptive_demux_stream_set_tags (stream, tags);
+ /* run typefind again on the trimmed buffer */
+ hls_stream->do_typefind = TRUE;
+ return gst_hls_demux_handle_buffer (demux, stream, buffer, at_eos);
+ }
+
+ if (buffer) {
+ buffer = gst_buffer_make_writable (buffer);
+ GST_BUFFER_OFFSET (buffer) = hls_stream->current_offset;
+ hls_stream->current_offset += gst_buffer_get_size (buffer);
+ GST_BUFFER_OFFSET_END (buffer) = hls_stream->current_offset;
+ return gst_adaptive_demux_stream_push_buffer (stream, buffer);
+ }
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_finish_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream); // FIXME: pass HlsStream into function
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (hls_stream->current_key)
+ gst_hls_demux_stream_decrypt_end (hls_stream);
+
+ if (stream->last_ret == GST_FLOW_OK) {
+ if (hls_stream->pending_decrypted_buffer) {
+ if (hls_stream->current_key) {
+ GstMapInfo info;
+ gssize unpadded_size;
+
+ /* Handle pkcs7 unpadding here */
+ gst_buffer_map (hls_stream->pending_decrypted_buffer, &info,
+ GST_MAP_READ);
+ unpadded_size = info.size - info.data[info.size - 1];
+ gst_buffer_unmap (hls_stream->pending_decrypted_buffer, &info);
+
+ gst_buffer_resize (hls_stream->pending_decrypted_buffer, 0,
+ unpadded_size);
+ }
+
+ ret =
+ gst_hls_demux_handle_buffer (demux, stream,
+ hls_stream->pending_decrypted_buffer, TRUE);
+ hls_stream->pending_decrypted_buffer = NULL;
+ }
+
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED) {
+ if (G_UNLIKELY (hls_stream->pending_typefind_buffer)) {
+ GstBuffer *buf = hls_stream->pending_typefind_buffer;
+ hls_stream->pending_typefind_buffer = NULL;
+
+ gst_hls_demux_handle_buffer (demux, stream, buf, TRUE);
+ }
+
+ if (hls_stream->pending_pcr_buffer) {
+ GstBuffer *buf = hls_stream->pending_pcr_buffer;
+ hls_stream->pending_pcr_buffer = NULL;
+
+ ret = gst_hls_demux_handle_buffer (demux, stream, buf, TRUE);
+ }
+
+ GST_LOG_OBJECT (stream->pad,
+ "Fragment PCRs were %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (hls_stream->tsreader.first_pcr),
+ GST_TIME_ARGS (hls_stream->tsreader.last_pcr));
+ }
+ }
+
+ if (G_UNLIKELY (stream->downloading_header || stream->downloading_index))
+ return GST_FLOW_OK;
+
+ gst_hls_demux_stream_clear_pending_data (hls_stream);
+
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED)
+ return gst_adaptive_demux_stream_advance_fragment (demux, stream,
+ stream->fragment.duration);
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_data_received (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer)
+ {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+
+ if (hls_stream->current_offset == -1)
+ hls_stream->current_offset = 0;
+
+ /* Is it encrypted? */
+ if (hls_stream->current_key) {
+ GError *err = NULL;
+ gsize size;
+ GstBuffer *tmp_buffer;
+
+ if (hls_stream->pending_encrypted_data == NULL)
+ hls_stream->pending_encrypted_data = gst_adapter_new ();
+
+ gst_adapter_push (hls_stream->pending_encrypted_data, buffer);
+ size = gst_adapter_available (hls_stream->pending_encrypted_data);
+
+ /* must be a multiple of 16 */
+ size &= (~0xF);
+
+ if (size == 0) {
+ return GST_FLOW_OK;
+ }
+
+ buffer = gst_adapter_take_buffer (hls_stream->pending_encrypted_data, size);
+ buffer =
+ gst_hls_demux_decrypt_fragment (hlsdemux, hls_stream, buffer, &err);
+ if (buffer == NULL) {
+ GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Failed to decrypt buffer"),
+ ("decryption failed %s", err->message));
+ g_error_free (err);
+ return GST_FLOW_ERROR;
+ }
+
+ tmp_buffer = hls_stream->pending_decrypted_buffer;
+ hls_stream->pending_decrypted_buffer = buffer;
+ buffer = tmp_buffer;
+ }
+
+ return gst_hls_demux_handle_buffer (demux, stream, buffer, FALSE);
+ }
+
+ static void
+ gst_hls_demux_stream_free (GstAdaptiveDemuxStream * stream)
+ {
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+
+ if (hls_stream->playlist) {
+ gst_m3u8_unref (hls_stream->playlist);
+ hls_stream->playlist = NULL;
+ }
+
+ if (hls_stream->pending_encrypted_data)
+ g_object_unref (hls_stream->pending_encrypted_data);
+
+ gst_buffer_replace (&hls_stream->pending_decrypted_buffer, NULL);
+ gst_buffer_replace (&hls_stream->pending_typefind_buffer, NULL);
+ gst_buffer_replace (&hls_stream->pending_pcr_buffer, NULL);
+
+ if (hls_stream->current_key) {
+ g_free (hls_stream->current_key);
+ hls_stream->current_key = NULL;
+ }
+ if (hls_stream->current_iv) {
+ g_free (hls_stream->current_iv);
+ hls_stream->current_iv = NULL;
+ }
+ gst_hls_demux_stream_decrypt_end (hls_stream);
+ }
+
+ static GstM3U8 *
+ gst_hls_demux_stream_get_m3u8 (GstHLSDemuxStream * hlsdemux_stream)
+ {
+ GstM3U8 *m3u8;
+
+ m3u8 = hlsdemux_stream->playlist;
+
+ return m3u8;
+ }
+
+ static gboolean
+ gst_hls_demux_stream_has_next_fragment (GstAdaptiveDemuxStream * stream)
+ {
+ gboolean has_next;
+ GstM3U8 *m3u8;
+
+ m3u8 = gst_hls_demux_stream_get_m3u8 (GST_HLS_DEMUX_STREAM_CAST (stream));
+
+ has_next = gst_m3u8_has_next_fragment (m3u8, stream->demux->segment.rate > 0);
+
+ return has_next;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_advance_fragment (GstAdaptiveDemuxStream * stream)
+ {
+ GstHLSDemuxStream *hlsdemux_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+ GstM3U8 *m3u8;
+
+ m3u8 = gst_hls_demux_stream_get_m3u8 (hlsdemux_stream);
+
+ gst_m3u8_advance_fragment (m3u8, stream->demux->segment.rate > 0);
+ hlsdemux_stream->reset_pts = FALSE;
+
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_hls_demux_update_fragment_info (GstAdaptiveDemuxStream * stream)
+ {
+ GstHLSDemuxStream *hlsdemux_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (stream->demux);
+ GstM3U8MediaFile *file;
+ GstClockTime sequence_pos;
+ gboolean discont, forward;
+ GstM3U8 *m3u8;
+
+ m3u8 = gst_hls_demux_stream_get_m3u8 (hlsdemux_stream);
+
+ forward = (stream->demux->segment.rate > 0);
+ file = gst_m3u8_get_next_fragment (m3u8, forward, &sequence_pos, &discont);
+
+ if (file == NULL) {
+ GST_INFO_OBJECT (hlsdemux, "This playlist doesn't contain more fragments");
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT_SEQUENCE
++ if (++hlsdemux_stream->failed_count > DEFAULT_FAILED_COUNT) {
++ GST_WARNING_OBJECT (hlsdemux,
++ "Reset media sequence(fail %d times to gst_m3u8_get_next_fragment)",
++ hlsdemux_stream->failed_count);
++ m3u8->sequence = 0;
++ }
++#endif
+ return GST_FLOW_EOS;
+ }
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT_SEQUENCE
++ hlsdemux_stream->failed_count = 0;
++#endif
+
+ if (GST_ADAPTIVE_DEMUX_STREAM_NEED_HEADER (stream) && file->init_file) {
+ GstM3U8InitFile *header_file = file->init_file;
+ stream->fragment.header_uri = g_strdup (header_file->uri);
+ stream->fragment.header_range_start = header_file->offset;
+ if (header_file->size != -1) {
+ stream->fragment.header_range_end =
+ header_file->offset + header_file->size - 1;
+ } else {
+ stream->fragment.header_range_end = -1;
+ }
+ }
+
+ if (stream->discont)
+ discont = TRUE;
+
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT
++ hlsdemux_stream->sequence_pos = sequence_pos;
++#endif
+ /* set up our source for download */
+ if (hlsdemux_stream->reset_pts || discont
+ || stream->demux->segment.rate < 0.0) {
+ stream->fragment.timestamp = sequence_pos;
+ } else {
+ stream->fragment.timestamp = GST_CLOCK_TIME_NONE;
+ }
+
+ g_free (hlsdemux_stream->current_key);
+ hlsdemux_stream->current_key = g_strdup (file->key);
+ g_free (hlsdemux_stream->current_iv);
+ hlsdemux_stream->current_iv = g_memdup2 (file->iv, sizeof (file->iv));
+
+ g_free (stream->fragment.uri);
+ stream->fragment.uri = g_strdup (file->uri);
+
+ GST_DEBUG_OBJECT (hlsdemux, "Stream %p URI now %s", stream, file->uri);
+
+ stream->fragment.range_start = file->offset;
+ if (file->size != -1)
+ stream->fragment.range_end = file->offset + file->size - 1;
+ else
+ stream->fragment.range_end = -1;
+
+ stream->fragment.duration = file->duration;
+
+ if (discont)
+ stream->discont = TRUE;
+
+ gst_m3u8_media_file_unref (file);
+
+ return GST_FLOW_OK;
+ }
+
+ static gboolean
+ gst_hls_demux_select_bitrate (GstAdaptiveDemuxStream * stream, guint64 bitrate)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (stream->demux);
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (stream->demux);
+ GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+
+ gboolean changed = FALSE;
+
+ GST_M3U8_CLIENT_LOCK (hlsdemux->client);
+ if (hlsdemux->master == NULL || hlsdemux->master->is_simple) {
+ GST_M3U8_CLIENT_UNLOCK (hlsdemux->client);
+ return FALSE;
+ }
+ GST_M3U8_CLIENT_UNLOCK (hlsdemux->client);
+
+ if (hls_stream->is_primary_playlist == FALSE) {
+ GST_LOG_OBJECT (hlsdemux,
+ "Stream %p Not choosing new bitrate - not the primary stream", stream);
+ return FALSE;
+ }
+
+ gst_hls_demux_change_playlist (hlsdemux, bitrate / MAX (1.0,
+ ABS (demux->segment.rate)), &changed);
+ if (changed)
+ gst_hls_demux_setup_streams (GST_ADAPTIVE_DEMUX_CAST (hlsdemux));
+ return changed;
+ }
+
+ static void
+ gst_hls_demux_reset (GstAdaptiveDemux * ademux)
+ {
+ GstHLSDemux *demux = GST_HLS_DEMUX_CAST (ademux);
+
+ GST_DEBUG_OBJECT (demux, "resetting");
+
+ GST_M3U8_CLIENT_LOCK (hlsdemux->client);
+ if (demux->master) {
+ gst_hls_master_playlist_unref (demux->master);
+ demux->master = NULL;
+ }
+ if (demux->current_variant != NULL) {
+ gst_hls_variant_stream_unref (demux->current_variant);
+ demux->current_variant = NULL;
+ }
+ if (demux->previous_variant != NULL) {
+ gst_hls_variant_stream_unref (demux->previous_variant);
+ demux->previous_variant = NULL;
+ }
+ demux->srcpad_counter = 0;
+ demux->streams_aware = GST_OBJECT_PARENT (demux)
+ && GST_OBJECT_FLAG_IS_SET (GST_OBJECT_PARENT (demux),
+ GST_BIN_FLAG_STREAMS_AWARE);
+ GST_DEBUG_OBJECT (demux, "Streams aware : %d", demux->streams_aware);
+
+ gst_hls_demux_clear_all_pending_data (demux);
+ GST_M3U8_CLIENT_UNLOCK (hlsdemux->client);
+ }
+
+ static gchar *
+ gst_hls_src_buf_to_utf8_playlist (GstBuffer * buf)
+ {
+ GstMapInfo info;
+ gchar *playlist;
+
+ if (!gst_buffer_map (buf, &info, GST_MAP_READ))
+ goto map_error;
+
+ if (!g_utf8_validate ((gchar *) info.data, info.size, NULL))
+ goto validate_error;
+
+ /* alloc size + 1 to end with a null character */
+ playlist = g_malloc0 (info.size + 1);
+ memcpy (playlist, info.data, info.size);
+
+ gst_buffer_unmap (buf, &info);
+ return playlist;
+
+ validate_error:
+ gst_buffer_unmap (buf, &info);
+ map_error:
+ return NULL;
+ }
+
+ static gint
+ gst_hls_demux_find_variant_match (const GstHLSVariantStream * a,
+ const GstHLSVariantStream * b)
+ {
+ if (g_strcmp0 (a->name, b->name) == 0 &&
+ a->bandwidth == b->bandwidth &&
+ a->program_id == b->program_id &&
+ g_strcmp0 (a->codecs, b->codecs) == 0 &&
+ a->width == b->width &&
+ a->height == b->height && a->iframe == b->iframe) {
+ return 0;
+ }
+
+ return 1;
+ }
+
+ /* Update the master playlist, which contains the list of available
+ * variants */
+ static gboolean
+ gst_hls_demux_update_variant_playlist (GstHLSDemux * hlsdemux, gchar * data,
+ const gchar * uri, const gchar * base_uri)
+ {
+ GstHLSMasterPlaylist *new_master, *old;
+ gboolean ret = FALSE;
+ GList *l, *unmatched_lists;
+ GstHLSVariantStream *new_variant;
+
+ new_master = gst_hls_master_playlist_new_from_data (data, base_uri ? base_uri : uri); // FIXME: check which uri to use here
+
+ if (new_master == NULL)
+ return ret;
+
+ if (new_master->is_simple) {
+ // FIXME: we should be able to support this though, in the unlikely
+ // case that it changed?
+ GST_ERROR
+ ("Cannot update variant playlist: New playlist is not a variant playlist");
+ gst_hls_master_playlist_unref (new_master);
+ return FALSE;
+ }
+
+ GST_M3U8_CLIENT_LOCK (self);
+
+ if (hlsdemux->master->is_simple) {
+ GST_ERROR
+ ("Cannot update variant playlist: Current playlist is not a variant playlist");
+ gst_hls_master_playlist_unref (new_master);
+ goto out;
+ }
+
+ /* Now see if the variant playlist still has the same lists */
+ unmatched_lists = g_list_copy (hlsdemux->master->variants);
+ for (l = new_master->variants; l != NULL; l = l->next) {
+ GList *match = g_list_find_custom (unmatched_lists, l->data,
+ (GCompareFunc) gst_hls_demux_find_variant_match);
+
+ if (match) {
+ GstHLSVariantStream *variant = l->data;
+ GstHLSVariantStream *old = match->data;
+
+ unmatched_lists = g_list_delete_link (unmatched_lists, match);
+ /* FIXME: Deal with losing position due to missing an update */
+ variant->m3u8->sequence_position = old->m3u8->sequence_position;
+ variant->m3u8->sequence = old->m3u8->sequence;
+ }
+ }
+
+ if (unmatched_lists != NULL) {
+ GST_WARNING ("Unable to match all playlists");
+
+ for (l = unmatched_lists; l != NULL; l = l->next) {
+ if (l->data == hlsdemux->current_variant) {
+ GST_WARNING ("Unable to match current playlist");
+ }
+ }
+
+ g_list_free (unmatched_lists);
+ }
+
+ /* Switch out the variant playlist */
+ old = hlsdemux->master;
+
+ // FIXME: check all this and also switch of variants, if anything needs updating
+ hlsdemux->master = new_master;
+
+ if (hlsdemux->current_variant == NULL) {
+ new_variant = new_master->default_variant;
+ } else {
+ /* Find the same variant in the new playlist */
+ new_variant =
+ gst_hls_master_playlist_get_matching_variant (new_master,
+ hlsdemux->current_variant);
+ }
+
+ /* Use the function to set the current variant, as it copies over data */
+ if (new_variant != NULL)
+ gst_hls_demux_set_current_variant (hlsdemux, new_variant);
+
+ gst_hls_master_playlist_unref (old);
+
+ ret = (hlsdemux->current_variant != NULL);
+ out:
+ GST_M3U8_CLIENT_UNLOCK (self);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_hls_demux_update_rendition_manifest (GstHLSDemux * demux,
+ GstHLSMedia * media, GError ** err)
+ {
+ GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX (demux);
+ GstFragment *download;
+ GstBuffer *buf;
+ gchar *playlist;
+ const gchar *main_uri;
+ GstM3U8 *m3u8;
+ gchar *uri = media->uri;
+
+ main_uri = gst_adaptive_demux_get_manifest_ref_uri (adaptive_demux);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ download =
++ gst_uri_downloader_fetch_uri (adaptive_demux->downloader, uri, main_uri,
++ adaptive_demux->user_agent, adaptive_demux->cookies,
++ DEFAULT_ADAPTIVE_RETRY, DEFAULT_ADAPTIVE_TIMEOUT, TRUE, TRUE, TRUE, err);
++#else
+ download =
+ gst_uri_downloader_fetch_uri (adaptive_demux->downloader, uri, main_uri,
+ TRUE, TRUE, TRUE, err);
-
++#endif
+ if (download == NULL)
+ return FALSE;
+
+ m3u8 = media->playlist;
+
+ /* Set the base URI of the playlist to the redirect target if any */
+ if (download->redirect_permanent && download->redirect_uri) {
+ gst_m3u8_set_uri (m3u8, download->redirect_uri, NULL, media->name);
+ } else {
+ gst_m3u8_set_uri (m3u8, download->uri, download->redirect_uri, media->name);
+ }
+
+ buf = gst_fragment_get_buffer (download);
+ playlist = gst_hls_src_buf_to_utf8_playlist (buf);
+ gst_buffer_unref (buf);
+ g_object_unref (download);
+
+ if (playlist == NULL) {
+ GST_WARNING_OBJECT (demux, "Couldn't validate playlist encoding");
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
+ "Couldn't validate playlist encoding");
+ return FALSE;
+ }
+
+ if (!gst_m3u8_update (m3u8, playlist)) {
+ GST_WARNING_OBJECT (demux, "Couldn't update playlist");
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED,
+ "Couldn't update playlist");
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_hls_demux_update_playlist (GstHLSDemux * demux, gboolean update,
+ GError ** err)
+ {
+ GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX (demux);
+ GstFragment *download;
+ GstBuffer *buf;
+ gchar *playlist;
+ gboolean main_checked = FALSE;
+ const gchar *main_uri;
+ GstM3U8 *m3u8;
+ gchar *uri;
+ gint i;
+
+ retry:
+ uri = gst_m3u8_get_uri (demux->current_variant->m3u8);
+ main_uri = gst_adaptive_demux_get_manifest_ref_uri (adaptive_demux);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ download =
++ gst_uri_downloader_fetch_uri (adaptive_demux->downloader, uri, main_uri,
++ adaptive_demux->user_agent, adaptive_demux->cookies,
++ DEFAULT_ADAPTIVE_RETRY, DEFAULT_ADAPTIVE_TIMEOUT, TRUE, TRUE, TRUE, err);
++#else
+ download =
+ gst_uri_downloader_fetch_uri (adaptive_demux->downloader, uri, main_uri,
+ TRUE, TRUE, TRUE, err);
++#endif
+ if (download == NULL) {
+ gchar *base_uri;
+
+ if (!update || main_checked || demux->master->is_simple
+ || !gst_adaptive_demux_is_running (GST_ADAPTIVE_DEMUX_CAST (demux))) {
+ g_free (uri);
+ return FALSE;
+ }
+ g_clear_error (err);
+ GST_INFO_OBJECT (demux,
+ "Updating playlist %s failed, attempt to refresh variant playlist %s",
+ uri, main_uri);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ download =
++ gst_uri_downloader_fetch_uri (adaptive_demux->downloader, main_uri,
++ NULL, adaptive_demux->user_agent, adaptive_demux->cookies,
++ DEFAULT_ADAPTIVE_RETRY, DEFAULT_ADAPTIVE_TIMEOUT, TRUE, TRUE, TRUE,
++ err);
++#else
+ download =
+ gst_uri_downloader_fetch_uri (adaptive_demux->downloader,
+ main_uri, NULL, TRUE, TRUE, TRUE, err);
++#endif
+ if (download == NULL) {
+ g_free (uri);
+ return FALSE;
+ }
+
+ buf = gst_fragment_get_buffer (download);
+ playlist = gst_hls_src_buf_to_utf8_playlist (buf);
+ gst_buffer_unref (buf);
+
+ if (playlist == NULL) {
+ GST_WARNING_OBJECT (demux,
+ "Failed to validate variant playlist encoding");
+ g_free (uri);
+ g_object_unref (download);
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
+ "Couldn't validate playlist encoding");
+ return FALSE;
+ }
+
+ g_free (uri);
+ if (download->redirect_permanent && download->redirect_uri) {
+ uri = download->redirect_uri;
+ base_uri = NULL;
+ } else {
+ uri = download->uri;
+ base_uri = download->redirect_uri;
+ }
+
+ if (!gst_hls_demux_update_variant_playlist (demux, playlist, uri, base_uri)) {
+ GST_WARNING_OBJECT (demux, "Failed to update the variant playlist");
+ g_object_unref (download);
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED,
+ "Couldn't update playlist");
+ return FALSE;
+ }
+
+ g_object_unref (download);
+
+ main_checked = TRUE;
+ goto retry;
+ }
+ g_free (uri);
+
+ m3u8 = demux->current_variant->m3u8;
+
+ /* Set the base URI of the playlist to the redirect target if any */
+ if (download->redirect_permanent && download->redirect_uri) {
+ gst_m3u8_set_uri (m3u8, download->redirect_uri, NULL,
+ demux->current_variant->name);
+ } else {
+ gst_m3u8_set_uri (m3u8, download->uri, download->redirect_uri,
+ demux->current_variant->name);
+ }
+
+ buf = gst_fragment_get_buffer (download);
+ playlist = gst_hls_src_buf_to_utf8_playlist (buf);
+ gst_buffer_unref (buf);
+ g_object_unref (download);
+
+ if (playlist == NULL) {
+ GST_WARNING_OBJECT (demux, "Couldn't validate playlist encoding");
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
+ "Couldn't validate playlist encoding");
+ return FALSE;
+ }
+
+ if (!gst_m3u8_update (m3u8, playlist)) {
+ GST_WARNING_OBJECT (demux, "Couldn't update playlist");
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED,
+ "Couldn't update playlist");
+ return FALSE;
+ }
+
+ for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+ GList *mlist = demux->current_variant->media[i];
+
+ while (mlist != NULL) {
+ GstHLSMedia *media = mlist->data;
+
+ if (media->uri == NULL) {
+ /* No uri means this is a placeholder for a stream
+ * contained in another mux */
+ mlist = mlist->next;
+ continue;
+ }
+ GST_LOG_OBJECT (demux,
+ "Updating playlist for media of type %d - %s, uri: %s", i,
+ media->name, media->uri);
+
+ if (!gst_hls_demux_update_rendition_manifest (demux, media, err))
+ return FALSE;
+
+ mlist = mlist->next;
+ }
+ }
+
+ /* If it's a live source, do not let the sequence number go beyond
+ * three fragments before the end of the list */
+ if (update == FALSE && gst_m3u8_is_live (m3u8)) {
+ gint64 last_sequence, first_sequence;
+
+ GST_M3U8_CLIENT_LOCK (demux->client);
+ last_sequence =
+ GST_M3U8_MEDIA_FILE (g_list_last (m3u8->files)->data)->sequence;
+ first_sequence =
+ GST_M3U8_MEDIA_FILE (g_list_first (m3u8->files)->data)->sequence;
+
+ GST_DEBUG_OBJECT (demux,
+ "sequence:%" G_GINT64_FORMAT " , first_sequence:%" G_GINT64_FORMAT
+ " , last_sequence:%" G_GINT64_FORMAT, m3u8->sequence,
+ first_sequence, last_sequence);
+ if (m3u8->sequence > last_sequence - 3) {
+ //demux->need_segment = TRUE;
+ /* Make sure we never go below the minimum sequence number */
+ m3u8->sequence = MAX (first_sequence, last_sequence - 3);
+ GST_DEBUG_OBJECT (demux,
+ "Sequence is beyond playlist. Moving back to %" G_GINT64_FORMAT,
+ m3u8->sequence);
+ }
+ GST_M3U8_CLIENT_UNLOCK (demux->client);
+ } else if (!gst_m3u8_is_live (m3u8)) {
+ GstClockTime current_pos, target_pos;
+ guint sequence = 0;
+ GList *walk;
+
+ /* Sequence numbers are not guaranteed to be the same in different
+ * playlists, so get the correct fragment here based on the current
+ * position
+ */
+ GST_M3U8_CLIENT_LOCK (demux->client);
+
+ /* Valid because hlsdemux only has a single output */
+ if (GST_ADAPTIVE_DEMUX_CAST (demux)->streams) {
+ GstAdaptiveDemuxStream *stream =
+ GST_ADAPTIVE_DEMUX_CAST (demux)->streams->data;
+ target_pos = stream->segment.position;
+ } else {
+ target_pos = 0;
+ }
+ if (GST_CLOCK_TIME_IS_VALID (m3u8->sequence_position)) {
+ target_pos = MAX (target_pos, m3u8->sequence_position);
+ }
+
+ GST_LOG_OBJECT (demux, "Looking for sequence position %"
+ GST_TIME_FORMAT " in updated playlist", GST_TIME_ARGS (target_pos));
+
+ current_pos = 0;
+ for (walk = m3u8->files; walk; walk = walk->next) {
+ GstM3U8MediaFile *file = walk->data;
+
+ sequence = file->sequence;
+ if (current_pos <= target_pos
+ && target_pos < current_pos + file->duration) {
+ break;
+ }
+ current_pos += file->duration;
+ }
+ /* End of playlist */
+ if (!walk)
+ sequence++;
+ m3u8->sequence = sequence;
+ m3u8->sequence_position = current_pos;
+ GST_M3U8_CLIENT_UNLOCK (demux->client);
+ }
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ GST_DEBUG_OBJECT (demux, "post variant info message");
++ gst_element_post_message (GST_ELEMENT_CAST (demux),
++ gst_message_new_element (GST_OBJECT_CAST (demux),
++ gst_structure_new (GST_ADAPTIVE_DEMUX_VARIANT_MESSAGE_NAME,
++ "video-variant-info", G_TYPE_POINTER,
++ demux->master->variant_info, NULL)));
++#endif
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_hls_demux_change_playlist (GstHLSDemux * demux, guint max_bitrate,
+ gboolean * changed)
+ {
+ GstHLSVariantStream *lowest_variant, *lowest_ivariant;
+ GstHLSVariantStream *previous_variant, *new_variant;
+ gint old_bandwidth, new_bandwidth;
+ GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX_CAST (demux);
+ GstAdaptiveDemuxStream *stream;
+
+ g_return_val_if_fail (adaptive_demux->streams != NULL, FALSE);
+
+ stream = adaptive_demux->streams->data;
+
+ /* Make sure we keep a reference in case we need to switch back */
+ previous_variant = gst_hls_variant_stream_ref (demux->current_variant);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ new_variant =
++ gst_hls_master_playlist_get_variant_for_bandwitdh_limit (demux->master,
++ demux->current_variant, max_bitrate, NULL, adaptive_demux->min_bandwidth,
++ adaptive_demux->max_bandwidth, adaptive_demux->max_width,
++ adaptive_demux->max_height);
++
++ GST_INFO_OBJECT (demux, "new_variant : %d, %d x %d",
++ new_variant->bandwidth, new_variant->width, new_variant->height);
++#else
+ new_variant =
+ gst_hls_master_playlist_get_variant_for_bitrate (demux->master,
+ demux->current_variant, max_bitrate);
++#endif
+ GST_M3U8_CLIENT_LOCK (demux->client);
+
+ retry_failover_protection:
+ old_bandwidth = previous_variant->bandwidth;
+ new_bandwidth = new_variant->bandwidth;
+
+ /* Don't do anything else if the playlist is the same */
+ if (new_bandwidth == old_bandwidth) {
+ GST_M3U8_CLIENT_UNLOCK (demux->client);
+ gst_hls_variant_stream_unref (previous_variant);
+ return TRUE;
+ }
+
+ GST_M3U8_CLIENT_UNLOCK (demux->client);
+
+ gst_hls_demux_set_current_variant (demux, new_variant);
+
+ GST_INFO_OBJECT (demux, "Client was on %dbps, max allowed is %dbps, switching"
+ " to bitrate %dbps", old_bandwidth, max_bitrate, new_bandwidth);
+
+ if (gst_hls_demux_update_playlist (demux, TRUE, NULL)) {
+ const gchar *main_uri;
+ gchar *uri;
+
+ uri = gst_m3u8_get_uri (new_variant->m3u8);
+ main_uri = gst_adaptive_demux_get_manifest_ref_uri (adaptive_demux);
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_element (GST_OBJECT_CAST (demux),
+ gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
+ "manifest-uri", G_TYPE_STRING,
+ main_uri, "uri", G_TYPE_STRING,
+ uri, "bitrate", G_TYPE_INT, new_bandwidth, NULL)));
+ g_free (uri);
+ if (changed)
+ *changed = TRUE;
+ stream->discont = TRUE;
+ } else if (gst_adaptive_demux_is_running (GST_ADAPTIVE_DEMUX_CAST (demux))) {
+ GstHLSVariantStream *failover_variant = NULL;
+ GList *failover;
+
+ GST_INFO_OBJECT (demux, "Unable to update playlist. Switching back");
+ GST_M3U8_CLIENT_LOCK (demux->client);
+
+ /* we find variants by bitrate by going from highest to lowest, so it's
+ * possible that there's another variant with the same bitrate before the
+ * one selected which we can use as failover */
+ failover = g_list_find (demux->master->variants, new_variant);
+ if (failover != NULL)
+ failover = failover->prev;
+ if (failover != NULL)
+ failover_variant = failover->data;
+ if (failover_variant && new_bandwidth == failover_variant->bandwidth) {
+ new_variant = failover_variant;
+ goto retry_failover_protection;
+ }
+
+ GST_M3U8_CLIENT_UNLOCK (demux->client);
+ gst_hls_demux_set_current_variant (demux, previous_variant);
+ /* Try a lower bitrate (or stop if we just tried the lowest) */
+ if (previous_variant->iframe) {
+ lowest_ivariant = demux->master->iframe_variants->data;
+ if (new_bandwidth == lowest_ivariant->bandwidth)
+ return FALSE;
+ } else {
+ lowest_variant = demux->master->variants->data;
+ if (new_bandwidth == lowest_variant->bandwidth)
+ return FALSE;
+ }
+ return gst_hls_demux_change_playlist (demux, new_bandwidth - 1, changed);
+ }
+
+ gst_hls_variant_stream_unref (previous_variant);
+ return TRUE;
+ }
+
+ #if defined(HAVE_OPENSSL)
+ static gboolean
+ gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+ const guint8 * key_data, const guint8 * iv_data)
+ {
+ EVP_CIPHER_CTX *ctx;
+ #if OPENSSL_VERSION_NUMBER < 0x10100000L
+ EVP_CIPHER_CTX_init (&stream->aes_ctx);
+ ctx = &stream->aes_ctx;
+ #else
+ stream->aes_ctx = EVP_CIPHER_CTX_new ();
+ ctx = stream->aes_ctx;
+ #endif
+ if (!EVP_DecryptInit_ex (ctx, EVP_aes_128_cbc (), NULL, key_data, iv_data))
+ return FALSE;
+ EVP_CIPHER_CTX_set_padding (ctx, 0);
+ return TRUE;
+ }
+
+ static gboolean
+ decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+ const guint8 * encrypted_data, guint8 * decrypted_data)
+ {
+ int len, flen = 0;
+ EVP_CIPHER_CTX *ctx;
+
+ #if OPENSSL_VERSION_NUMBER < 0x10100000L
+ ctx = &stream->aes_ctx;
+ #else
+ ctx = stream->aes_ctx;
+ #endif
+
+ if (G_UNLIKELY (length > G_MAXINT || length % 16 != 0))
+ return FALSE;
+
+ len = (int) length;
+ if (!EVP_DecryptUpdate (ctx, decrypted_data, &len, encrypted_data, len))
+ return FALSE;
+ EVP_DecryptFinal_ex (ctx, decrypted_data + len, &flen);
+ g_return_val_if_fail (len + flen == length, FALSE);
+ return TRUE;
+ }
+
+ static void
+ gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+ {
+ #if OPENSSL_VERSION_NUMBER < 0x10100000L
+ EVP_CIPHER_CTX_cleanup (&stream->aes_ctx);
+ #else
+ EVP_CIPHER_CTX_free (stream->aes_ctx);
+ stream->aes_ctx = NULL;
+ #endif
+ }
+
+ #elif defined(HAVE_NETTLE)
+ static gboolean
+ gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+ const guint8 * key_data, const guint8 * iv_data)
+ {
+ aes128_set_decrypt_key (&stream->aes_ctx.ctx, key_data);
+ CBC_SET_IV (&stream->aes_ctx, iv_data);
+
+ return TRUE;
+ }
+
+ static gboolean
+ decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+ const guint8 * encrypted_data, guint8 * decrypted_data)
+ {
+ if (length % 16 != 0)
+ return FALSE;
+
+ CBC_DECRYPT (&stream->aes_ctx, aes128_decrypt, length, decrypted_data,
+ encrypted_data);
+
+ return TRUE;
+ }
+
+ static void
+ gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+ {
+ /* NOP */
+ }
+
+ #elif defined(HAVE_LIBGCRYPT)
+ static gboolean
+ gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+ const guint8 * key_data, const guint8 * iv_data)
+ {
+ gcry_error_t err = 0;
+ gboolean ret = FALSE;
+
+ err =
+ gcry_cipher_open (&stream->aes_ctx, GCRY_CIPHER_AES128,
+ GCRY_CIPHER_MODE_CBC, 0);
+ if (err)
+ goto out;
+ err = gcry_cipher_setkey (stream->aes_ctx, key_data, 16);
+ if (err)
+ goto out;
+ err = gcry_cipher_setiv (stream->aes_ctx, iv_data, 16);
+ if (!err)
+ ret = TRUE;
+
+ out:
+ if (!ret)
+ if (stream->aes_ctx)
+ gcry_cipher_close (stream->aes_ctx);
+
+ return ret;
+ }
+
+ static gboolean
+ decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+ const guint8 * encrypted_data, guint8 * decrypted_data)
+ {
+ gcry_error_t err = 0;
+
+ err = gcry_cipher_decrypt (stream->aes_ctx, decrypted_data, length,
+ encrypted_data, length);
+
+ return err == 0;
+ }
+
+ static void
+ gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+ {
+ if (stream->aes_ctx) {
+ gcry_cipher_close (stream->aes_ctx);
+ stream->aes_ctx = NULL;
+ }
+ }
+
+ #else
+ /* NO crypto available */
+ static gboolean
+ gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+ const guint8 * key_data, const guint8 * iv_data)
+ {
+ GST_ERROR ("No crypto available");
+ return FALSE;
+ }
+
+ static gboolean
+ decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+ const guint8 * encrypted_data, guint8 * decrypted_data)
+ {
+ GST_ERROR ("Cannot decrypt fragment, no crypto available");
+ return FALSE;
+ }
+
+ static void
+ gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+ {
+ return;
+ }
+ #endif
+
+ static GstBuffer *
+ gst_hls_demux_decrypt_fragment (GstHLSDemux * demux, GstHLSDemuxStream * stream,
+ GstBuffer * encrypted_buffer, GError ** err)
+ {
+ GstBuffer *decrypted_buffer = NULL;
+ GstMapInfo encrypted_info, decrypted_info;
+
+ decrypted_buffer =
+ gst_buffer_new_allocate (NULL, gst_buffer_get_size (encrypted_buffer),
+ NULL);
+
+ gst_buffer_map (encrypted_buffer, &encrypted_info, GST_MAP_READ);
+ gst_buffer_map (decrypted_buffer, &decrypted_info, GST_MAP_WRITE);
+
+ if (!decrypt_fragment (stream, encrypted_info.size,
+ encrypted_info.data, decrypted_info.data))
+ goto decrypt_error;
+
+
+ gst_buffer_unmap (decrypted_buffer, &decrypted_info);
+ gst_buffer_unmap (encrypted_buffer, &encrypted_info);
+
+ gst_buffer_unref (encrypted_buffer);
+
+ return decrypted_buffer;
+
+ decrypt_error:
+ GST_ERROR_OBJECT (demux, "Failed to decrypt fragment");
+ g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_DECRYPT,
+ "Failed to decrypt fragment");
+
+ gst_buffer_unmap (decrypted_buffer, &decrypted_info);
+ gst_buffer_unmap (encrypted_buffer, &encrypted_info);
+
+ gst_buffer_unref (encrypted_buffer);
+ gst_buffer_unref (decrypted_buffer);
+
+ return NULL;
+ }
+
+ static gint64
+ gst_hls_demux_get_manifest_update_interval (GstAdaptiveDemux * demux)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ GstClockTime target_duration;
+
+ if (hlsdemux->current_variant) {
+ target_duration =
+ gst_m3u8_get_target_duration (hlsdemux->current_variant->m3u8);
+ } else {
+ target_duration = 5 * GST_SECOND;
+ }
+
+ return gst_util_uint64_scale (target_duration, G_USEC_PER_SEC, GST_SECOND);
+ }
+
+ static gboolean
+ gst_hls_demux_get_live_seek_range (GstAdaptiveDemux * demux, gint64 * start,
+ gint64 * stop)
+ {
+ GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+ gboolean ret = FALSE;
+
+ if (hlsdemux->current_variant) {
+ ret =
+ gst_m3u8_get_seek_range (hlsdemux->current_variant->m3u8, start, stop);
+ }
+
+ return ret;
+ }
++
++#ifdef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
++static gboolean
++gst_hlsdemux_set_stream_event (GstAdaptiveDemuxStream * stream,
++ GstHLSMedia * media)
++{
++ GstStructure *structure;
++ GstEvent *event;
++
++ if (!stream) {
++ GST_WARNING ("stream is NULL");
++ return FALSE;
++ }
++
++ if (!media) {
++ GST_WARNING ("media is NULL");
++ return FALSE;
++ }
++
++ if (media->lang)
++ gst_hlsdemux_set_language_tags (stream, media->lang);
++
++ structure =
++ gst_structure_new ("GstHLSMedia", "mtype", G_TYPE_INT, media->mtype,
++ "default", G_TYPE_BOOLEAN, media->is_default, "autoselect",
++ G_TYPE_BOOLEAN, media->autoselect, "forced", G_TYPE_BOOLEAN,
++ media->forced, NULL);
++
++ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, structure);
++
++ gst_adaptive_demux_stream_queue_event (stream, event);
++
++ return TRUE;
++}
++
++static gboolean
++gst_hlsdemux_set_language_tags (GstAdaptiveDemuxStream * stream,
++ const gchar * language)
++{
++ GstTagList *lang_tag = NULL;
++
++ if (!stream)
++ return FALSE;
++
++ if (gst_tag_check_language_code (language))
++ lang_tag = gst_tag_list_new (GST_TAG_LANGUAGE_CODE, language, NULL);
++ else
++ lang_tag = gst_tag_list_new (GST_TAG_LANGUAGE_NAME, language, NULL);
++
++ if (!lang_tag)
++ return FALSE;
++
++ gst_adaptive_demux_stream_set_tags (stream, lang_tag);
++
++ return TRUE;
++}
++#endif
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * gsthlsdemux.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ #ifndef __GST_HLS_DEMUX_H__
+ #define __GST_HLS_DEMUX_H__
+
+ #include <gst/gst.h>
+ #include "m3u8.h"
+ #include <gst/adaptivedemux/gstadaptivedemux.h>
+ #if defined(HAVE_OPENSSL)
+ #include <openssl/evp.h>
+ #elif defined(HAVE_NETTLE)
+ #include <nettle/aes.h>
+ #include <nettle/cbc.h>
+ #elif defined(HAVE_LIBGCRYPT)
+ #include <gcrypt.h>
+ #endif
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_HLS_DEMUX \
+ (gst_hls_demux_get_type())
+ #define GST_HLS_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_HLS_DEMUX,GstHLSDemux))
+ #define GST_HLS_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_HLS_DEMUX,GstHLSDemuxClass))
+ #define GST_IS_HLS_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_HLS_DEMUX))
+ #define GST_IS_HLS_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_HLS_DEMUX))
+ #define GST_HLS_DEMUX_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_HLS_DEMUX,GstHLSDemuxClass))
+ #define GST_HLS_DEMUX_CAST(obj) \
+ ((GstHLSDemux *)obj)
+
+ typedef struct _GstHLSDemux GstHLSDemux;
+ typedef struct _GstHLSDemuxClass GstHLSDemuxClass;
+ typedef struct _GstHLSDemuxStream GstHLSDemuxStream;
+ typedef struct _GstHLSTSReader GstHLSTSReader;
+
+ #define GST_HLS_DEMUX_STREAM_CAST(stream) ((GstHLSDemuxStream *)(stream))
+
+ typedef enum {
+ GST_HLS_TSREADER_NONE,
+ GST_HLS_TSREADER_MPEGTS,
+ GST_HLS_TSREADER_ID3
+ } GstHLSTSReaderType;
+
+ struct _GstHLSTSReader
+ {
+ GstHLSTSReaderType rtype;
+ gboolean have_id3;
+
+ gint packet_size;
+ gint pmt_pid;
+ gint pcr_pid;
+
+ GstClockTime last_pcr;
+ GstClockTime first_pcr;
+ };
+
+ struct _GstHLSDemuxStream
+ {
+ GstAdaptiveDemuxStream adaptive_demux_stream;
+
+ GstHLSTSReaderType stream_type;
+
+ GstM3U8 *playlist;
+ gboolean is_primary_playlist;
+
+ gboolean do_typefind; /* Whether we need to typefind the next buffer */
+ GstBuffer *pending_typefind_buffer; /* for collecting data until typefind succeeds */
+
+ GstAdapter *pending_encrypted_data; /* for chunking data into 16 byte multiples for decryption */
+ GstBuffer *pending_decrypted_buffer; /* last decrypted buffer for pkcs7 unpadding.
+ We only know that it is the last at EOS */
+ guint64 current_offset; /* offset we're currently at */
+ gboolean reset_pts;
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT
++ GstClockTime sequence_pos;
++ GstClockTime last_pcr;
++#endif
+
+ /* decryption tooling */
+ #if defined(HAVE_OPENSSL)
+ # if OPENSSL_VERSION_NUMBER < 0x10100000L
+ EVP_CIPHER_CTX aes_ctx;
+ # else
+ EVP_CIPHER_CTX *aes_ctx;
+ # endif
+ #elif defined(HAVE_NETTLE)
+ struct CBC_CTX (struct aes128_ctx, AES_BLOCK_SIZE) aes_ctx;
+ #elif defined(HAVE_LIBGCRYPT)
+ gcry_cipher_hd_t aes_ctx;
+ #endif
+
+ gchar *current_key;
+ guint8 *current_iv;
+
+ /* Accumulator for reading PAT/PMT/PCR from
+ * the stream so we can set timestamps/segments
+ * and switch cleanly */
+ GstBuffer *pending_pcr_buffer;
++#ifdef TIZEN_FEATURE_HLSDEMUX_DISCONT_SEQUENCE
++ gint failed_count;
++#endif
+ GstHLSTSReader tsreader;
+ };
+
+ typedef struct {
+ guint8 data[16];
+ } GstHLSKey;
+
+ /**
+ * GstHLSDemux:
+ *
+ * Opaque #GstHLSDemux data structure.
+ */
+ struct _GstHLSDemux
+ {
+ GstAdaptiveDemux parent;
+
+ gint srcpad_counter;
+
+ /* Decryption key cache: url => GstHLSKey */
+ GHashTable *keys;
+ GMutex keys_lock;
+
+ /* FIXME: check locking, protected automatically by manifest_lock already? */
+ /* The master playlist with the available variant streams */
+ GstHLSMasterPlaylist *master;
+
+ GstHLSVariantStream *current_variant;
+ /* The previous variant, used to transition streams over */
+ GstHLSVariantStream *previous_variant;
+
+ gboolean streams_aware;
+ };
+
+ struct _GstHLSDemuxClass
+ {
+ GstAdaptiveDemuxClass parent_class;
+ };
+
+
+ void gst_hlsdemux_tsreader_init (GstHLSTSReader *r);
+ void gst_hlsdemux_tsreader_set_type (GstHLSTSReader *r, GstHLSTSReaderType rtype);
+
+ gboolean gst_hlsdemux_tsreader_find_pcrs (GstHLSTSReader *r, GstBuffer **buffer,
+ GstClockTime *first_pcr, GstClockTime *last_pcr, GstTagList **tags);
+
+ GType gst_hls_demux_get_type (void);
+
+ G_END_DECLS
+ #endif /* __GST_HLS_DEMUX_H__ */
--- /dev/null
- } else {
+ /* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * m3u8.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
+ #include <stdlib.h>
+ #include <math.h>
+ #include <errno.h>
+ #include <glib.h>
+ #include <string.h>
+
+ #include "m3u8.h"
+ #include "gsthlselements.h"
+
+ #define GST_CAT_DEFAULT hls_debug
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++#define DEFAULT_RESOLUTION_LIMIT -1
++#define DEFAULT_BANDWIDTH_LIMIT -1
++#endif
+
+ static GstM3U8MediaFile *gst_m3u8_media_file_new (gchar * uri,
+ gchar * title, GstClockTime duration, guint sequence);
+ static void gst_m3u8_init_file_unref (GstM3U8InitFile * self);
+ static gchar *uri_join (const gchar * uri, const gchar * path);
+
+ GstM3U8 *
+ gst_m3u8_new (void)
+ {
+ GstM3U8 *m3u8;
+
+ m3u8 = g_new0 (GstM3U8, 1);
+
+ m3u8->current_file = NULL;
+ m3u8->current_file_duration = GST_CLOCK_TIME_NONE;
+ m3u8->sequence = -1;
+ m3u8->sequence_position = 0;
+ m3u8->highest_sequence_number = -1;
+ m3u8->duration = GST_CLOCK_TIME_NONE;
++#ifdef TIZEN_FEATURE_AD
++ m3u8->ad_info = g_new0 (GstM3U8AdInfo, 1);
++#endif
+
+ g_mutex_init (&m3u8->lock);
+ m3u8->ref_count = 1;
+
+ return m3u8;
+ }
+
+ /* call with M3U8_LOCK held */
+ static void
+ gst_m3u8_take_uri (GstM3U8 * self, gchar * uri, gchar * base_uri, gchar * name)
+ {
+ g_return_if_fail (self != NULL);
+
+ if (self->uri != uri) {
+ g_free (self->uri);
+ self->uri = uri;
+ }
+ if (self->base_uri != base_uri) {
+ g_free (self->base_uri);
+ self->base_uri = base_uri;
+ }
+ if (self->name != name) {
+ g_free (self->name);
+ self->name = name;
+ }
+ }
+
+ void
+ gst_m3u8_set_uri (GstM3U8 * m3u8, const gchar * uri, const gchar * base_uri,
+ const gchar * name)
+ {
+ GST_M3U8_LOCK (m3u8);
+ gst_m3u8_take_uri (m3u8, g_strdup (uri), g_strdup (base_uri),
+ g_strdup (name));
+ GST_M3U8_UNLOCK (m3u8);
+ }
+
++#ifdef TIZEN_FEATURE_AD
++static GstM3U8Cue *
++gst_m3u8_cue_info_new (GstClockTime start_time, GstClockTime duration)
++{
++ GstM3U8Cue *ad;
++
++ ad = g_new0 (GstM3U8Cue, 1);
++ ad->start_time = start_time;
++ ad->end_time = 0;
++ ad->duration = duration;
++
++ return ad;
++}
++
++void
++gst_m3u8_cue_cont_free (GstM3U8CueOutCont * self)
++{
++ g_return_if_fail (self != NULL);
++ g_free (self->cont_data);
++}
++#endif
++
+ GstM3U8 *
+ gst_m3u8_ref (GstM3U8 * m3u8)
+ {
+ g_assert (m3u8 != NULL && m3u8->ref_count > 0);
+
+ g_atomic_int_add (&m3u8->ref_count, 1);
+ return m3u8;
+ }
+
+ void
+ gst_m3u8_unref (GstM3U8 * self)
+ {
+ g_return_if_fail (self != NULL && self->ref_count > 0);
+
+ if (g_atomic_int_dec_and_test (&self->ref_count)) {
+ g_free (self->uri);
+ g_free (self->base_uri);
+ g_free (self->name);
+
+ g_list_foreach (self->files, (GFunc) gst_m3u8_media_file_unref, NULL);
+ g_list_free (self->files);
+
++#ifdef TIZEN_FEATURE_AD
++ if (self->ad_info) {
++ g_list_free (self->ad_info->cue);
++ g_list_free_full(self->ad_info->cue_cont, (GDestroyNotify) gst_m3u8_cue_cont_free);
++ g_free (self->ad_info);
++ }
++#endif
++
+ g_free (self->last_data);
+ g_mutex_clear (&self->lock);
+ g_free (self);
+ }
+ }
+
+ static GstM3U8MediaFile *
+ gst_m3u8_media_file_new (gchar * uri, gchar * title, GstClockTime duration,
+ guint sequence)
+ {
+ GstM3U8MediaFile *file;
+
+ file = g_new0 (GstM3U8MediaFile, 1);
+ file->uri = uri;
+ file->title = title;
+ file->duration = duration;
+ file->sequence = sequence;
+ file->ref_count = 1;
+
+ return file;
+ }
+
+ GstM3U8MediaFile *
+ gst_m3u8_media_file_ref (GstM3U8MediaFile * mfile)
+ {
+ g_assert (mfile != NULL && mfile->ref_count > 0);
+
+ g_atomic_int_add (&mfile->ref_count, 1);
+ return mfile;
+ }
+
+ void
+ gst_m3u8_media_file_unref (GstM3U8MediaFile * self)
+ {
+ g_return_if_fail (self != NULL && self->ref_count > 0);
+
+ if (g_atomic_int_dec_and_test (&self->ref_count)) {
+ if (self->init_file)
+ gst_m3u8_init_file_unref (self->init_file);
+ g_free (self->title);
+ g_free (self->uri);
+ g_free (self->key);
+ g_free (self);
+ }
+ }
+
+ static GstM3U8InitFile *
+ gst_m3u8_init_file_new (gchar * uri)
+ {
+ GstM3U8InitFile *file;
+
+ file = g_new0 (GstM3U8InitFile, 1);
+ file->uri = uri;
+ file->ref_count = 1;
+
+ return file;
+ }
+
+ static GstM3U8InitFile *
+ gst_m3u8_init_file_ref (GstM3U8InitFile * ifile)
+ {
+ g_assert (ifile != NULL && ifile->ref_count > 0);
+
+ g_atomic_int_add (&ifile->ref_count, 1);
+ return ifile;
+ }
+
+ static void
+ gst_m3u8_init_file_unref (GstM3U8InitFile * self)
+ {
+ g_return_if_fail (self != NULL && self->ref_count > 0);
+
+ if (g_atomic_int_dec_and_test (&self->ref_count)) {
+ g_free (self->uri);
+ g_free (self);
+ }
+ }
+
+ static gboolean
+ int_from_string (gchar * ptr, gchar ** endptr, gint * val)
+ {
+ gchar *end;
+ gint64 ret;
+
+ g_return_val_if_fail (ptr != NULL, FALSE);
+ g_return_val_if_fail (val != NULL, FALSE);
+
+ errno = 0;
+ ret = g_ascii_strtoll (ptr, &end, 10);
+ if ((errno == ERANGE && (ret == G_MAXINT64 || ret == G_MININT64))
+ || (errno != 0 && ret == 0)) {
+ GST_WARNING ("%s", g_strerror (errno));
+ return FALSE;
+ }
+
+ if (ret > G_MAXINT || ret < G_MININT) {
+ GST_WARNING ("%s", g_strerror (ERANGE));
+ return FALSE;
+ }
+
+ if (endptr)
+ *endptr = end;
+
+ *val = (gint) ret;
+
+ return end != ptr;
+ }
+
+ static gboolean
+ int64_from_string (gchar * ptr, gchar ** endptr, gint64 * val)
+ {
+ gchar *end;
+ gint64 ret;
+
+ g_return_val_if_fail (ptr != NULL, FALSE);
+ g_return_val_if_fail (val != NULL, FALSE);
+
+ errno = 0;
+ ret = g_ascii_strtoll (ptr, &end, 10);
+ if ((errno == ERANGE && (ret == G_MAXINT64 || ret == G_MININT64))
+ || (errno != 0 && ret == 0)) {
+ GST_WARNING ("%s", g_strerror (errno));
+ return FALSE;
+ }
+
+ if (endptr)
+ *endptr = end;
+
+ *val = ret;
+
+ return end != ptr;
+ }
+
+ static gboolean
+ double_from_string (gchar * ptr, gchar ** endptr, gdouble * val)
+ {
+ gchar *end;
+ gdouble ret;
+
+ g_return_val_if_fail (ptr != NULL, FALSE);
+ g_return_val_if_fail (val != NULL, FALSE);
+
+ errno = 0;
+ ret = g_ascii_strtod (ptr, &end);
+ if ((errno == ERANGE && (ret == HUGE_VAL || ret == -HUGE_VAL))
+ || (errno != 0 && ret == 0)) {
+ GST_WARNING ("%s", g_strerror (errno));
+ return FALSE;
+ }
+
+ if (!isfinite (ret)) {
+ GST_WARNING ("%s", g_strerror (ERANGE));
+ return FALSE;
+ }
+
+ if (endptr)
+ *endptr = end;
+
+ *val = (gdouble) ret;
+
+ return end != ptr;
+ }
+
+ static gboolean
+ parse_attributes (gchar ** ptr, gchar ** a, gchar ** v)
+ {
+ gchar *end = NULL, *p, *ve;
+
+ g_return_val_if_fail (ptr != NULL, FALSE);
+ g_return_val_if_fail (*ptr != NULL, FALSE);
+ g_return_val_if_fail (a != NULL, FALSE);
+ g_return_val_if_fail (v != NULL, FALSE);
+
+ /* [attribute=value,]* */
+
+ *a = *ptr;
+ end = p = g_utf8_strchr (*ptr, -1, ',');
+ if (end) {
+ gchar *q = g_utf8_strchr (*ptr, -1, '"');
+ if (q && q < end) {
+ /* special case, such as CODECS="avc1.77.30, mp4a.40.2" */
+ q = g_utf8_next_char (q);
+ if (q) {
+ q = g_utf8_strchr (q, -1, '"');
+ }
+ if (q) {
+ end = p = g_utf8_strchr (q, -1, ',');
+ }
+ }
+ }
+ if (end) {
+ do {
+ end = g_utf8_next_char (end);
+ } while (end && *end == ' ');
+ *p = '\0';
+ }
+
+ *v = p = g_utf8_strchr (*ptr, -1, '=');
+ if (*v) {
+ *p = '\0';
+ *v = g_utf8_next_char (*v);
+ if (**v == '"') {
+ ve = g_utf8_next_char (*v);
+ if (ve) {
+ ve = g_utf8_strchr (ve, -1, '"');
+ }
+ if (ve) {
+ *v = g_utf8_next_char (*v);
+ *ve = '\0';
+ } else {
+ GST_WARNING ("Cannot remove quotation marks from %s", *a);
+ }
+ }
+ } else {
+ GST_WARNING ("missing = after attribute");
+ return FALSE;
+ }
+
+ *ptr = end;
+ return TRUE;
+ }
+
+ static gint
+ gst_hls_variant_stream_compare_by_bitrate (gconstpointer a, gconstpointer b)
+ {
+ const GstHLSVariantStream *vs_a = (const GstHLSVariantStream *) a;
+ const GstHLSVariantStream *vs_b = (const GstHLSVariantStream *) b;
+
+ if (vs_a->bandwidth == vs_b->bandwidth)
+ return g_strcmp0 (vs_a->name, vs_b->name);
+
+ return vs_a->bandwidth - vs_b->bandwidth;
+ }
+
+ /* If we have MEDIA-SEQUENCE, ensure that it's consistent. If it is not,
+ * the client SHOULD halt playback (6.3.4), which is what we do then. */
+ static gboolean
+ check_media_seqnums (GstM3U8 * self, GList * previous_files)
+ {
+ GList *l, *m;
+ GstM3U8MediaFile *f1 = NULL, *f2 = NULL;
+
+ g_return_val_if_fail (previous_files, FALSE);
+
+ if (!self->files) {
+ /* Empty playlists are trivially consistent */
+ return TRUE;
+ }
+
+ /* Find first case of higher/equal sequence number in new playlist.
+ * From there on we can linearly step ahead */
+ for (l = self->files; l; l = l->next) {
+ gboolean match = FALSE;
+
+ f1 = l->data;
+ for (m = previous_files; m; m = m->next) {
+ f2 = m->data;
+
+ if (f1->sequence >= f2->sequence) {
+ match = TRUE;
+ break;
+ }
+ }
+ if (match)
+ break;
+ }
+
+ /* We must have seen at least one entry on each list */
+ g_assert (f1 != NULL);
+ g_assert (f2 != NULL);
+
+ if (!l) {
+ /* No match, no sequence in the new playlist was higher than
+ * any in the old. This is bad! */
+ GST_ERROR ("Media sequence doesn't continue: last new %" G_GINT64_FORMAT
+ " < last old %" G_GINT64_FORMAT, f1->sequence, f2->sequence);
+ return FALSE;
+ }
+
+ for (; l && m; l = l->next, m = m->next) {
+ f1 = l->data;
+ f2 = m->data;
+
+ if (f1->sequence == f2->sequence && !g_str_equal (f1->uri, f2->uri)) {
+ /* Same sequence, different URI. This is bad! */
+ GST_ERROR ("Media URIs inconsistent (sequence %" G_GINT64_FORMAT
+ "): had '%s', got '%s'", f1->sequence, f2->uri, f1->uri);
+ return FALSE;
+ } else if (f1->sequence < f2->sequence) {
+ /* Not same sequence but by construction sequence must be higher in the
+ * new one. All good in that case, if it isn't then this means that
+ * sequence numbers are decreasing or files were inserted */
+ GST_ERROR ("Media sequences inconsistent: %" G_GINT64_FORMAT " < %"
+ G_GINT64_FORMAT ": URIs new '%s' old '%s'", f1->sequence,
+ f2->sequence, f1->uri, f2->uri);
+ return FALSE;
+ }
+ }
+
+ /* All good if we're getting here */
+ return TRUE;
+ }
+
+ /* If we don't have MEDIA-SEQUENCE, we check URIs in the previous and
+ * current playlist to calculate the/a correct MEDIA-SEQUENCE for the new
+ * playlist in relation to the old. That is, same URIs get the same number
+ * and later URIs get higher numbers */
+ static void
+ generate_media_seqnums (GstM3U8 * self, GList * previous_files)
+ {
+ GList *l, *m;
+ GstM3U8MediaFile *f1 = NULL, *f2 = NULL;
+ gint64 mediasequence;
+
+ g_return_if_fail (previous_files);
+
+ /* Find first case of same URI in new playlist.
+ * From there on we can linearly step ahead */
+ for (l = self->files; l; l = l->next) {
+ gboolean match = FALSE;
+
+ f1 = l->data;
+ for (m = previous_files; m; m = m->next) {
+ f2 = m->data;
+
+ if (g_str_equal (f1->uri, f2->uri)) {
+ match = TRUE;
+ break;
+ }
+ }
+
+ if (match)
+ break;
+ }
+
+ if (l) {
+ /* Match, check that all following ones are matching too and continue
+ * sequence numbers from there on */
+
+ mediasequence = f2->sequence;
+
+ for (; l && m; l = l->next, m = m->next) {
+ f1 = l->data;
+ f2 = m->data;
+
+ f1->sequence = mediasequence;
+ mediasequence++;
+
+ if (!g_str_equal (f1->uri, f2->uri)) {
+ GST_WARNING ("Inconsistent URIs after playlist update: '%s' != '%s'",
+ f1->uri, f2->uri);
+ }
+ }
+ } else {
+ /* No match, this means f2 is the last item in the previous playlist
+ * and we have to start our new playlist at that sequence */
+ mediasequence = f2->sequence + 1;
+ l = self->files;
+ }
+
+ for (; l; l = l->next) {
+ f1 = l->data;
+
+ f1->sequence = mediasequence;
+ mediasequence++;
+ }
+ }
+
+ /*
+ * @data: a m3u8 playlist text data, taking ownership
+ */
+ gboolean
+ gst_m3u8_update (GstM3U8 * self, gchar * data)
+ {
+ gint val;
+ GstClockTime duration;
+ gchar *title, *end;
+ gboolean discontinuity = FALSE;
+ gchar *current_key = NULL;
+ gboolean have_iv = FALSE;
+ guint8 iv[16] = { 0, };
+ gint64 size = -1, offset = -1;
+ gint64 mediasequence;
+ GList *previous_files = NULL;
+ gboolean have_mediasequence = FALSE;
+ GstM3U8InitFile *last_init_file = NULL;
++#ifdef TIZEN_FEATURE_AD
++ GstClockTime timestamp = 0;
++#endif
+
+ g_return_val_if_fail (self != NULL, FALSE);
+ g_return_val_if_fail (data != NULL, FALSE);
+
+ GST_M3U8_LOCK (self);
+
+ /* check if the data changed since last update */
+ if (self->last_data && g_str_equal (self->last_data, data)) {
+ GST_DEBUG ("Playlist is the same as previous one");
+ g_free (data);
+ GST_M3U8_UNLOCK (self);
+ return TRUE;
+ }
+
+ if (!g_str_has_prefix (data, "#EXTM3U")) {
+ GST_WARNING ("Data doesn't start with #EXTM3U");
+ g_free (data);
+ GST_M3U8_UNLOCK (self);
+ return FALSE;
+ }
+
+ if (g_strrstr (data, "\n#EXT-X-STREAM-INF:") != NULL) {
+ GST_WARNING ("Not a media playlist, but a master playlist!");
+ GST_M3U8_UNLOCK (self);
+ return FALSE;
+ }
+
+ GST_TRACE ("data:\n%s", data);
+
+ g_free (self->last_data);
+ self->last_data = data;
+
+ self->current_file = NULL;
+ previous_files = self->files;
+ self->files = NULL;
+ self->duration = GST_CLOCK_TIME_NONE;
+ mediasequence = 0;
+
+ /* By default, allow caching */
+ self->allowcache = TRUE;
+
+ duration = 0;
+ title = NULL;
+ data += 7;
+ while (TRUE) {
+ gchar *r;
+
+ end = g_utf8_strchr (data, -1, '\n');
+ if (end)
+ *end = '\0';
+
+ r = g_utf8_strchr (data, -1, '\r');
+ if (r)
+ *r = '\0';
+
+ if (data[0] != '#' && data[0] != '\0') {
+ if (duration <= 0) {
+ GST_LOG ("%s: got line without EXTINF, dropping", data);
+ goto next_line;
+ }
+
+ data = uri_join (self->base_uri ? self->base_uri : self->uri, data);
+ if (data != NULL) {
+ GstM3U8MediaFile *file;
+ file = gst_m3u8_media_file_new (data, title, duration, mediasequence++);
+
+ /* set encryption params */
+ file->key = current_key ? g_strdup (current_key) : NULL;
+ if (file->key) {
+ if (have_iv) {
+ memcpy (file->iv, iv, sizeof (iv));
+ } else {
+ guint8 *iv = file->iv + 12;
+ GST_WRITE_UINT32_BE (iv, file->sequence);
+ }
+ }
+
+ if (size != -1) {
+ file->size = size;
+ if (offset != -1) {
+ file->offset = offset;
+ } else {
+ GstM3U8MediaFile *prev = self->files ? self->files->data : NULL;
+
+ if (!prev) {
+ offset = 0;
+ } else {
+ offset = prev->offset + prev->size;
+ }
+ file->offset = offset;
+ }
+ } else {
+ file->size = -1;
+ file->offset = 0;
+ }
+
+ file->discont = discontinuity;
+ if (last_init_file)
+ file->init_file = gst_m3u8_init_file_ref (last_init_file);
+
++#ifdef TIZEN_FEATURE_AD
++ timestamp += duration;
++#endif
+ duration = 0;
+ title = NULL;
+ discontinuity = FALSE;
+ size = offset = -1;
+ self->files = g_list_prepend (self->files, file);
+ }
+
+ } else if (g_str_has_prefix (data, "#EXTINF:")) {
+ gdouble fval;
+ if (!double_from_string (data + 8, &data, &fval)) {
+ GST_WARNING ("Can't read EXTINF duration");
+ goto next_line;
+ }
+ duration = fval * (gdouble) GST_SECOND;
+ if (self->targetduration > 0 && duration > self->targetduration) {
+ GST_WARNING ("EXTINF duration (%" GST_TIME_FORMAT
+ ") > TARGETDURATION (%" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (duration), GST_TIME_ARGS (self->targetduration));
+ }
+ if (!data || *data != ',')
+ goto next_line;
+ data = g_utf8_next_char (data);
+ if (data != end) {
+ g_free (title);
+ title = g_strdup (data);
+ }
+ } else if (g_str_has_prefix (data, "#EXT-X-")) {
+ gchar *data_ext_x = data + 7;
+
+ /* All these entries start with #EXT-X- */
+ if (g_str_has_prefix (data_ext_x, "ENDLIST")) {
+ self->endlist = TRUE;
+ } else if (g_str_has_prefix (data_ext_x, "VERSION:")) {
+ if (int_from_string (data + 15, &data, &val))
+ self->version = val;
+ } else if (g_str_has_prefix (data_ext_x, "TARGETDURATION:")) {
+ if (int_from_string (data + 22, &data, &val))
+ self->targetduration = val * GST_SECOND;
+ } else if (g_str_has_prefix (data_ext_x, "MEDIA-SEQUENCE:")) {
+ if (int_from_string (data + 22, &data, &val)) {
+ mediasequence = val;
+ have_mediasequence = TRUE;
+ }
+ } else if (g_str_has_prefix (data_ext_x, "DISCONTINUITY-SEQUENCE:")) {
+ if (int_from_string (data + 30, &data, &val)
+ && val != self->discont_sequence) {
+ self->discont_sequence = val;
+ discontinuity = TRUE;
+ }
+ } else if (g_str_has_prefix (data_ext_x, "DISCONTINUITY")) {
+ self->discont_sequence++;
+ discontinuity = TRUE;
+ } else if (g_str_has_prefix (data_ext_x, "PROGRAM-DATE-TIME:")) {
+ /* <YYYY-MM-DDThh:mm:ssZ> */
+ GST_DEBUG ("FIXME parse date");
+ } else if (g_str_has_prefix (data_ext_x, "ALLOW-CACHE:")) {
+ self->allowcache = g_ascii_strcasecmp (data + 19, "YES") == 0;
+ } else if (g_str_has_prefix (data_ext_x, "KEY:")) {
+ gchar *v, *a;
+
+ data = data + 11;
+
+ /* IV and KEY are only valid until the next #EXT-X-KEY */
+ have_iv = FALSE;
+ g_free (current_key);
+ current_key = NULL;
+ while (data && parse_attributes (&data, &a, &v)) {
+ if (g_str_equal (a, "URI")) {
+ current_key =
+ uri_join (self->base_uri ? self->base_uri : self->uri, v);
+ } else if (g_str_equal (a, "IV")) {
+ gchar *ivp = v;
+ gint i;
+
+ if (strlen (ivp) < 32 + 2 || (!g_str_has_prefix (ivp, "0x")
+ && !g_str_has_prefix (ivp, "0X"))) {
+ GST_WARNING ("Can't read IV");
+ continue;
+ }
+
+ ivp += 2;
+ for (i = 0; i < 16; i++) {
+ gint h, l;
+
+ h = g_ascii_xdigit_value (*ivp);
+ ivp++;
+ l = g_ascii_xdigit_value (*ivp);
+ ivp++;
+ if (h == -1 || l == -1) {
+ i = -1;
+ break;
+ }
+ iv[i] = (h << 4) | l;
+ }
+
+ if (i == -1) {
+ GST_WARNING ("Can't read IV");
+ continue;
+ }
+ have_iv = TRUE;
+ } else if (g_str_equal (a, "METHOD")) {
+ if (!g_str_equal (v, "AES-128")) {
+ GST_WARNING ("Encryption method %s not supported", v);
+ continue;
+ }
+ }
+ }
+ } else if (g_str_has_prefix (data_ext_x, "BYTERANGE:")) {
+ gchar *v = data + 17;
+
+ if (int64_from_string (v, &v, &size)) {
+ if (*v == '@' && !int64_from_string (v + 1, &v, &offset))
+ goto next_line;
+ } else {
+ goto next_line;
+ }
+ } else if (g_str_has_prefix (data_ext_x, "MAP:")) {
+ gchar *v, *a, *header_uri = NULL;
+
+ data = data + 11;
+
+ while (data != NULL && parse_attributes (&data, &a, &v)) {
+ if (strcmp (a, "URI") == 0) {
+ header_uri =
+ uri_join (self->base_uri ? self->base_uri : self->uri, v);
+ } else if (strcmp (a, "BYTERANGE") == 0) {
+ if (int64_from_string (v, &v, &size)) {
+ if (*v == '@' && !int64_from_string (v + 1, &v, &offset)) {
+ g_free (header_uri);
+ goto next_line;
+ }
+ } else {
+ g_free (header_uri);
+ goto next_line;
+ }
+ }
+ }
+
+ if (header_uri) {
+ GstM3U8InitFile *init_file;
+ init_file = gst_m3u8_init_file_new (header_uri);
+
+ if (size != -1) {
+ init_file->size = size;
+ if (offset != -1)
+ init_file->offset = offset;
+ else
+ init_file->offset = 0;
+ } else {
+ init_file->size = -1;
+ init_file->offset = 0;
+ }
+ if (last_init_file)
+ gst_m3u8_init_file_unref (last_init_file);
+
+ last_init_file = init_file;
+ }
- /* Store duration of the fragment we're using to update the position
++ }
++#ifdef TIZEN_FEATURE_AD
++ else if (g_str_has_prefix (data_ext_x, "CUE-OUT:")) {
++ GstM3U8Cue *cue;
++ gdouble fval;
++
++ GST_LOG ("cue out: %" GST_TIME_FORMAT ", %s", GST_TIME_ARGS (timestamp), data);
++
++ data = data + strlen ("#EXT-X-CUE-OUT:");
++ if (g_str_has_prefix (data, "DURATION="))
++ data = data + strlen ("DURATION=");
++
++ if (!double_from_string (data, &data, &fval)) {
++ GST_WARNING ("Can't read CUE-OUT duration");
++ goto next_line;
++ }
++
++ duration = fval * (gdouble) GST_SECOND;
++
++ cue = gst_m3u8_cue_info_new (timestamp, duration);
++ GST_LOG ("cue out start %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT
++ , GST_TIME_ARGS (cue->start_time), GST_TIME_ARGS (cue->duration));
++ self->ad_info->cue = g_list_append (self->ad_info->cue, cue);
++ duration = 0;
++ } else if (g_str_has_prefix (data_ext_x, "CUE-IN")) {
++ GList *cue;
++ GstM3U8Cue *cue_data;
++
++ GST_LOG ("cue in: %" GST_TIME_FORMAT ", %s", GST_TIME_ARGS (timestamp), data);
++
++ cue = g_list_last (self->ad_info->cue);
++ if (!cue || !(cue->data)) {
++ GST_WARNING ("there is no valid data");
++ goto next_line;
++ }
++
++ cue_data = cue->data;
++ GST_LOG ("start %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT,
++ GST_TIME_ARGS (cue_data->start_time), GST_TIME_ARGS (cue_data->duration));
++
++ if (cue_data->end_time != 0) {
++ GST_WARNING ("cue syntax err, skip this tag.");
++ goto next_line;
++ }
++
++ cue_data->end_time = timestamp;
++
++ GST_LOG ("cue start %" GST_TIME_FORMAT ", end %" GST_TIME_FORMAT " dur %" GST_TIME_FORMAT,
++ GST_TIME_ARGS (cue_data->start_time), GST_TIME_ARGS (cue_data->end_time),
++ GST_TIME_ARGS (cue_data->duration));
++ } else if (g_str_has_prefix (data_ext_x, "CUE-OUT-CONT:")) {
++ GstM3U8CueOutCont *cont = g_new0 (GstM3U8CueOutCont, 1);
++
++ GST_LOG ("cue cont: %" GST_TIME_FORMAT ", %s", GST_TIME_ARGS (timestamp), data);
++
++ data = data + strlen ("#EXT-X-CUE-OUT-CONT:");
++ cont->timestamp = timestamp;
++ cont->cont_data = g_strdup (data);
++ self->ad_info->cue_cont = g_list_append (self->ad_info->cue_cont, cont);
++ }
++#endif
++ else {
+ GST_LOG ("Ignored line: %s", data);
+ }
+ } else {
+ GST_LOG ("Ignored line: %s", data);
+ }
+
+ next_line:
+ if (!end)
+ break;
+ data = g_utf8_next_char (end); /* skip \n */
+ }
+
+ g_free (current_key);
+ current_key = NULL;
+
+ self->files = g_list_reverse (self->files);
+
+ if (last_init_file)
+ gst_m3u8_init_file_unref (last_init_file);
+
+ if (previous_files) {
+ gboolean consistent = TRUE;
+
+ if (have_mediasequence) {
+ consistent = check_media_seqnums (self, previous_files);
+ } else {
+ generate_media_seqnums (self, previous_files);
+ }
+
+ g_list_foreach (previous_files, (GFunc) gst_m3u8_media_file_unref, NULL);
+ g_list_free (previous_files);
+ previous_files = NULL;
+
+ /* error was reported above already */
+ if (!consistent) {
+ GST_M3U8_UNLOCK (self);
+ return FALSE;
+ }
+ }
+
+ if (self->files == NULL) {
+ GST_ERROR ("Invalid media playlist, it does not contain any media files");
+ GST_M3U8_UNLOCK (self);
+ return FALSE;
+ }
+
+ /* calculate the start and end times of this media playlist. */
+ {
+ GList *walk;
+ GstM3U8MediaFile *file;
+ GstClockTime duration = 0;
+
+ mediasequence = -1;
+
+ for (walk = self->files; walk; walk = walk->next) {
+ file = walk->data;
+
+ if (mediasequence == -1) {
+ mediasequence = file->sequence;
+ } else if (mediasequence >= file->sequence) {
+ GST_ERROR ("Non-increasing media sequence");
+ GST_M3U8_UNLOCK (self);
+ return FALSE;
+ } else {
+ mediasequence = file->sequence;
+ }
+
+ duration += file->duration;
+ if (file->sequence > self->highest_sequence_number) {
+ if (self->highest_sequence_number >= 0) {
+ /* if an update of the media playlist has been missed, there
+ will be a gap between self->highest_sequence_number and the
+ first sequence number in this media playlist. In this situation
+ assume that the missing fragments had a duration of
+ targetduration each */
+ self->last_file_end +=
+ (file->sequence - self->highest_sequence_number -
+ 1) * self->targetduration;
+ }
+ self->last_file_end += file->duration;
+ self->highest_sequence_number = file->sequence;
+ }
+ }
+ if (GST_M3U8_IS_LIVE (self)) {
+ self->first_file_start = self->last_file_end - duration;
+ GST_DEBUG ("Live playlist range %" GST_TIME_FORMAT " -> %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (self->first_file_start),
+ GST_TIME_ARGS (self->last_file_end));
+ }
+ self->duration = duration;
+ }
+
+ /* first-time setup */
+ if (self->files && self->sequence == -1) {
+ GList *file;
+
+ if (GST_M3U8_IS_LIVE (self)) {
+ gint i;
+ GstClockTime sequence_pos = 0;
+
+ file = g_list_last (self->files);
+
+ if (self->last_file_end >= GST_M3U8_MEDIA_FILE (file->data)->duration) {
+ sequence_pos =
+ self->last_file_end - GST_M3U8_MEDIA_FILE (file->data)->duration;
+ }
+
+ /* for live streams, start GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE from
+ * the end of the playlist. See section 6.3.3 of HLS draft */
+ for (i = 0; i < GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE && file->prev &&
+ GST_M3U8_MEDIA_FILE (file->prev->data)->duration <= sequence_pos;
+ ++i) {
+ file = file->prev;
+ sequence_pos -= GST_M3U8_MEDIA_FILE (file->data)->duration;
+ }
+ self->sequence_position = sequence_pos;
+ } else {
+ file = g_list_first (self->files);
+ self->sequence_position = 0;
+ }
+ self->current_file = file;
+ self->sequence = GST_M3U8_MEDIA_FILE (file->data)->sequence;
+ GST_DEBUG ("first sequence: %u", (guint) self->sequence);
+ }
+
+ GST_LOG ("processed media playlist %s, %u fragments", self->name,
+ g_list_length (self->files));
+
+ GST_M3U8_UNLOCK (self);
+
+ return TRUE;
+ }
+
+ /* call with M3U8_LOCK held */
+ static GList *
+ m3u8_find_next_fragment (GstM3U8 * m3u8, gboolean forward)
+ {
+ GstM3U8MediaFile *file;
+ GList *l = m3u8->files;
+
+ if (forward) {
+ while (l) {
+ file = l->data;
+
+ if (file->sequence >= m3u8->sequence)
+ break;
+
+ l = l->next;
+ }
+ } else {
+ l = g_list_last (l);
+
+ while (l) {
+ file = l->data;
+
+ if (file->sequence <= m3u8->sequence)
+ break;
+
+ l = l->prev;
+ }
+ }
+
+ return l;
+ }
+
/* Return a reference to the current fragment, looking it up first if the
 * current-file pointer is unset, and update the playlist's current
 * sequence/duration bookkeeping.  @sequence_position and @discont are
 * optional out-parameters.  Caller owns the returned reference; returns
 * NULL if no suitable fragment exists. */
GstM3U8MediaFile *
gst_m3u8_get_next_fragment (GstM3U8 * m3u8, gboolean forward,
    GstClockTime * sequence_position, gboolean * discont)
{
  GstM3U8MediaFile *file = NULL;

  g_return_val_if_fail (m3u8 != NULL, NULL);

  GST_M3U8_LOCK (m3u8);

  GST_DEBUG ("Looking for fragment %" G_GINT64_FORMAT, m3u8->sequence);

  if (m3u8->sequence < 0)       /* can't happen really */
    goto out;

  if (m3u8->current_file == NULL)
    m3u8->current_file = m3u8_find_next_fragment (m3u8, forward);

  if (m3u8->current_file == NULL)
    goto out;

  /* take a reference for the caller */
  file = gst_m3u8_media_file_ref (m3u8->current_file->data);

  GST_DEBUG ("Got fragment with sequence %u (current sequence %u)",
      (guint) file->sequence, (guint) m3u8->sequence);

  if (sequence_position)
    *sequence_position = m3u8->sequence_position;
  /* a fragment is discontinuous either by its own flag or because we
   * skipped over sequence numbers to reach it */
  if (discont)
    *discont = file->discont || (m3u8->sequence != file->sequence);

  /* remember this fragment's duration so gst_m3u8_advance_fragment() can
   * update the sequence position the next time we advance */
  m3u8->current_file_duration = file->duration;
  m3u8->sequence = file->sequence;

out:

  GST_M3U8_UNLOCK (m3u8);

  return file;
}
+
+ gboolean
+ gst_m3u8_has_next_fragment (GstM3U8 * m3u8, gboolean forward)
+ {
+ gboolean have_next;
+ GList *cur;
+
+ g_return_val_if_fail (m3u8 != NULL, FALSE);
+
+ GST_M3U8_LOCK (m3u8);
+
+ GST_DEBUG ("Checking next fragment %" G_GINT64_FORMAT,
+ m3u8->sequence + (forward ? 1 : -1));
+
+ if (m3u8->current_file) {
+ cur = m3u8->current_file;
+ } else {
+ cur = m3u8_find_next_fragment (m3u8, forward);
+ }
+
+ have_next = cur && ((forward && cur->next) || (!forward && cur->prev));
+
+ GST_M3U8_UNLOCK (m3u8);
+
+ return have_next;
+ }
+
+ /* call with M3U8_LOCK held */
+ static void
+ m3u8_alternate_advance (GstM3U8 * m3u8, gboolean forward)
+ {
+ gint targetnum = m3u8->sequence;
+ GList *tmp;
+ GstM3U8MediaFile *mf;
+
+ /* figure out the target seqnum */
+ if (forward)
+ targetnum += 1;
+ else
+ targetnum -= 1;
+
+ for (tmp = m3u8->files; tmp; tmp = tmp->next) {
+ mf = (GstM3U8MediaFile *) tmp->data;
+ if (mf->sequence == targetnum)
+ break;
+ }
+ if (tmp == NULL) {
+ GST_WARNING ("Can't find next fragment");
+ return;
+ }
+ m3u8->current_file = tmp;
+ m3u8->sequence = targetnum;
+ m3u8->current_file_duration = GST_M3U8_MEDIA_FILE (tmp->data)->duration;
+ }
+
/* Move the playlist's current-fragment pointer one step forward or
 * backward, updating the sequence position based on the duration of the
 * fragment that was just played.  Falls back to a full list scan, and
 * finally to a live-playlist resync, when the current node has been
 * dropped from an updated playlist. */
void
gst_m3u8_advance_fragment (GstM3U8 * m3u8, gboolean forward)
{
  GstM3U8MediaFile *file;

  g_return_if_fail (m3u8 != NULL);

  GST_M3U8_LOCK (m3u8);

  GST_DEBUG ("Sequence position was %" GST_TIME_FORMAT,
      GST_TIME_ARGS (m3u8->sequence_position));
  if (GST_CLOCK_TIME_IS_VALID (m3u8->current_file_duration)) {
    /* Advance our position based on the previous fragment we played */
    if (forward)
      m3u8->sequence_position += m3u8->current_file_duration;
    else if (m3u8->current_file_duration < m3u8->sequence_position)
      m3u8->sequence_position -= m3u8->current_file_duration;
    else
      m3u8->sequence_position = 0;      /* clamp: never go negative */
    m3u8->current_file_duration = GST_CLOCK_TIME_NONE;
    GST_DEBUG ("Sequence position now %" GST_TIME_FORMAT,
        GST_TIME_ARGS (m3u8->sequence_position));
  }
  if (!m3u8->current_file) {
    GList *l;

    /* current node unknown (e.g. after a playlist reload): find the node
     * matching the stored sequence number */
    GST_DEBUG ("Looking for fragment %" G_GINT64_FORMAT, m3u8->sequence);
    for (l = m3u8->files; l != NULL; l = l->next) {
      if (GST_M3U8_MEDIA_FILE (l->data)->sequence == m3u8->sequence) {
        m3u8->current_file = l;
        break;
      }
    }
    if (m3u8->current_file == NULL) {
      GST_DEBUG
          ("Could not find current fragment, trying next fragment directly");
      m3u8_alternate_advance (m3u8, forward);

      /* Resync sequence number if the above has failed for live streams */
      if (m3u8->current_file == NULL && GST_M3U8_IS_LIVE (m3u8)) {
        /* for live streams, start GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE from
           the end of the playlist. See section 6.3.3 of HLS draft */
        gint pos =
            g_list_length (m3u8->files) - GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE;
        m3u8->current_file = g_list_nth (m3u8->files, pos >= 0 ? pos : 0);
        m3u8->current_file_duration =
            GST_M3U8_MEDIA_FILE (m3u8->current_file->data)->duration;

        GST_WARNING ("Resyncing live playlist");
      }
      /* note: m3u8_alternate_advance() already set sequence/duration when
       * it succeeded, so skip the generic advance below */
      goto out;
    }
  }

  file = GST_M3U8_MEDIA_FILE (m3u8->current_file->data);
  GST_DEBUG ("Advancing from sequence %u", (guint) file->sequence);
  if (forward) {
    m3u8->current_file = m3u8->current_file->next;
    if (m3u8->current_file) {
      m3u8->sequence = GST_M3U8_MEDIA_FILE (m3u8->current_file->data)->sequence;
    } else {
      /* walked off the end: remember the expected next sequence number */
      m3u8->sequence = file->sequence + 1;
    }
  } else {
    m3u8->current_file = m3u8->current_file->prev;
    if (m3u8->current_file) {
      m3u8->sequence = GST_M3U8_MEDIA_FILE (m3u8->current_file->data)->sequence;
    } else {
      m3u8->sequence = file->sequence - 1;
    }
  }
  if (m3u8->current_file) {
    /* Store duration of the fragment we're using to update the position
     * the next time we advance */
    m3u8->current_file_duration =
        GST_M3U8_MEDIA_FILE (m3u8->current_file->data)->duration;
  }

out:

  GST_M3U8_UNLOCK (m3u8);
}
+
+ GstClockTime
+ gst_m3u8_get_duration (GstM3U8 * m3u8)
+ {
+ GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+ g_return_val_if_fail (m3u8 != NULL, GST_CLOCK_TIME_NONE);
+
+ GST_M3U8_LOCK (m3u8);
+
+ /* We can only get the duration for on-demand streams */
+ if (!m3u8->endlist)
+ goto out;
+
+ if (!GST_CLOCK_TIME_IS_VALID (m3u8->duration) && m3u8->files != NULL) {
+ GList *f;
+
+ m3u8->duration = 0;
+ for (f = m3u8->files; f != NULL; f = f->next)
+ m3u8->duration += GST_M3U8_MEDIA_FILE (f)->duration;
+ }
+ duration = m3u8->duration;
+
+ out:
+
+ GST_M3U8_UNLOCK (m3u8);
+
+ return duration;
+ }
+
+ GstClockTime
+ gst_m3u8_get_target_duration (GstM3U8 * m3u8)
+ {
+ GstClockTime target_duration;
+
+ g_return_val_if_fail (m3u8 != NULL, GST_CLOCK_TIME_NONE);
+
+ GST_M3U8_LOCK (m3u8);
+ target_duration = m3u8->targetduration;
+ GST_M3U8_UNLOCK (m3u8);
+
+ return target_duration;
+ }
+
+ gchar *
+ gst_m3u8_get_uri (GstM3U8 * m3u8)
+ {
+ gchar *uri;
+
+ GST_M3U8_LOCK (m3u8);
+ uri = g_strdup (m3u8->uri);
+ GST_M3U8_UNLOCK (m3u8);
+
+ return uri;
+ }
+
+ gboolean
+ gst_m3u8_is_live (GstM3U8 * m3u8)
+ {
+ gboolean is_live;
+
+ g_return_val_if_fail (m3u8 != NULL, FALSE);
+
+ GST_M3U8_LOCK (m3u8);
+ is_live = GST_M3U8_IS_LIVE (m3u8);
+ GST_M3U8_UNLOCK (m3u8);
+
+ return is_live;
+ }
+
/* Join @uri2 onto @uri1: if @uri2 is already a full URI it is simply
 * duplicated; otherwise it is resolved relative to @uri1 (relative path)
 * or against @uri1's scheme+host (absolute path starting with '/').
 * Returns a newly-allocated string, or NULL on failure; caller frees. */
gchar *
uri_join (const gchar * uri1, const gchar * uri2)
{
  gchar *uri_copy, *tmp, *ret = NULL;

  if (gst_uri_is_valid (uri2))
    return g_strdup (uri2);

  /* work on a private copy since we punch '\0's into it below */
  uri_copy = g_strdup (uri1);
  if (uri2[0] != '/') {
    /* uri2 is a relative uri2 */
    /* look for query params */
    tmp = g_utf8_strchr (uri_copy, -1, '?');
    if (tmp) {
      /* find last / char, ignoring query params */
      tmp = g_utf8_strrchr (uri_copy, tmp - uri_copy, '/');
    } else {
      /* find last / char in URL */
      tmp = g_utf8_strrchr (uri_copy, -1, '/');
    }
    if (!tmp) {
      GST_WARNING ("Can't build a valid uri_copy");
      goto out;
    }

    /* truncate at the last '/' and append the relative part */
    *tmp = '\0';
    ret = g_strdup_printf ("%s/%s", uri_copy, uri2);
  } else {
    /* uri2 is an absolute uri2 */
    char *scheme, *hostname;

    scheme = uri_copy;
    /* find the : in <scheme>:// */
    tmp = g_utf8_strchr (uri_copy, -1, ':');
    if (!tmp) {
      GST_WARNING ("Can't build a valid uri_copy");
      goto out;
    }

    *tmp = '\0';

    /* skip :// */
    /* NOTE(review): assumes "//" directly follows the ':' without checking;
     * a malformed uri1 like "mailto:x" would index past the separator —
     * confirm callers only pass hierarchical URIs */
    hostname = tmp + 3;

    /* strip any path after the host */
    tmp = g_utf8_strchr (hostname, -1, '/');
    if (tmp)
      *tmp = '\0';

    ret = g_strdup_printf ("%s://%s%s", scheme, hostname, uri2);
  }

out:
  g_free (uri_copy);
  return ret;
}
+
/* Compute the seekable range of the playlist.  @start/@stop are written
 * only on success (return TRUE).  For live playlists the last
 * GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE fragments are excluded from the
 * range, per the HLS draft's guidance on playback near the live edge. */
gboolean
gst_m3u8_get_seek_range (GstM3U8 * m3u8, gint64 * start, gint64 * stop)
{
  GstClockTime duration = 0;
  GList *walk;
  GstM3U8MediaFile *file;
  guint count;
  guint min_distance = 0;

  g_return_val_if_fail (m3u8 != NULL, FALSE);

  GST_M3U8_LOCK (m3u8);

  if (m3u8->files == NULL)
    goto out;

  if (GST_M3U8_IS_LIVE (m3u8)) {
    /* min_distance is used to make sure the seek range is never closer than
       GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE fragments from the end of a live
       playlist - see 6.3.3. "Playing the Playlist file" of the HLS draft */
    min_distance = GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE;
  }
  count = g_list_length (m3u8->files);

  /* sum the durations of all fragments except the last min_distance ones */
  for (walk = m3u8->files; walk && count > min_distance; walk = walk->next) {
    file = walk->data;
    --count;
    duration += file->duration;
  }

  /* NOTE(review): GstClockTime appears to be unsigned here, so this
   * effectively tests duration == 0 — confirm against gstclock.h */
  if (duration <= 0)
    goto out;

  *start = m3u8->first_file_start;
  *stop = *start + duration;

out:

  GST_M3U8_UNLOCK (m3u8);
  return (duration > 0);
}
+
+ GstHLSMedia *
+ gst_hls_media_ref (GstHLSMedia * media)
+ {
+ g_assert (media != NULL && media->ref_count > 0);
+ g_atomic_int_add (&media->ref_count, 1);
+ return media;
+ }
+
+ void
+ gst_hls_media_unref (GstHLSMedia * media)
+ {
+ g_assert (media != NULL && media->ref_count > 0);
+ if (g_atomic_int_dec_and_test (&media->ref_count)) {
+ if (media->playlist)
+ gst_m3u8_unref (media->playlist);
+ g_free (media->group_id);
+ g_free (media->name);
+ g_free (media->uri);
+ g_free (media->lang);
+ g_free (media);
+ }
+ }
+
+ static GstHLSMediaType
+ gst_m3u8_get_hls_media_type_from_string (const gchar * type_name)
+ {
+ if (strcmp (type_name, "AUDIO") == 0)
+ return GST_HLS_MEDIA_TYPE_AUDIO;
+ if (strcmp (type_name, "VIDEO") == 0)
+ return GST_HLS_MEDIA_TYPE_VIDEO;
+ if (strcmp (type_name, "SUBTITLES") == 0)
+ return GST_HLS_MEDIA_TYPE_SUBTITLES;
+ if (strcmp (type_name, "CLOSED_CAPTIONS") == 0)
+ return GST_HLS_MEDIA_TYPE_CLOSED_CAPTIONS;
+
+ return GST_HLS_MEDIA_TYPE_INVALID;
+ }
+
+ #define GST_HLS_MEDIA_TYPE_NAME(mtype) gst_hls_media_type_get_name(mtype)
+ const gchar *
+ gst_hls_media_type_get_name (GstHLSMediaType mtype)
+ {
+ static const gchar *nicks[GST_HLS_N_MEDIA_TYPES] = { "audio", "video",
+ "subtitle", "closed-captions"
+ };
+
+ if (mtype < 0 || mtype >= GST_HLS_N_MEDIA_TYPES)
+ return "invalid";
+
+ return nicks[mtype];
+ }
+
+ /* returns unquoted copy of string */
+ static gchar *
+ gst_m3u8_unquote (const gchar * str)
+ {
+ const gchar *start, *end;
+
+ start = strchr (str, '"');
+ if (start == NULL)
+ return g_strdup (str);
+ end = strchr (start + 1, '"');
+ if (end == NULL) {
+ GST_WARNING ("Broken quoted string [%s] - can't find end quote", str);
+ return g_strdup (start + 1);
+ }
+ return g_strndup (start + 1, (gsize) (end - (start + 1)));
+ }
+
/* Parse one EXT-X-MEDIA attribute list (@desc points just past the tag)
 * into a new GstHLSMedia.  Returns NULL when required attributes are
 * missing or — outside the Tizen language-tag build — when the entry has
 * no URI (describes a stream embedded in the variant) or is a closed
 * captions entry with a URI.  @desc is modified in place by the
 * attribute parser. */
static GstHLSMedia *
gst_m3u8_parse_media (gchar * desc, const gchar * base_uri)
{
  GstHLSMedia *media;
  gchar *a, *v;

  media = g_new0 (GstHLSMedia, 1);
  media->ref_count = 1;
  media->playlist = gst_m3u8_new ();
  media->mtype = GST_HLS_MEDIA_TYPE_INVALID;

  GST_LOG ("parsing %s", desc);
  while (desc != NULL && parse_attributes (&desc, &a, &v)) {
    if (strcmp (a, "TYPE") == 0) {
      media->mtype = gst_m3u8_get_hls_media_type_from_string (v);
    } else if (strcmp (a, "GROUP-ID") == 0) {
      /* g_free of the previous value guards against duplicate attributes */
      g_free (media->group_id);
      media->group_id = gst_m3u8_unquote (v);
    } else if (strcmp (a, "NAME") == 0) {
      g_free (media->name);
      media->name = gst_m3u8_unquote (v);
    } else if (strcmp (a, "URI") == 0) {
      gchar *uri;

      g_free (media->uri);
      uri = gst_m3u8_unquote (v);
      media->uri = uri_join (base_uri, uri);
      g_free (uri);
    } else if (strcmp (a, "LANGUAGE") == 0) {
      g_free (media->lang);
      media->lang = gst_m3u8_unquote (v);
    } else if (strcmp (a, "DEFAULT") == 0) {
      media->is_default = g_ascii_strcasecmp (v, "yes") == 0;
    } else if (strcmp (a, "FORCED") == 0) {
      media->forced = g_ascii_strcasecmp (v, "yes") == 0;
    } else if (strcmp (a, "AUTOSELECT") == 0) {
      media->autoselect = g_ascii_strcasecmp (v, "yes") == 0;
    } else {
      /* unhandled: ASSOC-LANGUAGE, INSTREAM-ID, CHARACTERISTICS */
      GST_FIXME ("EXT-X-MEDIA: unhandled attribute: %s = %s", a, v);
    }
  }

  if (media->mtype == GST_HLS_MEDIA_TYPE_INVALID)
    goto required_attributes_missing;

#ifndef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
  if (media->uri == NULL)
    goto existing_stream;
#endif

  if (media->group_id == NULL || media->name == NULL)
    goto required_attributes_missing;

#ifndef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
  if (media->mtype == GST_HLS_MEDIA_TYPE_CLOSED_CAPTIONS)
    goto uri_with_cc;
#endif

  GST_DEBUG ("media: %s, group '%s', name '%s', uri '%s', %s %s %s, lang=%s",
      GST_HLS_MEDIA_TYPE_NAME (media->mtype), media->group_id, media->name,
      media->uri, media->is_default ? "default" : "-",
      media->autoselect ? "autoselect" : "-",
      media->forced ? "forced" : "-", media->lang ? media->lang : "??");

  return media;

#ifndef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
uri_with_cc:
  {
    GST_WARNING ("closed captions EXT-X-MEDIA should not have URI specified");
    goto out_error;
  }
#endif
required_attributes_missing:
  {
    GST_WARNING ("EXT-X-MEDIA description is missing required attributes");
    goto out_error;
    /* fall through */
  }
#ifndef TIZEN_FEATURE_HLSDEMUX_LANG_TAG
existing_stream:
  {
    GST_DEBUG ("EXT-X-MEDIA without URI, describes embedded stream, skipping");
    /* falls through into out_error below: the partially-built media is
     * released and NULL is returned, i.e. the entry is skipped */
  }
#endif

out_error:
  {
    gst_hls_media_unref (media);
    return NULL;
  }
}
+
+ static GstHLSVariantStream *
+ gst_hls_variant_stream_new (void)
+ {
+ GstHLSVariantStream *stream;
+
+ stream = g_new0 (GstHLSVariantStream, 1);
+ stream->m3u8 = gst_m3u8_new ();
+ stream->refcount = 1;
+ return stream;
+ }
+
/* Atomically add a reference to @stream and return it for chaining. */
GstHLSVariantStream *
gst_hls_variant_stream_ref (GstHLSVariantStream * stream)
{
  g_atomic_int_inc (&stream->refcount);
  return stream;
}
+
+ void
+ gst_hls_variant_stream_unref (GstHLSVariantStream * stream)
+ {
+ if (g_atomic_int_dec_and_test (&stream->refcount)) {
+ gint i;
+
+ g_free (stream->name);
+ g_free (stream->uri);
+ g_free (stream->codecs);
+ gst_m3u8_unref (stream->m3u8);
+ for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+ g_free (stream->media_groups[i]);
+ g_list_free_full (stream->media[i], (GDestroyNotify) gst_hls_media_unref);
+ }
+ g_free (stream);
+ }
+ }
+
+ static GstHLSVariantStream *
+ find_variant_stream_by_name (GList * list, const gchar * name)
+ {
+ for (; list != NULL; list = list->next) {
+ GstHLSVariantStream *variant_stream = list->data;
+
+ if (variant_stream->name != NULL && !strcmp (variant_stream->name, name))
+ return variant_stream;
+ }
+ return NULL;
+ }
+
+ static GstHLSVariantStream *
+ find_variant_stream_by_uri (GList * list, const gchar * uri)
+ {
+ for (; list != NULL; list = list->next) {
+ GstHLSVariantStream *variant_stream = list->data;
+
+ if (variant_stream->uri != NULL && !strcmp (variant_stream->uri, uri))
+ return variant_stream;
+ }
+ return NULL;
+ }
+
+ static GstHLSMasterPlaylist *
+ gst_hls_master_playlist_new (void)
+ {
+ GstHLSMasterPlaylist *playlist;
+
+ playlist = g_new0 (GstHLSMasterPlaylist, 1);
+ playlist->refcount = 1;
+ playlist->is_simple = FALSE;
+
+ return playlist;
+ }
+
+ void
+ gst_hls_master_playlist_unref (GstHLSMasterPlaylist * playlist)
+ {
+ if (g_atomic_int_dec_and_test (&playlist->refcount)) {
+ g_list_free_full (playlist->variants,
+ (GDestroyNotify) gst_hls_variant_stream_unref);
+ g_list_free_full (playlist->iframe_variants,
+ (GDestroyNotify) gst_hls_variant_stream_unref);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ g_list_free_full (playlist->variant_info, g_free);
++#endif
+ if (playlist->default_variant)
+ gst_hls_variant_stream_unref (playlist->default_variant);
+ g_free (playlist->last_data);
+ g_free (playlist);
+ }
+ }
+
+ static gint
+ hls_media_name_compare_func (gconstpointer media, gconstpointer name)
+ {
+ return strcmp (((GstHLSMedia *) media)->name, (const gchar *) name);
+ }
+
/* Takes ownership of @data */
/* Parse a master playlist from @data.  If the data actually contains
 * EXTINF lines it is treated as a simple media playlist wrapped in a
 * single synthetic variant.  Returns a new GstHLSMasterPlaylist, or NULL
 * on parse failure; @data is consumed (freed) in all cases. */
GstHLSMasterPlaylist *
gst_hls_master_playlist_new_from_data (gchar * data, const gchar * base_uri)
{
  GHashTable *media_groups[GST_HLS_N_MEDIA_TYPES] = { NULL, };
  GstHLSMasterPlaylist *playlist;
  GstHLSVariantStream *pending_stream;
  gchar *end, *free_data = data;
  gint val, i;
  GList *l;

  if (!g_str_has_prefix (data, "#EXTM3U")) {
    GST_WARNING ("Data doesn't start with #EXTM3U");
    g_free (free_data);
    return NULL;
  }

  playlist = gst_hls_master_playlist_new ();

  /* store data before we modify it for parsing */
  playlist->last_data = g_strdup (data);

  GST_TRACE ("data:\n%s", data);

  if (strstr (data, "\n#EXTINF:") != NULL) {
    GST_INFO ("This is a simple media playlist, not a master playlist");

    /* wrap the media playlist in a single variant named after the URI */
    pending_stream = gst_hls_variant_stream_new ();
    pending_stream->name = g_strdup (base_uri);
    pending_stream->uri = g_strdup (base_uri);
    gst_m3u8_set_uri (pending_stream->m3u8, base_uri, NULL, base_uri);
    playlist->variants = g_list_append (playlist->variants, pending_stream);
    playlist->default_variant = gst_hls_variant_stream_ref (pending_stream);
    playlist->is_simple = TRUE;

    if (!gst_m3u8_update (pending_stream->m3u8, data)) {
      GST_WARNING ("Failed to parse media playlist");
      gst_hls_master_playlist_unref (playlist);
      playlist = NULL;
    }
    return playlist;
  }

  pending_stream = NULL;
  data += 7;                    /* skip the "#EXTM3U" header */
  while (TRUE) {
    gchar *r;

    /* terminate the current line in place ('\n' and optional '\r') */
    end = g_utf8_strchr (data, -1, '\n');
    if (end)
      *end = '\0';

    r = g_utf8_strchr (data, -1, '\r');
    if (r)
      *r = '\0';

    if (data[0] != '#' && data[0] != '\0') {
      gchar *name, *uri;

      /* a non-tag line is the URI of the variant announced by the
       * preceding EXT-X-STREAM-INF */
      if (pending_stream == NULL) {
        GST_LOG ("%s: got line without EXT-STREAM-INF, dropping", data);
        goto next_line;
      }

      name = data;
      uri = uri_join (base_uri, name);
      if (uri == NULL)
        goto next_line;

      pending_stream->name = g_strdup (name);
      pending_stream->uri = uri;

      if (find_variant_stream_by_name (playlist->variants, name)
          || find_variant_stream_by_uri (playlist->variants, uri)) {
        GST_DEBUG ("Already have a list with this name or URI: %s", name);
        gst_hls_variant_stream_unref (pending_stream);
      } else {
        GST_INFO ("stream %s @ %u: %s", name, pending_stream->bandwidth, uri);
        gst_m3u8_set_uri (pending_stream->m3u8, uri, NULL, name);
        playlist->variants = g_list_append (playlist->variants, pending_stream);
        /* use first stream in the playlist as default */
        if (playlist->default_variant == NULL) {
          playlist->default_variant =
              gst_hls_variant_stream_ref (pending_stream);
        }
      }
      pending_stream = NULL;
    } else if (g_str_has_prefix (data, "#EXT-X-VERSION:")) {
      if (int_from_string (data + 15, &data, &val))
        playlist->version = val;
    } else if (g_str_has_prefix (data, "#EXT-X-STREAM-INF:") ||
        g_str_has_prefix (data, "#EXT-X-I-FRAME-STREAM-INF:")) {
      GstHLSVariantStream *stream;
      gchar *v, *a;

      stream = gst_hls_variant_stream_new ();
      stream->iframe = g_str_has_prefix (data, "#EXT-X-I-FRAME-STREAM-INF:");
      data += stream->iframe ? 26 : 18; /* skip the tag itself */
      while (data && parse_attributes (&data, &a, &v)) {
        if (g_str_equal (a, "BANDWIDTH")) {
          /* AVERAGE-BANDWIDTH (below) takes precedence if seen first */
          if (!stream->bandwidth) {
            if (!int_from_string (v, NULL, &stream->bandwidth))
              GST_WARNING ("Error while reading BANDWIDTH");
          }
        } else if (g_str_equal (a, "AVERAGE-BANDWIDTH")) {
          GST_DEBUG
              ("AVERAGE-BANDWIDTH attribute available. Using it as stream bandwidth");
          if (!int_from_string (v, NULL, &stream->bandwidth))
            GST_WARNING ("Error while reading AVERAGE-BANDWIDTH");
        } else if (g_str_equal (a, "PROGRAM-ID")) {
          if (!int_from_string (v, NULL, &stream->program_id))
            GST_WARNING ("Error while reading PROGRAM-ID");
        } else if (g_str_equal (a, "CODECS")) {
          g_free (stream->codecs);
          stream->codecs = g_strdup (v);
        } else if (g_str_equal (a, "RESOLUTION")) {
          /* format is <width>x<height> */
          if (!int_from_string (v, &v, &stream->width))
            GST_WARNING ("Error while reading RESOLUTION width");
          if (!v || *v != 'x') {
            GST_WARNING ("Missing height");
          } else {
            v = g_utf8_next_char (v);
            if (!int_from_string (v, NULL, &stream->height))
              GST_WARNING ("Error while reading RESOLUTION height");
          }
        } else if (stream->iframe && g_str_equal (a, "URI")) {
          stream->uri = uri_join (base_uri, v);
          if (stream->uri != NULL) {
            stream->name = g_strdup (stream->uri);
            gst_m3u8_set_uri (stream->m3u8, stream->uri, NULL, stream->name);
          } else {
            /* NOTE(review): stream is unreffed here, yet the attribute loop
             * and the code below keep using it — looks like a potential
             * use-after-free on a malformed I-frame URI; confirm upstream */
            gst_hls_variant_stream_unref (stream);
          }
        } else if (g_str_equal (a, "AUDIO")) {
          g_free (stream->media_groups[GST_HLS_MEDIA_TYPE_AUDIO]);
          stream->media_groups[GST_HLS_MEDIA_TYPE_AUDIO] = gst_m3u8_unquote (v);
        } else if (g_str_equal (a, "SUBTITLES")) {
          g_free (stream->media_groups[GST_HLS_MEDIA_TYPE_SUBTITLES]);
          stream->media_groups[GST_HLS_MEDIA_TYPE_SUBTITLES] =
              gst_m3u8_unquote (v);
        } else if (g_str_equal (a, "VIDEO")) {
          g_free (stream->media_groups[GST_HLS_MEDIA_TYPE_VIDEO]);
          stream->media_groups[GST_HLS_MEDIA_TYPE_VIDEO] = gst_m3u8_unquote (v);
        } else if (g_str_equal (a, "CLOSED-CAPTIONS")) {
          /* closed captions will be embedded inside the video stream, ignore */
        }
      }

      if (stream->iframe) {
        /* I-frame variants are complete at this point (URI attribute) */
        if (find_variant_stream_by_uri (playlist->iframe_variants, stream->uri)) {
          GST_DEBUG ("Already have a list with this URI");
          gst_hls_variant_stream_unref (stream);
        } else {
          playlist->iframe_variants =
              g_list_append (playlist->iframe_variants, stream);
        }
      } else {
        /* regular variants get their URI from the following line */
        if (pending_stream != NULL) {
          GST_WARNING ("variant stream without uri, dropping");
          gst_hls_variant_stream_unref (pending_stream);
        }
        pending_stream = stream;
      }
    } else if (g_str_has_prefix (data, "#EXT-X-MEDIA:")) {
      GstHLSMedia *media;
      GList *list;

      media = gst_m3u8_parse_media (data + strlen ("#EXT-X-MEDIA:"), base_uri);

      if (media == NULL)
        goto next_line;

      /* group alternate renditions per media type, keyed by GROUP-ID */
      if (media_groups[media->mtype] == NULL) {
        media_groups[media->mtype] =
            g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
      }

      list = g_hash_table_lookup (media_groups[media->mtype], media->group_id);

      /* make sure there isn't already a media with the same name */
      if (!g_list_find_custom (list, media->name, hls_media_name_compare_func)) {
        g_hash_table_replace (media_groups[media->mtype],
            g_strdup (media->group_id), g_list_append (list, media));
        GST_INFO ("Added media %s to group %s", media->name, media->group_id);
      } else {
        GST_WARNING (" media with name '%s' already exists in group '%s'!",
            media->name, media->group_id);
        gst_hls_media_unref (media);
      }
    } else if (*data != '\0') {
      GST_LOG ("Ignored line: %s", data);
    }

  next_line:
    if (!end)
      break;
    data = g_utf8_next_char (end);      /* skip \n */
  }

  if (pending_stream != NULL) {
    GST_WARNING ("#EXT-X-STREAM-INF without uri, dropping");
    gst_hls_variant_stream_unref (pending_stream);
  }

  g_free (free_data);

  /* Add alternative renditions media to variant streams */
  for (l = playlist->variants; l != NULL; l = l->next) {
    GstHLSVariantStream *stream = l->data;
    GList *mlist;

    for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
      if (stream->media_groups[i] != NULL && media_groups[i] != NULL) {
        GST_INFO ("Adding %s group '%s' to stream '%s'",
            GST_HLS_MEDIA_TYPE_NAME (i), stream->media_groups[i], stream->name);

        mlist = g_hash_table_lookup (media_groups[i], stream->media_groups[i]);

        if (mlist == NULL)
          GST_WARNING ("Group '%s' does not exist!", stream->media_groups[i]);

        while (mlist != NULL) {
          GstHLSMedia *media = mlist->data;

          GST_DEBUG ("  %s media %s, uri: %s", GST_HLS_MEDIA_TYPE_NAME (i),
              media->name, media->uri);

          stream->media[i] =
              g_list_append (stream->media[i], gst_hls_media_ref (media));
          mlist = mlist->next;
        }
      }
    }
  }

  /* clean up our temporary alternative rendition groups hash tables */
  for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
    if (media_groups[i] != NULL) {
      GList *groups, *mlist;

      groups = g_hash_table_get_keys (media_groups[i]);
      for (l = groups; l != NULL; l = l->next) {
        mlist = g_hash_table_lookup (media_groups[i], l->data);
        g_list_free_full (mlist, (GDestroyNotify) gst_hls_media_unref);
      }
      g_list_free (groups);
      g_hash_table_unref (media_groups[i]);
    }
  }

  if (playlist->variants == NULL) {
    GST_WARNING ("Master playlist without any media playlists!");
    gst_hls_master_playlist_unref (playlist);
    return NULL;
  }

  /* reorder variants by bitrate */
  playlist->variants =
      g_list_sort (playlist->variants,
      (GCompareFunc) gst_hls_variant_stream_compare_by_bitrate);

  playlist->iframe_variants =
      g_list_sort (playlist->iframe_variants,
      (GCompareFunc) gst_hls_variant_stream_compare_by_bitrate);

  /* FIXME: restore old current_variant after master playlist update
   * (move into code that does that update) */
#if 0
  {
    gchar *top_variant_uri = NULL;
    gboolean iframe = FALSE;

    if (!self->current_variant) {
      top_variant_uri = GST_M3U8 (self->lists->data)->uri;
    } else {
      top_variant_uri = GST_M3U8 (self->current_variant->data)->uri;
      iframe = GST_M3U8 (self->current_variant->data)->iframe;
    }

    /* here we sorted the lists */

    if (iframe)
      playlist->current_variant =
          find_variant_stream_by_uri (playlist->iframe_variants,
          top_variant_uri);
    else
      playlist->current_variant =
          find_variant_stream_by_uri (playlist->variants, top_variant_uri);
  }
#endif

  GST_DEBUG ("parsed master playlist with %d streams and %d I-frame streams",
      g_list_length (playlist->variants),
      g_list_length (playlist->iframe_variants));

#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
  /* NOTE(review): declaration after statements requires C99+; also this
   * records either the I-frame variants or the regular variants, never
   * both — confirm that is the intended Tizen behavior */
  GList *v = (playlist->iframe_variants)?(playlist->iframe_variants):(playlist->variants);

  /* update variant stream info */
  for (; v != NULL; v = v->next) {
    GstHLSVariantStream *data = v->data;
    GstM3U8VideoVariantInfo *var_info = g_new0 (GstM3U8VideoVariantInfo, 1);

    GST_LOG ("variant info %d, %d x %d", data->bandwidth, data->width,
        data->height);
    var_info->bandwidth = data->bandwidth;
    var_info->width = data->width;
    var_info->height = data->height;

    playlist->variant_info = g_list_append (playlist->variant_info, var_info);
  }
#endif

  return playlist;
}
+
+ /* Returns TRUE if the variant's media playlist is live, i.e. it has not
+  * (yet) seen an EXT-X-ENDLIST tag (delegates to gst_m3u8_is_live()). */
+ gboolean
+ gst_hls_variant_stream_is_live (GstHLSVariantStream * variant)
+ {
+ gboolean is_live;
+
+ g_return_val_if_fail (variant != NULL, FALSE);
+
+ is_live = gst_m3u8_is_live (variant->m3u8);
+
+ return is_live;
+ }
+
+ /* GCompareFunc for g_list_find_custom(): orders two GstHLSMedia entries
+  * by their rendition NAME attribute. */
+ static gint
+ compare_media (const GstHLSMedia * a, const GstHLSMedia * b)
+ {
+ return strcmp (a->name, b->name);
+ }
+
+ /* Find, in 'stream', the alternate rendition of the same media type whose
+  * NAME matches 'media' (used when switching variants to keep the same
+  * audio/subtitle selection). Returns a borrowed pointer or NULL. */
+ GstHLSMedia *
+ gst_hls_variant_find_matching_media (GstHLSVariantStream * stream,
+ GstHLSMedia * media)
+ {
+ GList *mlist = stream->media[media->mtype];
+ GList *match;
+
+ if (mlist == NULL)
+ return NULL;
+
+ match = g_list_find_custom (mlist, media, (GCompareFunc) compare_media);
+ if (match == NULL)
+ return NULL;
+
+ return match->data;
+ }
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++/* Count the comma-separated entries in the variant's CODECS attribute
++ * (capped at MAX_NUM_OF_CODEC by g_strsplit); returns 0 when the variant
++ * or its codecs string is missing. */
++static guint
++get_num_of_codec(GstHLSVariantStream * variant)
++{
++#define MAX_NUM_OF_CODEC 10
++
++ guint cnt = 0;
++ gchar** codec_list = NULL;
++
++ if (!variant || !variant->codecs)
++ return 0;
++
++ codec_list = g_strsplit (variant->codecs, ",", MAX_NUM_OF_CODEC);
++ if (codec_list) {
++ cnt = g_strv_length (codec_list);
++ g_strfreev (codec_list);
++ }
++
++ return cnt;
++}
++
++/* TRUE when 'variant' carries the same number of codecs as the current
++ * selection ('req_num'), so switching does not change the codec set.
++ * A count of 0 on either side (unknown) always passes; a NULL variant
++ * fails. */
++static gboolean
++check_num_of_codec(GstHLSVariantStream * variant, guint req_num)
++{
++ guint num_of_codec = 0;
++
++ if (!variant)
++ return FALSE;
++
++ num_of_codec = get_num_of_codec (variant);
++
++ if (num_of_codec > 0 && req_num > 0 && num_of_codec != req_num) {
++ GST_WARNING ("can not support to change codec");
++ return FALSE;
++ }
++
++ return TRUE;
++}
++
++/* Derive the effective upper bandwidth bound for variant selection:
++ * prefer a start_limit that falls inside [min_limit, max_limit], then the
++ * measured network bitrate, then fall back to min_limit when the bitrate
++ * is unknown or below the minimum; otherwise keep max_limit unchanged.
++ * DEFAULT_BANDWIDTH_LIMIT denotes "unset". */
++static gint
++update_max_limit(gint min_limit, gint max_limit, gint start_limit, gint bitrate)
++{
++ if (start_limit > DEFAULT_BANDWIDTH_LIMIT && start_limit >= min_limit &&
++ (start_limit <= max_limit || max_limit == DEFAULT_BANDWIDTH_LIMIT)) {
++ GST_DEBUG ("set max to start : %d ", start_limit);
++ return start_limit;
++ }
++
++ if (bitrate > 0 && bitrate >= min_limit &&
++ (bitrate <= max_limit || max_limit == DEFAULT_BANDWIDTH_LIMIT)) {
++ GST_DEBUG ("set max to bitrate : %d ", bitrate);
++ return bitrate;
++ }
++
++ if (bitrate == 0 || bitrate < min_limit) {
++ GST_DEBUG ("set max to min : %d", min_limit);
++ return min_limit;
++ }
++
++ return max_limit;
++}
++
++/* Return the median-bandwidth variant among those that match num_of_codec
++ * and lie within [min_limit, max_limit]; NULL when none qualifies.
++ * Relies on 'variants' being sorted by bandwidth, low -> high. */
++GstHLSVariantStream *
++get_average_variant(GList *variants, guint num_of_codec, gint min_limit, gint max_limit)
++{
++ GList *l = NULL;
++ GList *valid_list = NULL;
++ gint cnt = 0;
++ guint num_of_valid_variant = 0;
++ GstHLSVariantStream *tmp = NULL;
++
++ /* first pass: count the in-range variants; 'valid_list' ends up on the
++  * highest valid entry (list is sorted low -> high) */
++ for (l = g_list_first (variants); l; l = g_list_next (l)) {
++ if (!check_num_of_codec ((GstHLSVariantStream *)l->data, num_of_codec))
++ continue;
++
++ tmp = l->data;
++
++ if (max_limit != DEFAULT_BANDWIDTH_LIMIT && tmp->bandwidth > max_limit) {
++ GST_DEBUG ("over max limit");
++ break;
++ }
++
++ if (min_limit != DEFAULT_BANDWIDTH_LIMIT && tmp->bandwidth < min_limit) {
++ GST_DEBUG ("skip to next");
++ continue;
++ }
++
++ num_of_valid_variant++;
++ valid_list = l;
++ }
++
++ GST_DEBUG ("num of valid variant %d / %d", num_of_valid_variant, g_list_length (variants));
++
++ /* second pass: walk back from the highest valid entry to the median */
++ for (; valid_list; valid_list = g_list_previous (valid_list)) {
++ if (!check_num_of_codec ((GstHLSVariantStream *)valid_list->data, num_of_codec))
++ continue;
++
++ tmp = valid_list->data;
++ if (num_of_valid_variant/2 == cnt) {
++ GST_DEBUG ("get this stream %d", tmp->bandwidth);
++ return tmp;
++ }
++ cnt++;
++ }
++ return NULL;
++}
++
++/* Tizen: select a variant stream subject to externally imposed limits.
++ * bitrate          - measured network bitrate (0 = unknown)
++ * start_bandwidth  - "LOWEST", "HIGHEST", "AVERAGE", a numeric string, or NULL
++ * min/max_bandwidth, width, height - caps; DEFAULT_*_LIMIT means unset
++ * Returns the chosen variant, or NULL to fall back to the default variant.
++ * NOTE(review): "bandwitdh" in the symbol name is a pre-existing typo; it is
++ * kept because external callers reference this exact name. */
++GstHLSVariantStream *
++gst_hls_master_playlist_get_variant_for_bandwitdh_limit (GstHLSMasterPlaylist * playlist,
++ GstHLSVariantStream * current_variant, guint bitrate, gchar * start_bandwidth,
++ gint min_bandwidth, gint max_bandwidth, gint width, gint height)
++{
++ GstHLSVariantStream *tmp = current_variant;
++ GstHLSVariantStream *variant = NULL;
++ GstHLSVariantStream *min_variant = NULL; // lowest
++ GstHLSVariantStream *max_variant = NULL; // highest
++ GList *variants = NULL;
++ GList *l = NULL;
++ gint max_limit = DEFAULT_BANDWIDTH_LIMIT;
++ gint min_limit = DEFAULT_BANDWIDTH_LIMIT;
++ gint start_limit = DEFAULT_BANDWIDTH_LIMIT;
++ gint adj_max_limit = DEFAULT_BANDWIDTH_LIMIT;
++ guint num_of_valid_variant = 0;
++ guint num_of_codec = 0;
++
++ num_of_codec = get_num_of_codec (current_variant);
++
++ GST_DEBUG ("bitrate: %u, bandwidth: %s, %d ~ %d, resolution: %d X %d",
++ bitrate, start_bandwidth, min_bandwidth, max_bandwidth, width, height);
++
++ /* get variant list */
++ if (current_variant == NULL || !current_variant->iframe)
++ variants = playlist->variants;
++ else
++ variants = playlist->iframe_variants;
++
++ if (!variants) {
++ GST_ERROR ("invalid playlist");
++ return current_variant;
++ }
++
++ /* get valid min/max variant */
++ for (l = g_list_first (variants); l; l = g_list_next (l)) {
++ if (!check_num_of_codec ((GstHLSVariantStream *)l->data, num_of_codec))
++ continue;
++ tmp = l->data;
++ num_of_valid_variant++;
++
++ if (!min_variant) {
++ min_variant = tmp;
++ }
++ }
++ /* list is sorted low -> high, so the last valid entry is the highest */
++ max_variant = tmp;
++
++ GST_DEBUG("num of valid variant %d / %d", num_of_valid_variant, g_list_length (variants));
++ if (num_of_valid_variant <= 1)
++ return tmp;
++
++ /* get valid range limit */
++ if (max_bandwidth == DEFAULT_BANDWIDTH_LIMIT || min_bandwidth <= max_bandwidth) {
++ if (min_variant->bandwidth <= max_bandwidth)
++ max_limit = adj_max_limit = max_bandwidth;
++
++ if (max_variant->bandwidth >= min_bandwidth)
++ min_limit = min_bandwidth;
++ }
++
++ GST_DEBUG ("range limit: %d ~ %d", min_limit, max_limit);
++
++ if (start_bandwidth) {
++ if (!g_strcmp0 (start_bandwidth, "LOWEST")) {
++ if (min_limit == DEFAULT_BANDWIDTH_LIMIT)
++ return min_variant;
++ adj_max_limit = min_limit;
++ } else if (!g_strcmp0 (start_bandwidth, "HIGHEST")) {
++ if (max_limit == DEFAULT_BANDWIDTH_LIMIT)
++ return max_variant;
++ } else if (!g_strcmp0 (start_bandwidth, "AVERAGE")) {
++ variant = get_average_variant (variants, num_of_codec, min_limit, max_limit);
++ if (variant)
++ return variant;
++ } else {
++ start_limit = atoi (start_bandwidth);
++ /* update max limit based on the start_bandwidth or network bitrate */
++ adj_max_limit = update_max_limit (min_limit, max_limit, start_limit, bitrate);
++ }
++ } else {
++ /* update max limit based on the network bitrate */
++ adj_max_limit = update_max_limit (min_limit, max_limit, DEFAULT_BANDWIDTH_LIMIT, bitrate);
++ }
++
++ /* no usable constraint at all: let the caller pick the default variant.
++  * NOTE(review): assumes DEFAULT_BANDWIDTH_LIMIT/DEFAULT_RESOLUTION_LIMIT
++  * are negative sentinels — confirm against their definitions */
++ if (min_limit < 0 && adj_max_limit < 0 && width < 0 && height < 0) {
++ GST_WARNING ("invalid condition, get default variant");
++ return NULL;
++ }
++
++ GST_DEBUG ("adj range limit: %d ~ %d (origin: %d)", min_limit, adj_max_limit, max_limit);
++
++ /* variant lists are sorted low to high, so iterate from highest to lowest */
++ tmp = NULL;
++ for (l = g_list_last (variants); l; l = g_list_previous (l)) {
++ if (!check_num_of_codec ((GstHLSVariantStream *)l->data, num_of_codec))
++ continue;
++
++ tmp = l->data;
++ GST_DEBUG ("stream info: %d, %d x %d", tmp->bandwidth, tmp->width, tmp->height);
++
++ if (tmp->bandwidth < min_limit) {
++ GList *j = g_list_next(l);
++ if (variant)
++ break;
++
++ if (j &&
++ ((max_limit == DEFAULT_BANDWIDTH_LIMIT) ||
++ ((GstHLSVariantStream*)j->data)->bandwidth <= max_limit))
++ variant = j->data; /* get the lowest one in the valid range */
++ else
++ variant = tmp;
++ break;
++ }
++
++ if (adj_max_limit > DEFAULT_BANDWIDTH_LIMIT && adj_max_limit < tmp->bandwidth)
++ continue;
++
++ if (((width > DEFAULT_RESOLUTION_LIMIT) && (tmp->width > width)) ||
++ ((height > DEFAULT_RESOLUTION_LIMIT) && (tmp->height > height))) {
++ if (adj_max_limit > DEFAULT_BANDWIDTH_LIMIT && !variant) { /* will be kept with the first one with the same bitrate */
++ variant = tmp;
++ }
++ } else {
++ variant = tmp;
++ GST_DEBUG ("get this stream %d", variant->bandwidth);
++ break;
++ }
++ }
++
++ /* fall back to the lowest-bandwidth candidate seen when nothing matched */
++ return (variant)?(variant):(tmp);
++}
++
++#else
+ /* Upstream variant selection: walk the (low -> high sorted) variant list
+  * from the top and return the highest variant whose bandwidth fits within
+  * 'bitrate'; falls through to the lowest variant when none fits. Uses the
+  * I-frame list when the current variant is an I-frame stream. */
+ GstHLSVariantStream *
+ gst_hls_master_playlist_get_variant_for_bitrate (GstHLSMasterPlaylist *
+ playlist, GstHLSVariantStream * current_variant, guint bitrate)
+ {
+ GstHLSVariantStream *variant = current_variant;
+ GList *l;
+
+ /* variant lists are sorted low to high, so iterate from highest to lowest */
+ if (current_variant == NULL || !current_variant->iframe)
+ l = g_list_last (playlist->variants);
+ else
+ l = g_list_last (playlist->iframe_variants);
+
+ while (l != NULL) {
+ variant = l->data;
+ if (variant->bandwidth <= bitrate)
+ break;
+ l = l->prev;
+ }
+
+ return variant;
+ }
++#endif
+
+ /* After a master playlist reload, find the variant in 'playlist' with the
+  * same URI as 'current_variant' (searching the I-frame list when the
+  * current variant is an I-frame stream). */
+ GstHLSVariantStream *
+ gst_hls_master_playlist_get_matching_variant (GstHLSMasterPlaylist * playlist,
+ GstHLSVariantStream * current_variant)
+ {
+ if (current_variant->iframe) {
+ return find_variant_stream_by_uri (playlist->iframe_variants,
+ current_variant->uri);
+ }
+
+ return find_variant_stream_by_uri (playlist->variants, current_variant->uri);
+ }
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * m3u8.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __M3U8_H__
+ #define __M3U8_H__
+
+ #include <gst/gst.h>
+
+ G_BEGIN_DECLS
+
+ typedef struct _GstM3U8 GstM3U8;
+ typedef struct _GstM3U8MediaFile GstM3U8MediaFile;
+ typedef struct _GstM3U8InitFile GstM3U8InitFile;
+ typedef struct _GstHLSMedia GstHLSMedia;
+ typedef struct _GstM3U8Client GstM3U8Client;
+ typedef struct _GstHLSVariantStream GstHLSVariantStream;
+ typedef struct _GstHLSMasterPlaylist GstHLSMasterPlaylist;
++#ifdef TIZEN_FEATURE_AD
++typedef struct _GstM3U8AdInfo GstM3U8AdInfo;
++typedef struct _GstM3U8Cue GstM3U8Cue;
++typedef struct _GstM3U8CueOutCont GstM3U8CueOutCont;
++#endif
+
+ #define GST_M3U8(m) ((GstM3U8*)m)
+ #define GST_M3U8_MEDIA_FILE(f) ((GstM3U8MediaFile*)f)
+
+ #define GST_M3U8_LOCK(m) g_mutex_lock (&m->lock);
+ #define GST_M3U8_UNLOCK(m) g_mutex_unlock (&m->lock);
+
+ #define GST_M3U8_IS_LIVE(m) ((m)->endlist == FALSE)
+
+ /* hlsdemux must not get closer to the end of a live stream than
+ GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE fragments. Section 6.3.3
+ "Playing the Playlist file" of the HLS draft states that this
+ value is three fragments */
+ #define GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE 3
+
+ struct _GstM3U8
+ {
+ gchar *uri; /* actually downloaded URI */
+ gchar *base_uri; /* URI to use as base for resolving relative URIs.
+ * This will be different to uri in case of redirects */
+ gchar *name; /* This will be the "name" of the playlist, the original
+ * relative/absolute uri in a variant playlist */
+
+ /* parsed info */
+ gboolean endlist; /* if ENDLIST has been reached */
+ gint version; /* last EXT-X-VERSION */
+ GstClockTime targetduration; /* last EXT-X-TARGETDURATION */
+ gboolean allowcache; /* last EXT-X-ALLOWCACHE */
+
+ GList *files;
++#ifdef TIZEN_FEATURE_AD
++ GstM3U8AdInfo *ad_info;
++#endif
+
+ /* state */
+ GList *current_file;
+ GstClockTime current_file_duration; /* Duration of current fragment */
+ gint64 sequence; /* the next sequence for this client */
+ GstClockTime sequence_position; /* position of this sequence */
+ gint64 highest_sequence_number; /* largest seen sequence number */
+ GstClockTime first_file_start; /* timecode of the start of the first fragment in the current media playlist */
+ GstClockTime last_file_end; /* timecode of the end of the last fragment in the current media playlist */
+ GstClockTime duration; /* cached total duration */
+ gint discont_sequence; /* currently expected EXT-X-DISCONTINUITY-SEQUENCE */
+
+ /*< private > */
+ gchar *last_data;
+ GMutex lock;
+
+ gint ref_count; /* ATOMIC */
+ };
+
+ GstM3U8 * gst_m3u8_ref (GstM3U8 * m3u8);
+
+ void gst_m3u8_unref (GstM3U8 * m3u8);
+
+
+ struct _GstM3U8MediaFile
+ {
+ gchar *title;
+ GstClockTime duration;
+ gchar *uri;
+ gint64 sequence; /* the sequence nb of this file */
+ gboolean discont; /* this file marks a discontinuity */
+ gchar *key;
+ guint8 iv[16];
+ gint64 offset, size;
+ gint ref_count; /* ATOMIC */
+ GstM3U8InitFile *init_file; /* Media Initialization (hold ref) */
+ };
+
+ struct _GstM3U8InitFile
+ {
+ gchar *uri;
+ gint64 offset, size;
+ guint ref_count; /* ATOMIC */
+ };
+
++#ifdef TIZEN_FEATURE_AD
++
++struct _GstM3U8Cue
++{
++ GstClockTime start_time; /* EXT-X-CUE-OUT */
++ GstClockTime end_time; /* EXT-X-CUE-IN */
++ GstClockTime duration; /* from EXT-X-CUE-OUT */
++};
++
++struct _GstM3U8CueOutCont
++{
++ GstClockTime timestamp;
++ gchar *cont_data; /* EXT-X-CUE-OUT-CONT */
++};
++
++struct _GstM3U8AdInfo
++{
++ GList *cue; /* GstM3U8Cue */
++ GList *cue_cont; /* GstM3U8CueOutCont */
++};
++
++#endif
++
+ GstM3U8MediaFile * gst_m3u8_media_file_ref (GstM3U8MediaFile * mfile);
+
+ void gst_m3u8_media_file_unref (GstM3U8MediaFile * mfile);
+
+ GstM3U8 * gst_m3u8_new (void);
+
+ gboolean gst_m3u8_update (GstM3U8 * m3u8,
+ gchar * data);
+
+ void gst_m3u8_set_uri (GstM3U8 * m3u8,
+ const gchar * uri,
+ const gchar * base_uri,
+ const gchar * name);
+
+ GstM3U8MediaFile * gst_m3u8_get_next_fragment (GstM3U8 * m3u8,
+ gboolean forward,
+ GstClockTime * sequence_position,
+ gboolean * discont);
+
+ gboolean gst_m3u8_has_next_fragment (GstM3U8 * m3u8,
+ gboolean forward);
+
+ void gst_m3u8_advance_fragment (GstM3U8 * m3u8,
+ gboolean forward);
+
+ GstClockTime gst_m3u8_get_duration (GstM3U8 * m3u8);
+
+ GstClockTime gst_m3u8_get_target_duration (GstM3U8 * m3u8);
+
+ gchar * gst_m3u8_get_uri (GstM3U8 * m3u8);
+
+ gboolean gst_m3u8_is_live (GstM3U8 * m3u8);
+
+ gboolean gst_m3u8_get_seek_range (GstM3U8 * m3u8,
+ gint64 * start,
+ gint64 * stop);
+
+ typedef enum
+ {
+ GST_HLS_MEDIA_TYPE_INVALID = -1,
+ GST_HLS_MEDIA_TYPE_AUDIO,
+ GST_HLS_MEDIA_TYPE_VIDEO,
+ GST_HLS_MEDIA_TYPE_SUBTITLES,
+ GST_HLS_MEDIA_TYPE_CLOSED_CAPTIONS,
+ GST_HLS_N_MEDIA_TYPES
+ } GstHLSMediaType;
+
+ struct _GstHLSMedia {
+ GstHLSMediaType mtype;
+ gchar *group_id;
+ gchar *name;
+ gchar *lang;
+ gchar *uri;
+ gboolean is_default;
+ gboolean autoselect;
+ gboolean forced;
+
+ GstM3U8 *playlist; /* media playlist */
+
+ gint ref_count; /* ATOMIC */
+ };
+
+ GstHLSMedia * gst_hls_media_ref (GstHLSMedia * media);
+
+ void gst_hls_media_unref (GstHLSMedia * media);
+
+ const gchar * gst_hls_media_type_get_name (GstHLSMediaType mtype);
+
+
+ struct _GstHLSVariantStream {
+ gchar *name; /* This will be the "name" of the playlist, the original
+ * relative/absolute uri in a variant playlist */
+ gchar *uri;
+ gchar *codecs;
+ gint bandwidth;
+ gint program_id;
+ gint width;
+ gint height;
+ gboolean iframe;
+
+ gint refcount; /* ATOMIC */
+
+ GstM3U8 *m3u8; /* media playlist */
+
+ /* alternative renditions */
+ gchar *media_groups[GST_HLS_N_MEDIA_TYPES];
+ GList *media[GST_HLS_N_MEDIA_TYPES];
+ };
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++typedef struct
++{
++ gint bandwidth;
++ gint width;
++ gint height;
++} GstM3U8VideoVariantInfo;
++#endif
++
+ GstHLSVariantStream * gst_hls_variant_stream_ref (GstHLSVariantStream * stream);
+
+ void gst_hls_variant_stream_unref (GstHLSVariantStream * stream);
+
+ gboolean gst_hls_variant_stream_is_live (GstHLSVariantStream * stream);
+
+ GstHLSMedia * gst_hls_variant_find_matching_media (GstHLSVariantStream * stream,
+ GstHLSMedia *media);
+
+
+ struct _GstHLSMasterPlaylist
+ {
+ /* Available variant streams, sorted by bitrate (low -> high) */
+ GList *variants;
+ GList *iframe_variants;
-
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ GList *variant_info; /* stream variant info */
++#endif
+ GstHLSVariantStream *default_variant; /* first in the list */
+
+ gint version; /* EXT-X-VERSION */
+
+ gint refcount; /* ATOMIC */
+
+ gboolean is_simple; /* TRUE if simple main media playlist,
+ * FALSE if variant playlist (either
+ * way the variants list will be set) */
+
+ /*< private > */
+ gchar *last_data;
+ };
+
+ GstHLSMasterPlaylist * gst_hls_master_playlist_new_from_data (gchar * data,
+ const gchar * base_uri);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++GstHLSVariantStream * gst_hls_master_playlist_get_variant_for_bandwitdh_limit (GstHLSMasterPlaylist * playlist,
++ GstHLSVariantStream * current_variant,
++ guint bitrate, gchar * start_bandwidth, gint min_bandwidth,
++ gint max_bandwidth, gint width, gint height);
++#else
+ GstHLSVariantStream * gst_hls_master_playlist_get_variant_for_bitrate (GstHLSMasterPlaylist * playlist,
+ GstHLSVariantStream * current_variant,
+ guint bitrate);
++#endif
+ GstHLSVariantStream * gst_hls_master_playlist_get_matching_variant (GstHLSMasterPlaylist * playlist,
+ GstHLSVariantStream * current_variant);
+
+ void gst_hls_master_playlist_unref (GstHLSMasterPlaylist * playlist);
+
+ G_END_DECLS
+
+ #endif /* __M3U8_H__ */
--- /dev/null
-if not hls_crypto_dep.found() and ['auto', 'openssl'].contains(hls_crypto)
- hls_crypto_dep = dependency('openssl', required : false)
+ hls_sources = [
+ 'gsthlsdemux.c',
+ 'gsthlsdemux-util.c',
+ 'gsthlselement.c',
+ 'gsthlsplugin.c',
+ 'gsthlssink.c',
+ 'gsthlssink2.c',
+ 'gstm3u8playlist.c',
+ 'm3u8.c',
+ ]
+
+ hls_cargs = ['-DGST_USE_UNSTABLE_API']
+
+ hls_crypto = get_option('hls-crypto')
+ hls_option = get_option('hls')
+ hls_crypto_dep = dependency('', required : false)
+ # used for unit test
+ hls_dep = dependency('', required : false)
+
+ if hls_option.disabled()
+ subdir_done()
+ endif
+
+ if ['auto', 'nettle'].contains(hls_crypto)
+ hls_crypto_dep = dependency('nettle', version : '>= 3.0', required : false)
+ if hls_crypto_dep.found()
+ hls_cargs += ['-DHAVE_NETTLE']
+ endif
+ endif
+
+ if not hls_crypto_dep.found() and ['auto', 'libgcrypt'].contains(hls_crypto)
+ hls_crypto_dep = cc.find_library('gcrypt', required : false)
+ if hls_crypto_dep.found()
+ hls_cargs += ['-DHAVE_LIBGCRYPT']
+ endif
+ endif
+
++if not hls_crypto_dep.found() and ['auto', 'openssl1.1'].contains(hls_crypto)
++ hls_crypto_dep = dependency('openssl1.1', required : false)
+ if hls_crypto_dep.found()
+ hls_cargs += ['-DHAVE_OPENSSL']
+ endif
+ endif
+
+ if not hls_crypto_dep.found()
+ if hls_crypto == 'auto'
+ message('Enable HLS plugin enable without crypto')
+ elif hls_option.enabled()
+ error('HLS plugin enabled with crypto, but crypto library "@0@" not found'.format(hls_crypto))
+ else
+ subdir_done()
+ endif
+ endif
+
+ gsthls = library('gsthls',
+ hls_sources,
+ c_args : gst_plugins_bad_args + hls_cargs,
+ link_args : noseh_link_args,
+ include_directories : [configinc],
+ dependencies : [gstpbutils_dep, gsttag_dep, gstvideo_dep,
+ gstadaptivedemux_dep, gsturidownloader_dep,
+ hls_crypto_dep, gio_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+ )
+ pkgconfig.generate(gsthls, install_dir : plugins_pkgconfig_install_dir)
+ plugins += [gsthls]
+ hls_dep = declare_dependency(include_directories : include_directories('.'))
--- /dev/null
- gst_static_pad_template_get_caps (&sink_factory));
+ /*
+ * GStreamer
+ * Copyright 2005 Thomas Vander Stichele <thomas@apestaart.org>
+ * Copyright 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright 2008, 2009 Vincent Penquerc'h <ogg.k.ogg.k@googlemail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-katedec
+ * @title: katedec
+ * @see_also: oggdemux
+ *
+ * This element decodes Kate streams.
+ *
+ * [Kate](http://libkate.googlecode.com/) is a free codec
+ * for text based data, such as subtitles. Any number of kate streams can be
+ * embedded in an Ogg stream.
+ *
+ * libkate (see above url) is needed to build this plugin.
+ *
+ * ## Example pipeline
+ *
+ * This explicitly decodes a Kate stream:
+ * |[
+ * gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE
+ * ]|
+ *
+ * This will automatically detect and use any Kate streams multiplexed
+ * in an Ogg stream:
+ * |[
+ * gst-launch-1.0 playbin uri=file:///tmp/test.ogg
+ * ]|
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+
+ #include <gst/gst.h>
+
+ #include "gstkateelements.h"
+ #include "gstkatespu.h"
+ #include "gstkatedec.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (gst_katedec_debug);
+ #define GST_CAT_DEFAULT gst_katedec_debug
+
+ /* Filter signals and args */
+ enum
+ {
+ /* FILL ME */
+ LAST_SIGNAL
+ };
+
+ enum
+ {
+ ARG_REMOVE_MARKUP = DECODER_BASE_ARG_COUNT
+ };
+
+ /* We don't accept application/x-kate here on purpose for now, since we're
+ * only really interested in subtitle-like things for playback purposes, not
+ * cracktastic complex overlays or presentation images etc. - those should be
+ * fed into a tiger overlay plugin directly */
+ static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("subtitle/x-kate")
+ );
+
+ static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("text/x-raw, format = { pango-markup, utf8 }; "
+ GST_KATE_SPU_MIME_TYPE)
+ );
+
+ GST_DEBUG_CATEGORY (gst_katedec_debug);
+
+ #define gst_kate_dec_parent_class parent_class
+ G_DEFINE_TYPE (GstKateDec, gst_kate_dec, GST_TYPE_ELEMENT);
+ #define _do_init \
+ kate_element_init (plugin); \
+ GST_DEBUG_CATEGORY_INIT (gst_katedec_debug, "katedec", 0, "Kate decoder");
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (katedec, "katedec", GST_RANK_PRIMARY,
+ GST_TYPE_KATE_DEC, _do_init);
+
+ static void gst_kate_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_kate_dec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static GstFlowReturn gst_kate_dec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ static GstStateChangeReturn gst_kate_dec_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean gst_kate_dec_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_kate_dec_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_kate_dec_sink_handle_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_kate_dec_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+ /* initialize the plugin's class */
+ /* GObject class init: install properties (decoder-base set plus the
+  * "remove-markup" boolean), the state-change vfunc, the static pad
+  * templates and the element metadata. */
+ static void
+ gst_kate_dec_class_init (GstKateDecClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_kate_dec_set_property;
+ gobject_class->get_property = gst_kate_dec_get_property;
+
+ gst_kate_util_install_decoder_base_properties (gobject_class);
+
+ g_object_class_install_property (gobject_class, ARG_REMOVE_MARKUP,
+ g_param_spec_boolean ("remove-markup", "Remove markup",
+ "Remove markup from decoded text ?", FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_kate_dec_change_state);
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Kate stream text decoder", "Codec/Decoder/Subtitle",
+ "Decodes Kate text streams",
+ "Vincent Penquerc'h <ogg.k.ogg.k@googlemail.com>");
+ }
+
+ /* initialize the new element
+ * instantiate pads and add them to element
+ * set functions
+ * initialize structure
+ */
+ /* Instance init: create sink/src pads with their chain/query/event
+  * functions, attach them to the element and reset decoder state. */
+ static void
+ gst_kate_dec_init (GstKateDec * dec)
+ {
++ GstCaps *tmp = NULL;
+ GST_DEBUG_OBJECT (dec, "gst_kate_dec_init");
+
+ dec->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+ gst_pad_set_chain_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_kate_dec_chain));
+ gst_pad_set_query_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_kate_dec_sink_query));
+ gst_pad_set_event_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_kate_dec_sink_event));
+ gst_pad_use_fixed_caps (dec->sinkpad);
+ gst_pad_set_caps (dec->sinkpad,
++ tmp = gst_static_pad_template_get_caps (&sink_factory));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
+
+ dec->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ gst_pad_set_query_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_kate_dec_src_query));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
+
+ gst_kate_util_decode_base_init (&dec->decoder, TRUE);
+
++ /* Tizen fix: drop the ref returned by gst_static_pad_template_get_caps();
++  * gst_pad_set_caps() does not take ownership, so the original code leaked
++  * the template caps reference. */
++ gst_caps_unref(tmp);
+ dec->src_caps = NULL;
+ dec->output_format = GST_KATE_FORMAT_UNDEFINED;
+ dec->remove_markup = FALSE;
+ }
+
+ /* GObject property setter. Must handle ARG_REMOVE_MARKUP: the property is
+  * installed G_PARAM_READWRITE in class_init and read back in get_property,
+  * but the previous switch had no case for it, so every attempt to set
+  * "remove-markup" fell into the invalid-property warning and the flag
+  * could never be changed. */
+ static void
+ gst_kate_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstKateDec *kd = GST_KATE_DEC (object);
+
+ switch (prop_id) {
+ case ARG_REMOVE_MARKUP:
+ kd->remove_markup = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* GObject property getter: serves "remove-markup" directly and defers all
+  * other ids to the decoder-base helper, warning only when neither layer
+  * recognises the property. */
+ static void
+ gst_kate_dec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstKateDec *kd = GST_KATE_DEC (object);
+
+ switch (prop_id) {
+ case ARG_REMOVE_MARKUP:
+ g_value_set_boolean (value, kd->remove_markup);
+ break;
+ default:
+ if (!gst_kate_util_decoder_base_get_property (&kd->decoder, object,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+ }
+ }
+
+ /* Convert one decoded kate event into downstream buffers:
+  *  - a text buffer (utf8 or pango-markup), renegotiating src caps whenever
+  *    the output format changes;
+  *  - additionally a DVD SPU buffer when the event carries a paletted bitmap.
+  * Returns the flow result of the last push; NOT_LINKED is tolerated. */
+ static GstFlowReturn
+ gst_kate_dec_handle_kate_event (GstKateDec * kd, const kate_event * ev)
+ {
+ GstFlowReturn rflow = GST_FLOW_OK;
+ GstKateFormat format = GST_KATE_FORMAT_UNDEFINED;
+ gchar *escaped;
+ GstBuffer *buffer;
+ size_t len;
+ gboolean plain = TRUE;
+
+ if (kd->remove_markup && ev->text_markup_type != kate_markup_none) {
+ /* strip markup in place on a private copy; len0 includes the NUL */
+ size_t len0 = ev->len + 1;
+ escaped = g_strdup (ev->text);
+ if (escaped) {
+ kate_text_remove_markup (ev->text_encoding, escaped, &len0);
+ }
+ plain = TRUE;
+ } else if (ev->text_markup_type == kate_markup_none) {
+ /* no pango markup yet, escape text */
+ /* TODO: actually do the pango thing */
+ escaped = g_strdup (ev->text);
+ plain = TRUE;
+ } else {
+ escaped = g_strdup (ev->text);
+ plain = FALSE;
+ }
+
+ if (G_LIKELY (escaped)) {
+ len = strlen (escaped);
+ if (len > 0) {
+ GST_DEBUG_OBJECT (kd, "kate event: %s, escaped %s", ev->text, escaped);
+ buffer = gst_buffer_new_and_alloc (len + 1);
+ if (G_LIKELY (buffer)) {
+ GstCaps *caps;
+ if (plain)
+ format = GST_KATE_FORMAT_TEXT_UTF8;
+ else
+ format = GST_KATE_FORMAT_TEXT_PANGO_MARKUP;
+ /* push new caps downstream only on an actual format change */
+ if (format != kd->output_format) {
+ caps = gst_caps_new_simple ("text/x-raw", "format", G_TYPE_STRING,
+ (format == GST_KATE_FORMAT_TEXT_UTF8) ? "utf8" : "pango-markup",
+ NULL);
+ gst_pad_push_event (kd->srcpad, gst_event_new_caps (caps));
+ gst_caps_unref (caps);
+ kd->output_format = format;
+ }
+ /* allocate and copy the NULs, but don't include them in passed size */
+ gst_buffer_fill (buffer, 0, escaped, len + 1);
+ gst_buffer_resize (buffer, 0, len);
+ GST_BUFFER_TIMESTAMP (buffer) = ev->start_time * GST_SECOND;
+ GST_BUFFER_DURATION (buffer) =
+ (ev->end_time - ev->start_time) * GST_SECOND;
+ rflow = gst_pad_push (kd->srcpad, buffer);
+ if (rflow == GST_FLOW_NOT_LINKED) {
+ GST_DEBUG_OBJECT (kd, "source pad not linked, ignored");
+ } else if (rflow != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (kd, "failed to push buffer: %s",
+ gst_flow_get_name (rflow));
+ }
+ } else {
+ GST_ELEMENT_ERROR (kd, STREAM, DECODE, (NULL),
+ ("Failed to create buffer"));
+ rflow = GST_FLOW_ERROR;
+ }
+ } else {
+ GST_WARNING_OBJECT (kd, "Empty string, nothing to do");
+ rflow = GST_FLOW_OK;
+ }
+ g_free (escaped);
+ } else {
+ GST_ELEMENT_ERROR (kd, STREAM, DECODE, (NULL),
+ ("Failed to allocate string"));
+ rflow = GST_FLOW_ERROR;
+ }
+
+ /* if there's a background paletted bitmap, construct a DVD SPU for it */
+ if (ev->bitmap && ev->palette) {
+ GstBuffer *buffer = gst_kate_spu_encode_spu (kd, ev);
+ if (buffer) {
+ GstCaps *caps;
+
+ GST_BUFFER_TIMESTAMP (buffer) = ev->start_time * GST_SECOND;
+ GST_BUFFER_DURATION (buffer) =
+ (ev->end_time - ev->start_time) * GST_SECOND;
+
+ if (kd->output_format != GST_KATE_FORMAT_SPU) {
+ caps = gst_caps_new_empty_simple (GST_KATE_SPU_MIME_TYPE);
+ gst_pad_push_event (kd->srcpad, gst_event_new_caps (caps));
+ gst_caps_unref (caps);
+ kd->output_format = GST_KATE_FORMAT_SPU;
+ }
+
+ rflow = gst_pad_push (kd->srcpad, buffer);
+ if (rflow == GST_FLOW_NOT_LINKED) {
+ GST_DEBUG_OBJECT (kd, "source pad not linked, ignored");
+ } else if (rflow != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (kd, "failed to push buffer: %s",
+ gst_flow_get_name (rflow));
+ }
+ } else {
+ GST_ELEMENT_ERROR (kd, STREAM, DECODE, (NULL),
+ ("failed to create SPU from paletted bitmap"));
+ rflow = GST_FLOW_ERROR;
+ }
+ }
+ return rflow;
+ }
+
+ /* GstElement vmethod implementations */
+
+ /* chain function
+ * this function does the actual processing
+ */
+
+ /* Sink pad chain function: feeds one kate packet to the shared decoder
+  * base and, if decoding produced a presentable event, converts and
+  * pushes it downstream via gst_kate_dec_handle_kate_event().
+  * Takes ownership of @buf; it is unreffed on every exit path. */
+ static GstFlowReturn
+ gst_kate_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+ {
+ GstKateDec *kd = GST_KATE_DEC (parent);
+ const kate_event *ev = NULL;
+ GstFlowReturn rflow = GST_FLOW_OK;
+
+ /* drop buffers falling outside the configured segment */
+ if (!gst_kate_util_decoder_base_update_segment (&kd->decoder,
+ GST_ELEMENT_CAST (kd), buf)) {
+ GST_WARNING_OBJECT (kd, "Out of segment!");
+ goto not_in_seg;
+ }
+
+ rflow =
+ gst_kate_util_decoder_base_chain_kate_packet (&kd->decoder,
+ GST_ELEMENT_CAST (kd), pad, buf, kd->srcpad, kd->srcpad, &kd->src_caps,
+ &ev);
+ if (G_UNLIKELY (rflow != GST_FLOW_OK)) {
+ gst_buffer_unref (buf);
+ return rflow;
+ }
+
+ /* ev is only set when the packet decoded to an event worth presenting */
+ if (ev) {
+ rflow = gst_kate_dec_handle_kate_event (kd, ev);
+ }
+
+ not_in_seg:
+ gst_buffer_unref (buf);
+ return rflow;
+ }
+
+ /* Element state change: delegates to the kate decoder base (which owns
+  * the kate library state), then drops the cached source caps when
+  * going PAUSED->READY so a restart renegotiates from scratch. */
+ static GstStateChangeReturn
+ gst_kate_dec_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstKateDec *kd = GST_KATE_DEC (element);
+
+ ret = gst_kate_decoder_base_change_state (&kd->decoder, element,
+ parent_class, transition);
+
+ if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+ gst_caps_replace (&kd->src_caps, NULL);
+ }
+
+ return ret;
+ }
+
+ /* Sink pad query handler: defers entirely to the kate decoder base. */
+ gboolean
+ gst_kate_dec_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstKateDec *dec = GST_KATE_DEC (parent);
+
+ return gst_kate_decoder_base_sink_query (&dec->decoder,
+ GST_ELEMENT_CAST (dec), pad, parent, query);
+ }
+
+ /* Parse the sink caps' "streamheader" field (single buffer or array of
+  * buffers) and run each header through the decoder base so the kate
+  * stream gets initialised before data buffers arrive.
+  * Returns TRUE unless a header produced a hard flow error. */
+ static gboolean
+ gst_kate_dec_set_caps (GstKateDec * kd, GstCaps * caps)
+ {
+ GstStructure *structure = gst_caps_get_structure (caps, 0);
+ GstFlowReturn rflow = GST_FLOW_OK;
+
+ if (gst_structure_has_field (structure, "streamheader")) {
+ const GValue *value;
+ GstBuffer *buf;
+ const kate_event *ev;
+
+ value = gst_structure_get_value (structure, "streamheader");
+
+ if (GST_VALUE_HOLDS_BUFFER (value)) {
+ buf = gst_value_get_buffer (value);
+
+ /* NOTE(review): the return value of this call is ignored here
+  * (unlike in the chain function) — confirm errors are surfaced
+  * elsewhere, or consider checking it */
+ gst_kate_util_decoder_base_chain_kate_packet (&kd->decoder,
+ GST_ELEMENT_CAST (kd), kd->sinkpad, buf, kd->srcpad, kd->srcpad,
+ &kd->src_caps, &ev);
+
+ if (ev) {
+ rflow = gst_kate_dec_handle_kate_event (kd, ev);
+ }
+ } else if (GST_VALUE_HOLDS_ARRAY (value)) {
+ gint i, size = gst_value_array_get_size (value);
+
+ for (i = 0; i < size; i++) {
+ const GValue *v = gst_value_array_get_value (value, i);
+
+ buf = gst_value_get_buffer (v);
+ gst_kate_util_decoder_base_chain_kate_packet (&kd->decoder,
+ GST_ELEMENT_CAST (kd), kd->sinkpad, buf, kd->srcpad, kd->srcpad,
+ &kd->src_caps, &ev);
+
+ if (ev) {
+ rflow = gst_kate_dec_handle_kate_event (kd, ev);
+ /* bail out on hard errors; NOT_LINKED is tolerated */
+ if (rflow != GST_FLOW_OK && rflow != GST_FLOW_NOT_LINKED)
+ break;
+ }
+ }
+ } else {
+ GST_WARNING_OBJECT (kd, "Unhandled streamheader type: %s",
+ G_VALUE_TYPE_NAME (value));
+ }
+ }
+
+ return rflow == GST_FLOW_OK || rflow == GST_FLOW_NOT_LINKED;
+ }
+
+ /* Sink pad event handler.  CAPS events are parsed immediately (to run
+  * stream headers through the decoder); all events are then offered to
+  * the decoder base's queue, which holds them back until output caps
+  * are set.  Only events the base does not queue are handled now. */
+ static gboolean
+ gst_kate_dec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstKateDec *kd = GST_KATE_DEC (parent);
+ gboolean res = TRUE;
+
+ GST_LOG_OBJECT (pad, "Event on sink pad: %" GST_PTR_FORMAT, event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:{
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_kate_dec_set_caps (kd, caps);
+ break;
+ }
+ default:
+ break;
+ }
+
+ /* Delay events till we've set caps */
+ if (gst_kate_util_decoder_base_queue_event (&kd->decoder, event,
+ &gst_kate_dec_sink_handle_event, parent, pad)) {
+ return TRUE;
+ }
+
+ res = gst_kate_dec_sink_handle_event (pad, parent, event);
+
+ return res;
+ }
+
+ /* Actual sink event processing (called directly, or replayed from the
+  * decoder base's event queue).  Updates decoder flushing state on
+  * FLUSH_START/STOP, folds TAG events into the decoder's tag list, then
+  * lets the default handler forward the (possibly replaced) event. */
+ static gboolean
+ gst_kate_dec_sink_handle_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstKateDec *kd = GST_KATE_DEC (parent);
+
+ GST_LOG_OBJECT (pad, "Handling event on sink pad: %s",
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ break;
+
+ case GST_EVENT_FLUSH_START:
+ gst_kate_util_decoder_base_set_flushing (&kd->decoder, TRUE);
+ break;
+
+ case GST_EVENT_FLUSH_STOP:
+ gst_kate_util_decoder_base_set_flushing (&kd->decoder, FALSE);
+ break;
+
+ case GST_EVENT_TAG:{
+ GstTagList *tags;
+ gst_event_parse_tag (event, &tags);
+ gst_kate_util_decoder_base_add_tags (&kd->decoder, tags, FALSE);
+ gst_event_unref (event);
+ /* NOTE(review): assumes the base always returns a merged tag event
+  * here (non-NULL) — confirm, since it is forwarded unconditionally */
+ event = gst_kate_util_decoder_base_get_tag_event (&kd->decoder);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+ }
+
+ /* Source pad query handler: answers CAPS queries from the cached
+  * negotiated caps (falling back to the src pad template), everything
+  * else goes through the default handler. */
+ static gboolean
+ gst_kate_dec_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstKateDec *kd = GST_KATE_DEC (parent);
+ gboolean res = TRUE;
+
+ GST_LOG_OBJECT (pad, "Handling query on src pad: %s",
+ GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:{
+ GstCaps *caps;
+
+ if (kd->src_caps) {
+ GST_DEBUG_OBJECT (kd, "We have src caps %" GST_PTR_FORMAT,
+ kd->src_caps);
+ caps = gst_caps_copy (kd->src_caps);
+ } else {
+ GST_DEBUG_OBJECT (kd, "We have no src caps, using template caps");
+ caps = gst_static_pad_template_get_caps (&src_factory);
+ }
+
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+ }
--- /dev/null
+ /* GStreamer Musepack decoder plugin
+ * Copyright (C) 2004 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstmusepackdec.h"
+ #include "gstmusepackreader.h"
+
+ GST_DEBUG_CATEGORY (musepackdec_debug);
+ #define GST_CAT_DEFAULT musepackdec_debug
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-musepack, streamversion = (int) { 7, 8 }")
+ );
+
+ #ifdef MPC_FIXED_POINT
+ # if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ # define GST_MPC_FORMAT "S32LE"
+ # else
+ # define GST_MPC_FORMAT "S32BE"
+ # endif
+ #else
+ # if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ # define GST_MPC_FORMAT "F32LE"
+ # else
+ # define GST_MPC_FORMAT "F32BE"
+ # endif
+ #endif
+
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_MPC_FORMAT ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 8000, 96000 ], " "channels = (int) [ 1, 2 ]")
+ );
+
+ static void gst_musepackdec_dispose (GObject * obj);
+
+ static gboolean gst_musepackdec_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_musepackdec_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_musepackdec_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+ static gboolean gst_musepackdec_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+ static void gst_musepackdec_loop (GstPad * sinkpad);
+ static GstStateChangeReturn
+ gst_musepackdec_change_state (GstElement * element, GstStateChange transition);
+
+ #define parent_class gst_musepackdec_parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstMusepackDec, gst_musepackdec, GST_TYPE_ELEMENT,
+ GST_DEBUG_CATEGORY_INIT (musepackdec_debug, "musepackdec", 0,
+ "mpc decoder");
+ );
+ GST_ELEMENT_REGISTER_DEFINE (musepackdec, "musepackdec",
+ GST_RANK_PRIMARY, GST_TYPE_MUSEPACK_DEC);
+ /* Class init: registers pad templates and element metadata, and wires
+  * up the dispose and state-change vfuncs. */
+ static void
+ gst_musepackdec_class_init (GstMusepackDecClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+
+ gst_element_class_set_static_metadata (element_class, "Musepack decoder",
+ "Codec/Decoder/Audio",
+ "Musepack decoder", "Ronald Bultje <rbultje@ronald.bitfreak.net>");
+
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_musepackdec_dispose);
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_musepackdec_change_state);
+ }
+
+ /* Instance init: allocates the mpc_reader (freed in dispose), creates
+  * the sink pad (pull-mode activation) and the fixed-caps src pad. */
+ static void
+ gst_musepackdec_init (GstMusepackDec * musepackdec)
+ {
+ musepackdec->offset = 0;
+ musepackdec->rate = 0;
+ musepackdec->bps = 0;
+
+ /* reader struct is owned by the element; see gst_musepackdec_dispose */
+ musepackdec->r = g_new (mpc_reader, 1);
+
+ musepackdec->sinkpad =
+ gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_activate_function (musepackdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_musepackdec_sink_activate));
+ gst_pad_set_activatemode_function (musepackdec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_musepackdec_sink_activate_mode));
+ gst_element_add_pad (GST_ELEMENT (musepackdec), musepackdec->sinkpad);
+
+ musepackdec->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_set_event_function (musepackdec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_musepackdec_src_event));
+ gst_pad_set_query_function (musepackdec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_musepackdec_src_query));
+ gst_pad_use_fixed_caps (musepackdec->srcpad);
+ gst_element_add_pad (GST_ELEMENT (musepackdec), musepackdec->srcpad);
+ }
+
+ /* Dispose: frees the mpc_reader and tears down the demuxer.  Pointers
+  * are NULLed so a repeated dispose (allowed by GObject) is harmless. */
+ static void
+ gst_musepackdec_dispose (GObject * obj)
+ {
+ GstMusepackDec *musepackdec = GST_MUSEPACK_DEC (obj);
+
+ g_free (musepackdec->r);
+ musepackdec->r = NULL;
+
+ if (musepackdec->d) {
+ mpc_demux_exit (musepackdec->d);
+ musepackdec->d = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (obj);
+ }
+
+ /* Push a TIME-format SEGMENT event downstream, converted from the
+  * decoder's sample-based (DEFAULT format) segment by scaling with the
+  * stream sample rate. */
+ static void
+ gst_musepackdec_send_newsegment (GstMusepackDec * dec)
+ {
+ GstSegment os = dec->segment;
+
+ os.format = GST_FORMAT_TIME;
+ os.start = gst_util_uint64_scale_int (os.start, GST_SECOND, dec->rate);
+ /* -1 means "no stop position" (open-ended segment); scaling that
+  * sentinel would produce a bogus stop time, so keep it as-is */
+ if (os.stop != -1)
+ os.stop = gst_util_uint64_scale_int (os.stop, GST_SECOND, dec->rate);
+ os.time = gst_util_uint64_scale_int (os.time, GST_SECOND, dec->rate);
+
+ GST_DEBUG_OBJECT (dec, "sending newsegment from %" GST_TIME_FORMAT
+ " to %" GST_TIME_FORMAT ", rate = %.1f", GST_TIME_ARGS (os.start),
+ GST_TIME_ARGS (os.stop), os.rate);
+
+ gst_pad_push_event (dec->srcpad, gst_event_new_segment (&os));
+ }
+
+ /* Handle a SEEK event on the source pad.  Converts TIME seeks to
+  * sample offsets, stops/flushes the streaming task, performs the seek
+  * on a segment copy (committed only on success), asks the mpc demuxer
+  * to reposition, then restarts the task.  Returns FALSE on failure. */
+ static gboolean
+ gst_musepackdec_handle_seek_event (GstMusepackDec * dec, GstEvent * event)
+ {
+ GstSeekType start_type, stop_type;
+ GstSeekFlags flags;
+ GstSegment segment;
+ GstFormat format;
+ gboolean flush;
+ gdouble rate;
+ gint64 start, stop;
+ gint samplerate;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
+ GST_DEBUG_OBJECT (dec, "seek failed: only TIME or DEFAULT format allowed");
+ return FALSE;
+ }
+
+ samplerate = g_atomic_int_get (&dec->rate);
+
+ /* internally the segment is kept in samples (DEFAULT format) */
+ if (format == GST_FORMAT_TIME) {
+ if (start_type != GST_SEEK_TYPE_NONE)
+ start = gst_util_uint64_scale_int (start, samplerate, GST_SECOND);
+ if (stop_type != GST_SEEK_TYPE_NONE)
+ stop = gst_util_uint64_scale_int (stop, samplerate, GST_SECOND);
+ }
+
+ flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);
+
+ /* unblock the streaming thread before taking the stream lock below */
+ if (flush)
+ gst_pad_push_event (dec->srcpad, gst_event_new_flush_start ());
+ else
+ gst_pad_pause_task (dec->sinkpad); /* not _stop_task()? */
+
+ GST_PAD_STREAM_LOCK (dec->sinkpad);
+
+ /* operate on segment copy until we know the seek worked */
+ segment = dec->segment;
+
+ gst_segment_do_seek (&segment, rate, GST_FORMAT_DEFAULT,
+ flags, start_type, start, stop_type, stop, NULL);
+
+ gst_pad_push_event (dec->sinkpad, gst_event_new_flush_stop (TRUE));
+
+ GST_DEBUG_OBJECT (dec, "segment: [%" G_GINT64_FORMAT "-%" G_GINT64_FORMAT
+ "] = [%" GST_TIME_FORMAT "-%" GST_TIME_FORMAT "]",
+ segment.start, segment.stop,
+ GST_TIME_ARGS (segment.start * GST_SECOND / dec->rate),
+ GST_TIME_ARGS (segment.stop * GST_SECOND / dec->rate));
+
+ GST_DEBUG_OBJECT (dec, "performing seek to sample %" G_GINT64_FORMAT,
+ segment.start);
+
+ if (segment.start >= segment.duration) {
+ GST_WARNING_OBJECT (dec, "seek out of bounds");
+ goto failed;
+ }
+ if (mpc_demux_seek_sample (dec->d, segment.start) != MPC_STATUS_OK)
+ goto failed;
+
+ if ((flags & GST_SEEK_FLAG_SEGMENT) == GST_SEEK_FLAG_SEGMENT) {
+ GST_DEBUG_OBJECT (dec, "posting SEGMENT_START message");
+
+ gst_element_post_message (GST_ELEMENT (dec),
+ gst_message_new_segment_start (GST_OBJECT (dec), GST_FORMAT_TIME,
+ gst_util_uint64_scale_int (segment.start, GST_SECOND, dec->rate)));
+ }
+
+ if (flush) {
+ gst_pad_push_event (dec->srcpad, gst_event_new_flush_stop (TRUE));
+ }
+
+ /* seek succeeded: commit the segment and announce it downstream */
+ segment.position = segment.start;
+ dec->segment = segment;
+ gst_musepackdec_send_newsegment (dec);
+
+ GST_DEBUG_OBJECT (dec, "seek successful");
+
+ gst_pad_start_task (dec->sinkpad,
+ (GstTaskFunction) gst_musepackdec_loop, dec->sinkpad, NULL);
+
+ GST_PAD_STREAM_UNLOCK (dec->sinkpad);
+
+ return TRUE;
+
+ failed:
+ {
+ GST_WARNING_OBJECT (dec, "seek failed");
+ GST_PAD_STREAM_UNLOCK (dec->sinkpad);
+ return FALSE;
+ }
+ }
+
+ /* Source pad event handler: seeks are handled by this element, every
+  * other event goes through the default handler. */
+ static gboolean
+ gst_musepackdec_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstMusepackDec *dec = GST_MUSEPACK_DEC (parent);
+
+ GST_DEBUG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK)
+ return gst_musepackdec_handle_seek_event (dec, event);
+
+ return gst_pad_event_default (pad, parent, event);
+ }
+
+ /* Source pad query handler: answers POSITION, DURATION and SEEKING in
+  * TIME or DEFAULT (samples) format.  TIME conversions need the sample
+  * rate, which stays 0 until the stream headers have been parsed, so
+  * such queries fail (or report non-seekable) until then. */
+ static gboolean
+ gst_musepackdec_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstMusepackDec *musepackdec = GST_MUSEPACK_DEC (parent);
+ GstFormat format;
+ gboolean res = FALSE;
+ gint samplerate;
+
+ samplerate = g_atomic_int_get (&musepackdec->rate);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:{
+ gint64 cur, cur_off;
+
+ if (samplerate == 0)
+ goto done;
+
+ gst_query_parse_position (query, &format, NULL);
+
+ GST_OBJECT_LOCK (musepackdec);
+ cur_off = musepackdec->segment.position;
+ GST_OBJECT_UNLOCK (musepackdec);
+
+ if (format == GST_FORMAT_TIME) {
+ cur = gst_util_uint64_scale_int (cur_off, GST_SECOND, samplerate);
+ gst_query_set_position (query, GST_FORMAT_TIME, cur);
+ res = TRUE;
+ } else if (format == GST_FORMAT_DEFAULT) {
+ gst_query_set_position (query, GST_FORMAT_DEFAULT, cur_off);
+ res = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_DURATION:{
+ gint64 len, len_off;
+
+ if (samplerate == 0)
+ goto done;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ GST_OBJECT_LOCK (musepackdec);
+ len_off = musepackdec->segment.duration;
+ GST_OBJECT_UNLOCK (musepackdec);
+
+ if (format == GST_FORMAT_TIME) {
+ len = gst_util_uint64_scale_int (len_off, GST_SECOND, samplerate);
+ gst_query_set_duration (query, GST_FORMAT_TIME, len);
+ res = TRUE;
+ } else if (format == GST_FORMAT_DEFAULT) {
+ gst_query_set_duration (query, GST_FORMAT_DEFAULT, len_off);
+ res = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+ gint64 len, len_off;
+
+ res = TRUE;
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+
+ GST_OBJECT_LOCK (musepackdec);
+ len_off = musepackdec->segment.duration;
+ GST_OBJECT_UNLOCK (musepackdec);
+
+ /* converting the duration to TIME requires a valid sample rate;
+  * before stream init it is still 0, so report non-seekable rather
+  * than scaling by zero */
+ if (fmt == GST_FORMAT_TIME && samplerate > 0) {
+ len = gst_util_uint64_scale_int (len_off, GST_SECOND, samplerate);
+ gst_query_set_seeking (query, fmt, TRUE, 0, len);
+ } else if (fmt == GST_FORMAT_DEFAULT) {
+ gst_query_set_seeking (query, fmt, TRUE, 0, len_off);
+ } else {
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ done:
+ return res;
+ }
+
+ /* One-time stream setup, called from the loop when rate is still 0:
+  * initialises the mpc reader/demuxer, reads stream info, announces
+  * stream-start, negotiates src caps, initialises the sample-based
+  * segment and pushes basic tags.  Returns FALSE on fatal error. */
+ static gboolean
+ gst_musepack_stream_init (GstMusepackDec * musepackdec)
+ {
+ mpc_streaminfo i;
+ GstTagList *tags;
+ GstCaps *caps;
+ gchar *stream_id;
+
+ /* set up reading */
+ gst_musepack_init_reader (musepackdec->r, musepackdec);
+
+ musepackdec->d = mpc_demux_init (musepackdec->r);
+ if (!musepackdec->d) {
+ GST_ELEMENT_ERROR (musepackdec, STREAM, WRONG_TYPE, (NULL), (NULL));
+ return FALSE;
+ }
+
+ mpc_demux_get_info (musepackdec->d, &i);
+
+ stream_id = gst_pad_create_stream_id (musepackdec->srcpad,
+ GST_ELEMENT_CAST (musepackdec), NULL);
+ gst_pad_push_event (musepackdec->srcpad,
+ gst_event_new_stream_start (stream_id));
+ g_free (stream_id);
+
+ /* capsnego */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, GST_MPC_FORMAT,
+ "layout", G_TYPE_STRING, "interleaved",
+ "channels", G_TYPE_INT, i.channels,
+ "rate", G_TYPE_INT, i.sample_freq, NULL);
+ gst_pad_use_fixed_caps (musepackdec->srcpad);
+ if (!gst_pad_set_caps (musepackdec->srcpad, caps)) {
+ GST_ELEMENT_ERROR (musepackdec, CORE, NEGOTIATION, (NULL), (NULL));
++ gst_caps_unref (caps);
+ return FALSE;
+ }
++ gst_caps_unref (caps);
+
+ /* bps is bytes per frame: 4 bytes per sample (S32/F32) * channels */
+ g_atomic_int_set (&musepackdec->bps, 4 * i.channels);
+ g_atomic_int_set (&musepackdec->rate, i.sample_freq);
+
+ musepackdec->segment.position = 0;
+ musepackdec->segment.duration = mpc_streaminfo_get_length_samples (&i);
+
+ /* send basic tags */
+ tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, "Musepack", NULL);
+
+ if (i.encoder[0] != '\0' && i.encoder_version > 0) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_ENCODER, i.encoder,
+ GST_TAG_ENCODER_VERSION, i.encoder_version, NULL);
+ }
+
+ if (i.bitrate > 0) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, i.bitrate, NULL);
+ } else if (i.average_bitrate > 0.0) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, (guint) i.average_bitrate, NULL);
+ }
+
+ /* replaygain values are stored in centi-dB; peaks relative to 32767 */
+ if (i.gain_title != 0 || i.gain_album != 0) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TRACK_GAIN, (gdouble) i.gain_title / 100.0,
+ GST_TAG_ALBUM_GAIN, (gdouble) i.gain_album / 100.0, NULL);
+ }
+
+ if (i.peak_title != 0 && i.peak_title != 32767 &&
+ i.peak_album != 0 && i.peak_album != 32767) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TRACK_PEAK, (gdouble) i.peak_title / 32767.0,
+ GST_TAG_ALBUM_PEAK, (gdouble) i.peak_album / 32767.0, NULL);
+ }
+
+ GST_LOG_OBJECT (musepackdec, "Posting tags: %" GST_PTR_FORMAT, tags);
+ gst_pad_push_event (musepackdec->srcpad, gst_event_new_tag (tags));
+
+
+ return TRUE;
+ }
+
+ /* Sink pad activation: this element only works in pull mode from a
+  * seekable upstream, so query the peer's scheduling capabilities and
+  * request pull activation when possible; otherwise refuse. */
+ static gboolean
+ gst_musepackdec_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstQuery *query = gst_query_new_scheduling ();
+ gboolean seekable_pull = FALSE;
+
+ if (gst_pad_peer_query (sinkpad, query))
+ seekable_pull = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+
+ gst_query_unref (query);
+
+ if (!seekable_pull)
+ return FALSE;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+ }
+
+ /* Start or stop the streaming task when the sink pad is (de)activated
+  * in pull mode; push mode (and anything else) is not supported. */
+ static gboolean
+ gst_musepackdec_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ if (mode != GST_PAD_MODE_PULL)
+ return FALSE;
+
+ if (active)
+ return gst_pad_start_task (sinkpad,
+ (GstTaskFunction) gst_musepackdec_loop, sinkpad, NULL);
+
+ return gst_pad_stop_task (sinkpad);
+ }
+
+ /* Pull-mode streaming task: lazily initialises the stream on the first
+  * iteration, decodes one mpc frame per call into a freshly allocated
+  * buffer, timestamps it from the sample-based segment and pushes it.
+  * Pauses itself on error, EOS, or when the configured segment ends. */
+ static void
+ gst_musepackdec_loop (GstPad * sinkpad)
+ {
+ GstMusepackDec *musepackdec;
+ GstFlowReturn flow;
+ GstBuffer *out;
+ GstMapInfo info;
+ mpc_frame_info frame;
+ mpc_status err;
+ gint num_samples, samplerate, bitspersample;
+
+ musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));
+
+ samplerate = g_atomic_int_get (&musepackdec->rate);
+
+ /* rate == 0 means the stream headers have not been parsed yet */
+ if (samplerate == 0) {
+ if (!gst_musepack_stream_init (musepackdec))
+ goto pause_task;
+
+ gst_musepackdec_send_newsegment (musepackdec);
+ samplerate = g_atomic_int_get (&musepackdec->rate);
+ }
+
+ /* despite the name, this holds bytes per frame (4 * channels) */
+ bitspersample = g_atomic_int_get (&musepackdec->bps);
+
+ out = gst_buffer_new_allocate (NULL, MPC_DECODER_BUFFER_LENGTH * 4, NULL);
+
+ gst_buffer_map (out, &info, GST_MAP_READWRITE);
+ frame.buffer = (MPC_SAMPLE_FORMAT *) info.data;
+ err = mpc_demux_decode (musepackdec->d, &frame);
+ gst_buffer_unmap (out, &info);
+
+ if (err != MPC_STATUS_OK) {
+ GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
+ GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
+ goto pause_task;
+ } else if (frame.bits == -1) {
+ /* bits == -1 signals end of stream from the mpc demuxer */
+ goto eos_and_pause;
+ }
+
+ num_samples = frame.samples;
+
+ /* shrink the buffer to the bytes actually decoded */
+ gst_buffer_set_size (out, num_samples * bitspersample);
+
+ GST_BUFFER_OFFSET (out) = musepackdec->segment.position;
+ GST_BUFFER_PTS (out) =
+ gst_util_uint64_scale_int (musepackdec->segment.position,
+ GST_SECOND, samplerate);
+ GST_BUFFER_DURATION (out) =
+ gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);
+
+ musepackdec->segment.position += num_samples;
+
+ GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));
+
+ flow = gst_pad_push (musepackdec->srcpad, out);
+ if (flow != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
+ goto pause_task;
+ }
+
+ /* check if we're at the end of a configured segment */
+ if (musepackdec->segment.stop != -1 &&
+ musepackdec->segment.position >= musepackdec->segment.stop) {
+ gint64 stop_time;
+
+ GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");
+
+ /* only post SEGMENT_DONE for segment seeks; otherwise send EOS */
+ if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
+ goto eos_and_pause;
+
+ GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");
+
+ stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
+ GST_SECOND, samplerate);
+
+ gst_element_post_message (GST_ELEMENT (musepackdec),
+ gst_message_new_segment_done (GST_OBJECT (musepackdec),
+ GST_FORMAT_TIME, stop_time));
+ gst_pad_push_event (musepackdec->srcpad,
+ gst_event_new_segment_done (GST_FORMAT_TIME, stop_time));
+
+ goto pause_task;
+ }
+
+ return;
+
+ eos_and_pause:
+ {
+ GST_DEBUG_OBJECT (musepackdec, "sending EOS event");
+ gst_pad_push_event (musepackdec->srcpad, gst_event_new_eos ());
+ /* fall through to pause */
+ }
+
+ pause_task:
+ {
+ GST_DEBUG_OBJECT (musepackdec, "Pausing task");
+ gst_pad_pause_task (sinkpad);
+ return;
+ }
+ }
+
+ /* Element state change: resets the sample-based segment when going up
+  * to PAUSED, and clears segment/offset/rate/bps when going back to
+  * READY so the next run re-initialises the stream. */
+ static GstStateChangeReturn
+ gst_musepackdec_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstMusepackDec *musepackdec = GST_MUSEPACK_DEC (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_segment_init (&musepackdec->segment, GST_FORMAT_DEFAULT);
+ musepackdec->segment.position = 0;
+ break;
+ default:
+ break;
+ }
+
+ if (GST_ELEMENT_CLASS (parent_class)->change_state)
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_segment_init (&musepackdec->segment, GST_FORMAT_UNDEFINED);
+ musepackdec->offset = 0;
+ musepackdec->rate = 0;
+ musepackdec->bps = 0;
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+ }
+
+ /* Plugin entry point: register the musepackdec element. */
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ gboolean registered = GST_ELEMENT_REGISTER (musepackdec, plugin);
+
+ return registered;
+ }
+
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ musepack,
+ "Musepack decoder", plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME,
+ GST_PACKAGE_ORIGIN)
--- /dev/null
- "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]; "
- /* These caps do not work on my card */
- // "audio/x-adpcm, " "layout = (string) ima, "
- // "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
- // "audio/x-alaw, " "rate = (int) [ 1, MAX ], "
- // "channels = (int) [ 1, 2 ]; "
- // "audio/x-mulaw, " "rate = (int) [ 1, MAX ], "
- // "channels = (int) [ 1, MAX ]"
- )
+ /*
+ * GStreamer
+ *
+ * Copyright (C) 2005 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009-2010 Chris Robinson <chris.kcat@gmail.com>
+ * Copyright (C) 2013 Juan Manuel Borges Caño <juanmabcmail@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-openalsink
+ * @title: openalsink
+ * @see_also: openalsrc
+ * @short_description: play raw audio samples through OpenAL
+ *
+ * This element plays raw audio samples through OpenAL.
+ *
+ * Unfortunately the OpenAL API doesn't have a format enumeration/check. All you can do is try opening a device with a format and see if it works.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 audiotestsrc ! audioconvert ! volume volume=0.5 ! openalsink
+ * ]| will play a sine wave (continuous beep sound) through OpenAL.
+ * |[
+ * gst-launch-1.0 filesrc location=stream.wav ! decodebin ! audioconvert ! openalsink
+ * ]| will play a wav audio file through OpenAL.
+ * |[
+ * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink
+ * ]| will capture and play audio through OpenAL.
+ *
+ */
+
+ /*
+ * DEV:
+ * To get better timing/delay information you may also be interested in this:
+ * http://kcat.strangesoft.net/openal-extensions/SOFT_source_latency.txt
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/gsterror.h>
+
+ GST_DEBUG_CATEGORY_EXTERN (openal_debug);
+ #define GST_CAT_DEFAULT openal_debug
+
+ #include "gstopenalelements.h"
+ #include "gstopenalsink.h"
+
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++#include <math.h>
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ static void gst_openal_sink_dispose (GObject * object);
+ static void gst_openal_sink_finalize (GObject * object);
+
+ static void gst_openal_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_openal_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static GstCaps *gst_openal_sink_getcaps (GstBaseSink * basesink,
+ GstCaps * filter);
+ static gboolean gst_openal_sink_open (GstAudioSink * audiosink);
+ static gboolean gst_openal_sink_close (GstAudioSink * audiosink);
+ static gboolean gst_openal_sink_prepare (GstAudioSink * audiosink,
+ GstAudioRingBufferSpec * spec);
+ static gboolean gst_openal_sink_unprepare (GstAudioSink * audiosink);
+ static gint gst_openal_sink_write (GstAudioSink * audiosink, gpointer data,
+ guint length);
+ static guint gst_openal_sink_delay (GstAudioSink * audiosink);
+ static void gst_openal_sink_reset (GstAudioSink * audiosink);
+
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++void set_angles (GstElement * element, float absolute_angle_x,
++ float absolute_angle_y, float absolute_angle_z);
++void apply_rotation (GstElement * element, float rotation_x, float rotation_y,
++ float rotation_z);
++static gboolean gst_openal_sink_src_event (GstElement * element,
++ GstEvent * event);
++
++gfloat source_rotation_y_old = 0;
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ #define OPENAL_DEFAULT_DEVICE NULL
+
+ #define OPENAL_MIN_RATE 8000
+ #define OPENAL_MAX_RATE 192000
+
+ enum
+ {
+ PROP_0,
+
+ PROP_DEVICE,
+ PROP_DEVICE_NAME,
+
+ PROP_USER_DEVICE,
+ PROP_USER_CONTEXT,
++#ifndef TIZEN_FEATURE_OALSINK_MODIFICATION
+ PROP_USER_SOURCE
++#else
++ PROP_USER_SOURCE,
++
++ PROP_USE_STREAM_INFO,
++ PROP_STREAM_INFO,
++
++ PROP_SOURCE_ORIENTATION_X_AXIS,
++ PROP_SOURCE_ORIENTATION_Y_AXIS,
++ PROP_SOURCE_ORIENTATION_Z_AXIS,
++
++ PROP_SOURCE_AMBISONIC_TYPE
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
+ };
+
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++/* Register GTtypes for custom enums */
++#define GST_TYPE_AMBISONICS_TYPE (gst_openalsink_ambisonicstype_get_type ())
++
++/* Lazily register and return the GEnum type for the ambisonics-type
++ * property values.
++ * NOTE(review): the lazy init here is not guarded (no g_once); confirm
++ * this is only ever reached from one thread, e.g. class_init. */
++static GType
++gst_openalsink_ambisonicstype_get_type (void)
++{
++ static GType ambisonics_type = 0;
++
++ if (!ambisonics_type) {
++ static GEnumValue ambisonics_types[] = {
++ { AMBISONICS_TYPE_UNKNOWN, "Ambisonics type is not determined", "unknown" },
++ { AMBISONICS_TYPE_PERIPHONIC, "Periphonic ambisonics", "periphonic" },
++ { AMBISONICS_TYPE_NON_PERIPHONIC, "Non-periphonic ambisonics", "non-periphonic" },
++ { 0, NULL, NULL },
++ };
++
++ ambisonics_type =
++ g_enum_register_static ("GstOpenalsinkAmbisonicsType", ambisonics_types);
++ }
++
++ return ambisonics_type;
++}
++
++enum
++{
++ /* action signals */
++ ROTATE,
++ /* emit signals */
++ LAST_SIGNAL
++};
++static guint openalsink_signals[LAST_SIGNAL];
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ static GstStaticPadTemplate openalsink_factory =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, " "format = (string) " GST_AUDIO_NE (F64)
+ ", " "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
+ "audio/x-raw, " "format = (string) " GST_AUDIO_NE (F32) ", "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]; "
+ "audio/x-raw, " "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]; "
+ "audio/x-raw, " "format = (string) " G_STRINGIFY (U8) ", "
++ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]; ")
+ );
+
+ static PFNALCSETTHREADCONTEXTPROC palcSetThreadContext;
+ static PFNALCGETTHREADCONTEXTPROC palcGetThreadContext;
+
+ /* Make @context current on the calling thread (when the thread-local
+  * context extension is available) and return the previously current
+  * context so it can later be restored with popContext(). */
+ static inline ALCcontext *
+ pushContext (ALCcontext * context)
+ {
+ ALCcontext *previous;
+
+ if (!palcGetThreadContext || !palcSetThreadContext)
+ return NULL;
+
+ previous = palcGetThreadContext ();
+ if (previous != context)
+ palcSetThreadContext (context);
+
+ return previous;
+ }
+
+ /* Restore the thread-local context previously saved by pushContext(). */
+ static inline void
+ popContext (ALCcontext * old, ALCcontext * context)
+ {
+ if (!palcGetThreadContext || !palcSetThreadContext)
+ return;
+
+ if (context != old)
+ palcSetThreadContext (old);
+ }
+
/* Fetch and clear the pending AL error for the current context; log it via
 * g_warning() and return it.  fname/fline identify the call site. */
static inline ALenum
checkALError (const char *fname, unsigned int fline)
{
  ALenum err = alGetError ();
  if (err != AL_NO_ERROR)
    g_warning ("%s:%u: context error: %s", fname, fline, alGetString (err));
  return err;
}

/* Shadow the function so later zero-argument calls automatically report the
 * caller's file and line. */
#define checkALError() checkALError(__FILE__, __LINE__)
+
/* GObject type boilerplate plus per-element registration; the _WITH_CODE
 * part runs openal_element_init() once when the element is registered. */
G_DEFINE_TYPE (GstOpenALSink, gst_openal_sink, GST_TYPE_AUDIO_SINK);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (openalsink, "openalsink",
    GST_RANK_SECONDARY, GST_TYPE_OPENAL_SINK, openal_element_init (plugin));
+
+ static void
+ gst_openal_sink_dispose (GObject * object)
+ {
+ GstOpenALSink *sink = GST_OPENAL_SINK (object);
+
+ if (sink->probed_caps)
+ gst_caps_unref (sink->probed_caps);
+ sink->probed_caps = NULL;
+
+ G_OBJECT_CLASS (gst_openal_sink_parent_class)->dispose (object);
+ }
+
/* Class initialisation: resolves the thread-local-context extension, wires
 * up GObject/GstBaseSink/GstAudioSink vfuncs, installs the properties
 * (including the Tizen-only stream-info/ambisonics ones), registers the
 * "rotate" action signal, and sets element metadata and the pad template. */
static void
gst_openal_sink_class_init (GstOpenALSinkClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;
  GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *) klass;
  GstAudioSinkClass *gstaudiosink_class = (GstAudioSinkClass *) klass;

  /* Resolve the extension entry points once; pushContext()/popContext()
   * degrade to no-ops when these stay NULL. */
  if (alcIsExtensionPresent (NULL, "ALC_EXT_thread_local_context")) {
    palcSetThreadContext = alcGetProcAddress (NULL, "alcSetThreadContext");
    palcGetThreadContext = alcGetProcAddress (NULL, "alcGetThreadContext");
  }

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
  /* Default class handler for the "rotate" action signal. */
  klass->rotate = set_angles;

  gstelement_class->send_event = GST_DEBUG_FUNCPTR (gst_openal_sink_src_event);
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_openal_sink_dispose);
  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_openal_sink_finalize);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_openal_sink_set_property);
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_openal_sink_get_property);

  /* NOTE(review): G_DEFINE_TYPE already initialises
   * gst_openal_sink_parent_class; this assignment looks redundant —
   * confirm before removing. */
  gst_openal_sink_parent_class = g_type_class_peek_parent (klass);

  gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_openal_sink_getcaps);

  gstaudiosink_class->open = GST_DEBUG_FUNCPTR (gst_openal_sink_open);
  gstaudiosink_class->close = GST_DEBUG_FUNCPTR (gst_openal_sink_close);
  gstaudiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_openal_sink_prepare);
  gstaudiosink_class->unprepare = GST_DEBUG_FUNCPTR (gst_openal_sink_unprepare);
  gstaudiosink_class->write = GST_DEBUG_FUNCPTR (gst_openal_sink_write);
  gstaudiosink_class->delay = GST_DEBUG_FUNCPTR (gst_openal_sink_delay);
  gstaudiosink_class->reset = GST_DEBUG_FUNCPTR (gst_openal_sink_reset);

  g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
      g_param_spec_string ("device-name", "Device name",
          "Human-readable name of the opened device", "", G_PARAM_READABLE));

  g_object_class_install_property (gobject_class, PROP_DEVICE,
      g_param_spec_string ("device", "Device",
          "Human-readable name of the device", OPENAL_DEFAULT_DEVICE,
          G_PARAM_READWRITE));

  /* Application-supplied ALC objects; only honoured before open(). */
  g_object_class_install_property (gobject_class, PROP_USER_DEVICE,
      g_param_spec_pointer ("user-device", "ALCdevice", "User device",
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_USER_CONTEXT,
      g_param_spec_pointer ("user-context", "ALCcontext", "User context",
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_USER_SOURCE,
      g_param_spec_uint ("user-source", "ALsource", "User source", 0, UINT_MAX,
          0, G_PARAM_READWRITE));

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
  g_object_class_install_property (gobject_class, PROP_USE_STREAM_INFO,
      g_param_spec_uint ("use-stream-info", "UseTizenAudioStreamInfo",
          "Option to use stream info (0 | 1)", 0, 1, 0, G_PARAM_READWRITE));

  g_object_class_install_property (gobject_class, PROP_STREAM_INFO,
      g_param_spec_pointer ("stream-info", "TizenAudioStreamInfo", "Stream info",
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_SOURCE_AMBISONIC_TYPE,
      g_param_spec_enum ("source-ambisonics-type", "ALsourceAmbisonicType",
          "Type of Ambisonics (Unknown | Periphonic | Non-periphonic)",
          GST_TYPE_AMBISONICS_TYPE, AMBISONICS_TYPE_UNKNOWN,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* Orientation properties take degrees; the setter converts to radians. */
  g_object_class_install_property (gobject_class, PROP_SOURCE_ORIENTATION_X_AXIS,
      g_param_spec_int ("source-orientation-x", "ALSourceOrientationX",
          "Source orientation (rotation angle) against X axis, deg.", -90, 90,
          0, G_PARAM_READWRITE));

  g_object_class_install_property (gobject_class, PROP_SOURCE_ORIENTATION_Y_AXIS,
      g_param_spec_int ("source-orientation-y", "ALSourceOrientationY",
          "Source orientation (rotation angle) against Y axis, deg.", -180, 180,
          0, G_PARAM_READWRITE));

  g_object_class_install_property (gobject_class, PROP_SOURCE_ORIENTATION_Z_AXIS,
      g_param_spec_int ("source-orientation-z", "ALSourceOrientationZ",
          "Source orientation (rotation angle) against Z axis, deg.", -180, 180,
          0, G_PARAM_READWRITE));

  /* Action signal: g_signal_emit_by_name (sink, "rotate", x, y, z). */
  openalsink_signals[ROTATE] =
      g_signal_new ("rotate",
      G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
      G_STRUCT_OFFSET (GstOpenALSinkClass, rotate), NULL, NULL, NULL,
      G_TYPE_NONE, 3, G_TYPE_FLOAT, G_TYPE_FLOAT, G_TYPE_FLOAT);
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  gst_element_class_set_static_metadata (gstelement_class, "OpenAL Audio Sink",
      "Sink/Audio", "Output audio through OpenAL",
      "Juan Manuel Borges Caño <juanmabcmail@gmail.com>");

  gst_element_class_add_static_pad_template (gstelement_class,
      &openalsink_factory);

}
+
+ static void
+ gst_openal_sink_init (GstOpenALSink * sink)
+ {
+ GST_DEBUG_OBJECT (sink, "initializing");
+
+ sink->device_name = g_strdup (OPENAL_DEFAULT_DEVICE);
+
+ sink->user_device = NULL;
+ sink->user_context = NULL;
+ sink->user_source = 0;
+
+ sink->default_device = NULL;
+ sink->default_context = NULL;
+ sink->default_source = 0;
+
+ sink->buffer_idx = 0;
+ sink->buffer_count = 0;
+ sink->buffers = NULL;
+ sink->buffer_length = 0;
+
+ sink->write_reset = AL_FALSE;
+ sink->probed_caps = NULL;
+
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++ sink->use_stream_info = 0;
++ sink->stream_info = NULL;
++
++ sink->ambisonic_type = AMBISONICS_TYPE_UNKNOWN;
++ sink->source_rotation_x = 0.f;
++ sink->source_rotation_y = 0.f;
++ sink->source_rotation_z = 0.f;
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ g_mutex_init (&sink->openal_lock);
+ }
+
+ static void
+ gst_openal_sink_finalize (GObject * object)
+ {
+ GstOpenALSink *sink = GST_OPENAL_SINK (object);
+
+ g_free (sink->device_name);
+ sink->device_name = NULL;
+ g_mutex_clear (&sink->openal_lock);
+
+ G_OBJECT_CLASS (gst_openal_sink_parent_class)->finalize (object);
+ }
+
/* Property setter.  The user-* properties are only accepted while no
 * device has been opened (default_device == NULL), so they cannot change
 * under a running ring buffer. */
static void
gst_openal_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstOpenALSink *sink = GST_OPENAL_SINK (object);

  switch (prop_id) {
    case PROP_DEVICE:
      g_free (sink->device_name);
      sink->device_name = g_value_dup_string (value);
      /* Caps were probed for the old device; force a re-probe. */
      if (sink->probed_caps)
        gst_caps_unref (sink->probed_caps);
      sink->probed_caps = NULL;
      break;
    case PROP_USER_DEVICE:
      if (!sink->default_device)
        sink->user_device = g_value_get_pointer (value);
      break;
    case PROP_USER_CONTEXT:
      if (!sink->default_device)
        sink->user_context = g_value_get_pointer (value);
      break;
    case PROP_USER_SOURCE:
      if (!sink->default_device)
        sink->user_source = g_value_get_uint (value);
      break;

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
    case PROP_USE_STREAM_INFO:
      sink->use_stream_info = g_value_get_uint (value);
      break;
    case PROP_STREAM_INFO:
      /* Write-once: an already-set stream info handle is never replaced. */
      if (!sink->stream_info)
        sink->stream_info = g_value_get_pointer (value);
      break;
    case PROP_SOURCE_AMBISONIC_TYPE:
      sink->ambisonic_type = g_value_get_enum (value);
      GST_DEBUG_OBJECT (sink, "sink->user_source %d", sink->user_source);
      break;
    /* Orientation properties arrive in degrees and are stored in radians. */
    case PROP_SOURCE_ORIENTATION_X_AXIS:
      sink->source_rotation_x = (ALfloat)g_value_get_int (value) / 180 * M_PI;
      break;
    case PROP_SOURCE_ORIENTATION_Y_AXIS:
      sink->source_rotation_y = (ALfloat)g_value_get_int (value) / 180 * M_PI;
      break;
    case PROP_SOURCE_ORIENTATION_Z_AXIS:
      sink->source_rotation_z = (ALfloat)g_value_get_int (value) / 180 * M_PI;
      break;
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* Property getter.  The locals start out as the element-owned (default_*)
 * objects; user-supplied ones are only reported as a fallback when nothing
 * has been opened yet. */
static void
gst_openal_sink_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstOpenALSink *sink = GST_OPENAL_SINK (object);
  const ALCchar *device_name = sink->device_name;
  ALCdevice *device = sink->default_device;
  ALCcontext *context = sink->default_context;
  ALuint source = sink->default_source;

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
  sound_stream_info_h stream_info = sink->stream_info;
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  switch (prop_id) {
    case PROP_DEVICE_NAME:
      /* Report the name of the actually-opened device, then reuse the
       * PROP_DEVICE code to set the string value. */
      device_name = "";
      if (device)
        device_name = alcGetString (device, ALC_DEVICE_SPECIFIER);
      /* fall-through */
    case PROP_DEVICE:
      g_value_set_string (value, device_name);
      break;
    case PROP_USER_DEVICE:
      if (!device)
        device = sink->user_device;
      g_value_set_pointer (value, device);
      break;
    case PROP_USER_CONTEXT:
      if (!context)
        context = sink->user_context;
      g_value_set_pointer (value, context);
      break;
    case PROP_USER_SOURCE:
      if (!source)
        source = sink->user_source;
      g_value_set_uint (value, source);
      break;

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
    case PROP_USE_STREAM_INFO:
      g_value_set_uint (value, sink->use_stream_info);
      break;
    case PROP_STREAM_INFO:
      /* NOTE(review): stream_info is already initialised from
       * sink->stream_info above, so this fallback is a no-op. */
      if (!stream_info)
        stream_info = sink->stream_info;
      g_value_set_pointer (value, stream_info);
      break;
    case PROP_SOURCE_AMBISONIC_TYPE:
      g_value_set_enum (value, sink->ambisonic_type);
      break;
    /* Stored internally in radians; reported back in degrees. */
    case PROP_SOURCE_ORIENTATION_X_AXIS:
      g_value_set_int (value, (int)(sink->source_rotation_x / M_PI * 180));
      break;
    case PROP_SOURCE_ORIENTATION_Y_AXIS:
      g_value_set_int (value, (int)(sink->source_rotation_y / M_PI * 180));
      break;
    case PROP_SOURCE_ORIENTATION_Z_AXIS:
      g_value_set_int (value, (int)(sink->source_rotation_z / M_PI * 180));
      break;
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* Probe @context for supported formats and build the corresponding caps.
 * Caller owns the returned caps.
 *
 * The chans[] table and each AL format-name list below are index-aligned
 * (mono, stereo, quad, 5.1, 6.1, 7.1): chans[i] provides the channel count
 * and positions for the i-th format name.  Keep them in sync when editing. */
static GstCaps *
gst_openal_helper_probe_caps (ALCcontext * context)
{
  static const struct
  {
    gint count;
    GstAudioChannelPosition positions[8];
  } chans[] = {
    {
      1, {
        GST_AUDIO_CHANNEL_POSITION_MONO}
    }, {
      2, {
        GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}
    }, {
      4, {
        GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
        GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
        GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}
    }, {
      6, {
        GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
        GST_AUDIO_CHANNEL_POSITION_LFE1,
        GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
        GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}
    }, {
      7, {
        GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
        GST_AUDIO_CHANNEL_POSITION_LFE1,
        GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
        GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
        GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}
    }, {
      8, {
        GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
        GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
        GST_AUDIO_CHANNEL_POSITION_LFE1,
        GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
        GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
        GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
        GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}
    },};
  GstStructure *structure;
  guint64 channel_mask;
  GstCaps *caps;
  ALCcontext *old;

  /* All al* queries below need @context current on this thread. */
  old = pushContext (context);

  caps = gst_caps_new_empty ();

  if (alIsExtensionPresent ("AL_EXT_MCFORMATS")) {
    const char *fmt32[] = {
      "AL_FORMAT_MONO_FLOAT32",
      "AL_FORMAT_STEREO_FLOAT32",
      "AL_FORMAT_QUAD32",
      "AL_FORMAT_51CHN32",
      "AL_FORMAT_61CHN32",
      "AL_FORMAT_71CHN32",
      NULL
    }, *fmt16[] = {
      "AL_FORMAT_MONO16",
      "AL_FORMAT_STEREO16",
      "AL_FORMAT_QUAD16",
      "AL_FORMAT_51CHN16",
      "AL_FORMAT_61CHN16", "AL_FORMAT_71CHN16", NULL}, *fmt8[] = {
      "AL_FORMAT_MONO8",
      "AL_FORMAT_STEREO8",
      "AL_FORMAT_QUAD8",
      "AL_FORMAT_51CHN8", "AL_FORMAT_61CHN8", "AL_FORMAT_71CHN8", NULL};
    int i;

    if (alIsExtensionPresent ("AL_EXT_FLOAT32")) {
      for (i = 0; fmt32[i]; i++) {
        /* An enum value of 0/-1 (or an AL error) means this particular
         * layout is not supported by the implementation — skip it. */
        ALenum value = alGetEnumValue (fmt32[i]);
        if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
          continue;

        structure =
            gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
            GST_AUDIO_NE (F32), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
            OPENAL_MAX_RATE, "channels", G_TYPE_INT, chans[i].count, NULL);
        if (chans[i].count > 2) {
          gst_audio_channel_positions_to_mask (chans[i].positions,
              chans[i].count, FALSE, &channel_mask);
          gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
              channel_mask, NULL);
        }
        gst_caps_append_structure (caps, structure);
      }
    }

    for (i = 0; fmt16[i]; i++) {
      ALenum value = alGetEnumValue (fmt16[i]);
      if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
        continue;

      structure =
          gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
          GST_AUDIO_NE (S16), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
          OPENAL_MAX_RATE, "channels", G_TYPE_INT, chans[i].count, NULL);
      if (chans[i].count > 2) {
        gst_audio_channel_positions_to_mask (chans[i].positions, chans[i].count,
            FALSE, &channel_mask);
        gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
            channel_mask, NULL);
      }
      gst_caps_append_structure (caps, structure);
    }
    for (i = 0; fmt8[i]; i++) {
      ALenum value = alGetEnumValue (fmt8[i]);
      if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
        continue;

      structure =
          gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
          G_STRINGIFY (U8), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
          OPENAL_MAX_RATE, "channels", G_TYPE_INT, chans[i].count, NULL);
      if (chans[i].count > 2) {
        gst_audio_channel_positions_to_mask (chans[i].positions, chans[i].count,
            FALSE, &channel_mask);
        gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
            channel_mask, NULL);
      }
      gst_caps_append_structure (caps, structure);
    }
  } else {
    /* No multi-channel extension: only mono/stereo core formats. */
    if (alIsExtensionPresent ("AL_EXT_FLOAT32")) {
      structure =
          gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
          GST_AUDIO_NE (F32), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
          OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
      gst_caps_append_structure (caps, structure);
    }

    structure =
        gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
        GST_AUDIO_NE (S16), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
        OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);

    structure =
        gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
        G_STRINGIFY (U8), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
        OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_double")) {
    structure =
        gst_structure_new ("audio/x-raw", "format", G_TYPE_STRING,
        GST_AUDIO_NE (F64), "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE,
        OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_IMA4")) {
    structure =
        gst_structure_new ("audio/x-adpcm", "layout", G_TYPE_STRING, "ima",
        "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE, OPENAL_MAX_RATE,
        "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
    gst_caps_append_structure (caps, structure);
  }

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
  /* Tizen: advertise 4-channel S16 for first-order (B-format) ambisonics.
   * The mask reuses ordinary surround positions; the actual channels carry
   * W/X/Y/Z components (see gst_openal_sink_parse_spec). */
  if (alIsExtensionPresent ("AL_EXT_BFORMAT")) {
    guint64 channel_mask = GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_LEFT) |
        GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_RIGHT) |
        GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_CENTER) |
        GST_AUDIO_CHANNEL_POSITION_MASK (REAR_CENTER);

    structure =
        gst_structure_new ("audio/x-raw",
        "format", G_TYPE_STRING, GST_AUDIO_NE (S16),
        "rate", GST_TYPE_INT_RANGE, OPENAL_MIN_RATE, OPENAL_MAX_RATE,
        "channels", G_TYPE_INT, 4, NULL);
    gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
        channel_mask, NULL);
    gst_caps_append_structure (caps, structure);
  }
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  if (alIsExtensionPresent ("AL_EXT_ALAW")) {
    structure =
        gst_structure_new ("audio/x-alaw", "rate", GST_TYPE_INT_RANGE,
        OPENAL_MIN_RATE, OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2,
        NULL);
    gst_caps_append_structure (caps, structure);
  }

  if (alIsExtensionPresent ("AL_EXT_MULAW_MCFORMATS")) {
    const char *fmtmulaw[] = {
      "AL_FORMAT_MONO_MULAW",
      "AL_FORMAT_STEREO_MULAW",
      "AL_FORMAT_QUAD_MULAW",
      "AL_FORMAT_51CHN_MULAW",
      "AL_FORMAT_61CHN_MULAW",
      "AL_FORMAT_71CHN_MULAW",
      NULL
    };
    int i;

    for (i = 0; fmtmulaw[i]; i++) {
      ALenum value = alGetEnumValue (fmtmulaw[i]);
      if (checkALError () != AL_NO_ERROR || value == 0 || value == -1)
        continue;

      structure =
          gst_structure_new ("audio/x-mulaw", "rate", GST_TYPE_INT_RANGE,
          OPENAL_MIN_RATE, OPENAL_MAX_RATE, "channels", G_TYPE_INT,
          chans[i].count, NULL);
      if (chans[i].count > 2) {
        gst_audio_channel_positions_to_mask (chans[i].positions, chans[i].count,
            FALSE, &channel_mask);
        gst_structure_set (structure, "channel-mask", GST_TYPE_BITMASK,
            channel_mask, NULL);
      }
      gst_caps_append_structure (caps, structure);
    }
  } else if (alIsExtensionPresent ("AL_EXT_MULAW")) {
    structure =
        gst_structure_new ("audio/x-mulaw", "rate", GST_TYPE_INT_RANGE,
        OPENAL_MIN_RATE, OPENAL_MAX_RATE, "channels", GST_TYPE_INT_RANGE, 1, 2,
        NULL);
    gst_caps_append_structure (caps, structure);
  }

  popContext (old, context);

  return caps;
}
+
+ static GstCaps *
+ gst_openal_sink_getcaps (GstBaseSink * basesink, GstCaps * filter)
+ {
+ GstOpenALSink *sink = GST_OPENAL_SINK (basesink);
+ GstCaps *caps;
+
+ if (sink->default_device == NULL) {
+ GstPad *pad = GST_BASE_SINK_PAD (basesink);
+ GstCaps *tcaps = gst_pad_get_pad_template_caps (pad);
+ caps = gst_caps_copy (tcaps);
+ gst_caps_unref (tcaps);
+ } else if (sink->probed_caps)
+ caps = gst_caps_copy (sink->probed_caps);
+ else {
+ if (sink->default_context)
+ caps = gst_openal_helper_probe_caps (sink->default_context);
+ else if (sink->user_context)
+ caps = gst_openal_helper_probe_caps (sink->user_context);
+ else {
+ ALCcontext *context = alcCreateContext (sink->default_device, NULL);
+ if (context) {
+ caps = gst_openal_helper_probe_caps (context);
+ alcDestroyContext (context);
+ } else {
+ GST_ELEMENT_WARNING (sink, RESOURCE, FAILED,
+ ("Could not create temporary context."),
+ GST_ALC_ERROR (sink->default_device));
+ caps = NULL;
+ }
+ }
+
+ if (caps && !gst_caps_is_empty (caps))
+ sink->probed_caps = gst_caps_copy (caps);
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ return intersection;
+ } else {
+ return caps;
+ }
+ }
+
/* GstAudioSink open vfunc: decide which ALC device to use.  Preference
 * order: validated user device, the device behind a user context, or a
 * device we open ourselves (on Tizen optionally through the stream-policy
 * aware alcOpenDeviceNew()). */
static gboolean
gst_openal_sink_open (GstAudioSink * audiosink)
{
  GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);

  if (sink->user_device) {
    /* Sanity-check the pointer by asking the device a trivial question;
     * also require any user context to belong to the same device. */
    ALCint value = -1;
    alcGetIntegerv (sink->user_device, ALC_ATTRIBUTES_SIZE, 1, &value);
    if (value > 0) {
      if (!sink->user_context
          || alcGetContextsDevice (sink->user_context) == sink->user_device)
        sink->default_device = sink->user_device;
    }
  } else if (sink->user_context)
    sink->default_device = alcGetContextsDevice (sink->user_context);

#ifndef TIZEN_FEATURE_OALSINK_MODIFICATION
  else
    sink->default_device = alcOpenDevice (sink->device_name);
#else /* TIZEN_FEATURE_OALSINK_MODIFICATION */
  else {
    if (sink->use_stream_info) {
      GST_DEBUG_OBJECT (sink, "The Tizen Stream Policy API is used.");
      if (!sink->stream_info) {
        /* use-stream-info was requested but no handle was provided. */
        GST_ELEMENT_ERROR (sink, RESOURCE, FAILED,
            ("Unable to process stream info."), GST_ALC_ERROR (sink->default_device));
        return FALSE;
      }
      /* NOTE(review): alcOpenDeviceNew() appears to be a Tizen-specific
       * OpenAL extension taking a sound_stream_info_h — confirm against
       * the platform's OpenAL headers. */
      sink->default_device = alcOpenDeviceNew (sink->device_name, sink->stream_info);
    } else {
      GST_DEBUG_OBJECT (sink, "The Tizen Stream Policy API is not used.");
      sink->default_device = alcOpenDevice (sink->device_name);
    }
  }
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  if (!sink->default_device) {
    GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE,
        ("Could not open device."), GST_ALC_ERROR (sink->default_device));
    return FALSE;
  }

  return TRUE;
}
+
+ static gboolean
+ gst_openal_sink_close (GstAudioSink * audiosink)
+ {
+ GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);
+
+ if (!sink->user_device && !sink->user_context) {
+ if (alcCloseDevice (sink->default_device) == ALC_FALSE) {
+ GST_ELEMENT_ERROR (sink, RESOURCE, CLOSE,
+ ("Could not close device."), GST_ALC_ERROR (sink->default_device));
+ return FALSE;
+ }
+ }
+ sink->default_device = NULL;
+
+ if (sink->probed_caps)
+ gst_caps_unref (sink->probed_caps);
+ sink->probed_caps = NULL;
+
+ return TRUE;
+ }
+
+ static void
+ gst_openal_sink_parse_spec (GstOpenALSink * sink,
+ const GstAudioRingBufferSpec * spec)
+ {
+ ALuint format = AL_NONE;
+
+ GST_DEBUG_OBJECT (sink,
+ "looking up format for type %d, gst-format %d, and %d channels",
+ spec->type, GST_AUDIO_INFO_FORMAT (&spec->info),
+ GST_AUDIO_INFO_CHANNELS (&spec->info));
+
+ /* Don't need to verify supported formats, since the probed caps will only
+ * report what was detected and we shouldn't get anything different */
+ switch (spec->type) {
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW:
+ switch (GST_AUDIO_INFO_FORMAT (&spec->info)) {
+ case GST_AUDIO_FORMAT_U8:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO8;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO8;
+ break;
+ case 4:
+ format = AL_FORMAT_QUAD8;
+ break;
+ case 6:
+ format = AL_FORMAT_51CHN8;
+ break;
+ case 7:
+ format = AL_FORMAT_61CHN8;
+ break;
+ case 8:
+ format = AL_FORMAT_71CHN8;
+ break;
+ default:
+ break;
+ }
+ break;
+
+ case GST_AUDIO_FORMAT_S16:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO16;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO16;
+ break;
+ case 4:
+ format = AL_FORMAT_QUAD16;
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++ if (sink->ambisonic_type == 1)
++ format = AL_FORMAT_BFORMAT3D_16; /*FIXME (m.alieskieie): Implement B-format support in more extended way */
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
+ break;
+ case 6:
+ format = AL_FORMAT_51CHN16;
+ break;
+ case 7:
+ format = AL_FORMAT_61CHN16;
+ break;
+ case 8:
+ format = AL_FORMAT_71CHN16;
+ break;
+ default:
+ break;
+ }
+ break;
+
+ case GST_AUDIO_FORMAT_F32:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO_FLOAT32;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO_FLOAT32;
+ break;
+ case 4:
+ format = AL_FORMAT_QUAD32;
+ break;
+ case 6:
+ format = AL_FORMAT_51CHN32;
+ break;
+ case 7:
+ format = AL_FORMAT_61CHN32;
+ break;
+ case 8:
+ format = AL_FORMAT_71CHN32;
+ break;
+ default:
+ break;
+ }
+ break;
+
+ case GST_AUDIO_FORMAT_F64:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO_DOUBLE_EXT;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO_DOUBLE_EXT;
+ break;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ break;
+
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IMA_ADPCM:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO_IMA4;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO_IMA4;
+ break;
+ default:
+ break;
+ }
+ break;
+
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO_ALAW_EXT;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO_ALAW_EXT;
+ break;
+ default:
+ break;
+ }
+ break;
+
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW:
+ switch (GST_AUDIO_INFO_CHANNELS (&spec->info)) {
+ case 1:
+ format = AL_FORMAT_MONO_MULAW;
+ break;
+ case 2:
+ format = AL_FORMAT_STEREO_MULAW;
+ break;
+ case 4:
+ format = AL_FORMAT_QUAD_MULAW;
+ break;
+ case 6:
+ format = AL_FORMAT_51CHN_MULAW;
+ break;
+ case 7:
+ format = AL_FORMAT_61CHN_MULAW;
+ break;
+ case 8:
+ format = AL_FORMAT_71CHN_MULAW;
+ break;
+ default:
+ break;
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ sink->bytes_per_sample = GST_AUDIO_INFO_BPS (&spec->info);
+ sink->rate = GST_AUDIO_INFO_RATE (&spec->info);
+ sink->channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+ sink->format = format;
+ sink->buffer_count = spec->segtotal;
+ sink->buffer_length = spec->segsize;
+ }
+
/* GstAudioSink prepare vfunc: (re)create the AL context, obtain/validate a
 * source, map the spec to an AL format and allocate the AL buffer ring.
 * On any failure the partially acquired resources are released in reverse
 * order via the fail: path. */
static gboolean
gst_openal_sink_prepare (GstAudioSink * audiosink,
    GstAudioRingBufferSpec * spec)
{
  GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);
  ALCcontext *context, *old;

  /* Re-negotiation: tear down the previous configuration first. */
  if (sink->default_context && !gst_openal_sink_unprepare (audiosink))
    return FALSE;

  if (sink->user_context)
    context = sink->user_context;
  else {
    ALCint attribs[3] = { 0, 0, 0 };

    /* Don't try to change the playback frequency of an app's device */
    if (!sink->user_device) {
      attribs[0] = ALC_FREQUENCY;
      attribs[1] = GST_AUDIO_INFO_RATE (&spec->info);
      attribs[2] = 0;
    }

    context = alcCreateContext (sink->default_device, attribs);
    if (!context) {
      GST_ELEMENT_ERROR (sink, RESOURCE, FAILED,
          ("Unable to prepare device."), GST_ALC_ERROR (sink->default_device));
      return FALSE;
    }
  }

  old = pushContext (context);

  if (sink->user_source) {
    /* A user source is only valid together with a user context. */
    if (!sink->user_context || !alIsSource (sink->user_source)) {
      GST_ELEMENT_ERROR (sink, RESOURCE, NOT_FOUND, (NULL),
          ("Invalid source specified for context"));
      goto fail;
    }
    sink->default_source = sink->user_source;
  } else {
    ALuint source;

    alGenSources (1, &source);
    if (checkALError () != AL_NO_ERROR) {
      GST_ELEMENT_ERROR (sink, RESOURCE, NO_SPACE_LEFT, (NULL),
          ("Unable to generate source"));
      goto fail;
    }
    sink->default_source = source;
  }

  gst_openal_sink_parse_spec (sink, spec);
  if (sink->format == AL_NONE) {
    GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
        ("Unable to get type %d, format %d, and %d channels", spec->type,
            GST_AUDIO_INFO_FORMAT (&spec->info),
            GST_AUDIO_INFO_CHANNELS (&spec->info)));
    goto fail;
  }

  /* One AL buffer name per ring-buffer segment. */
  sink->buffers = g_malloc (sink->buffer_count * sizeof (*sink->buffers));
  if (!sink->buffers) {
    GST_ELEMENT_ERROR (sink, RESOURCE, FAILED, ("Out of memory."),
        ("Unable to allocate buffers"));
    goto fail;
  }

  alGenBuffers (sink->buffer_count, sink->buffers);
  if (checkALError () != AL_NO_ERROR) {
    GST_ELEMENT_ERROR (sink, RESOURCE, NO_SPACE_LEFT, (NULL),
        ("Unable to generate %d buffers", sink->buffer_count));
    goto fail;
  }
  sink->buffer_idx = 0;

  popContext (old, context);
  sink->default_context = context;
  return TRUE;

fail:
  /* Release only what this function acquired; user objects are kept. */
  if (!sink->user_source && sink->default_source)
    alDeleteSources (1, &sink->default_source);
  sink->default_source = 0;

  g_free (sink->buffers);
  sink->buffers = NULL;
  sink->buffer_count = 0;
  sink->buffer_length = 0;

  popContext (old, context);
  if (!sink->user_context)
    alcDestroyContext (context);
  return FALSE;
}
+
/* GstAudioSink unprepare vfunc: stop playback, detach and delete the AL
 * buffers, and destroy the context/source unless they were supplied by the
 * user.  Order matters: buffers can only be deleted after the source has
 * stopped and been detached (AL_BUFFER = 0). */
static gboolean
gst_openal_sink_unprepare (GstAudioSink * audiosink)
{
  GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);
  ALCcontext *old;

  if (!sink->default_context)
    return TRUE;

  old = pushContext (sink->default_context);

  alSourceStop (sink->default_source);
  alSourcei (sink->default_source, AL_BUFFER, 0);

  if (!sink->user_source)
    alDeleteSources (1, &sink->default_source);
  sink->default_source = 0;

  alDeleteBuffers (sink->buffer_count, sink->buffers);
  g_free (sink->buffers);
  sink->buffers = NULL;
  sink->buffer_idx = 0;
  sink->buffer_count = 0;
  sink->buffer_length = 0;

  /* Drain any pending AL error so it isn't misattributed later. */
  checkALError ();
  popContext (old, sink->default_context);
  if (!sink->user_context)
    alcDestroyContext (sink->default_context);
  sink->default_context = NULL;

  return TRUE;
}
+
/* GstAudioSink write vfunc: wait until a ring slot frees up, recycle
 * processed AL buffers, upload @data into the next buffer and queue it.
 * Returns the number of bytes consumed (0 after a reset or on error). */
static gint
gst_openal_sink_write (GstAudioSink * audiosink, gpointer data, guint length)
{
  GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);
  ALint processed, queued, state;
  ALCcontext *old;
  gulong rest_us;
#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
  /* Default "at" (facing -Z) and "up" (+Y) orientation vectors. */
  ALfloat sourceOri[] = { 0.0, 0.0, -1.0, 0.0, 1.0, 0.0 };
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  g_assert (length == sink->buffer_length);

  old = pushContext (sink->default_context);

#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
  /* NOTE(review): the literal 1 presumably means AMBISONICS_TYPE_PERIPHONIC
   * (cf. gst_openal_sink_parse_spec) — confirm and use the enum name.
   * source_rotation_y_old appears to be a file-scope variable defined
   * outside this excerpt. */
  if (sink->ambisonic_type == 1) {
    sink->format = AL_FORMAT_BFORMAT3D_16;
    if (sink->source_rotation_y != source_rotation_y_old) {
      /* FIXME (m.alieskieie): Implement more appropriate
         Euler angles -> AL Source orientation transform algorithm */
      // sourceOri[0] = (-1) * sin (sink->source_rotation_y);
      // sourceOri[2] = (-1) * cos (sink->source_rotation_y);
      GST_DEBUG_OBJECT (sink,
          "Source_rotation_y = %g atx = %g atz = %g\n",
          sink->source_rotation_y, sourceOri[0], sourceOri[2]);
      alSourcefv (sink->default_source, AL_ORIENTATION, sourceOri);
      if (alGetError () != AL_NO_ERROR)
        GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL),
            ("Failed to set Source's orientation"));
      source_rotation_y_old = sink->source_rotation_y;
    }
  }
#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */

  /* Duration of one segment in microseconds — the poll interval below. */
  rest_us =
      (guint64) (sink->buffer_length / sink->bytes_per_sample) *
      G_USEC_PER_SEC / sink->rate / sink->channels;
  do {
    alGetSourcei (sink->default_source, AL_SOURCE_STATE, &state);
    alGetSourcei (sink->default_source, AL_BUFFERS_QUEUED, &queued);
    alGetSourcei (sink->default_source, AL_BUFFERS_PROCESSED, &processed);
    if (checkALError () != AL_NO_ERROR) {
      GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL),
          ("Source state error detected"));
      length = 0;
      goto out_nolock;
    }

    /* A slot is free as soon as a buffer was processed or the ring is not
     * yet full. */
    if (processed > 0 || queued < sink->buffer_count)
      break;
    if (state != AL_PLAYING)
      alSourcePlay (sink->default_source);
    g_usleep (rest_us);
  }
  while (1);

  GST_OPENAL_SINK_LOCK (sink);
  if (sink->write_reset != AL_FALSE) {
    /* reset() ran while we were waiting: drop this segment. */
    sink->write_reset = AL_FALSE;
    length = 0;
    goto out;
  }

  queued -= processed;
  while (processed-- > 0) {
    ALuint bid;
    alSourceUnqueueBuffers (sink->default_source, 1, &bid);
  }
  if (state == AL_STOPPED) {
    /* "Restore" from underruns (not actually needed, but it keeps delay
     * calculations correct while rebuffering) */
    alSourceRewind (sink->default_source);
  }

  alBufferData (sink->buffers[sink->buffer_idx], sink->format,
      data, sink->buffer_length, sink->rate);
  alSourceQueueBuffers (sink->default_source, 1,
      &sink->buffers[sink->buffer_idx]);
  sink->buffer_idx = (sink->buffer_idx + 1) % sink->buffer_count;
  queued++;

  /* Only (re)start playback once the ring is full again. */
  if (state != AL_PLAYING && queued == sink->buffer_count)
    alSourcePlay (sink->default_source);

  if (checkALError () != AL_NO_ERROR) {
    GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL),
        ("Source queue error detected"));
    goto out;
  }

out:
  GST_OPENAL_SINK_UNLOCK (sink);
out_nolock:
  popContext (old, sink->default_context);
  return length;
}
+
+ static guint
+ gst_openal_sink_delay (GstAudioSink * audiosink)
+ {
+ GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);
+ ALint queued, state, offset, delay;
+ ALCcontext *old;
+
+ if (!sink->default_context)
+ return 0;
+
+ GST_OPENAL_SINK_LOCK (sink);
+ old = pushContext (sink->default_context);
+
+ delay = 0;
+ alGetSourcei (sink->default_source, AL_BUFFERS_QUEUED, &queued);
+ /* Order here is important. If the offset is queried after the state and an
+ * underrun occurs in between the two calls, it can end up with a 0 offset
+ * in a playing state, incorrectly reporting a len*queued/bps delay. */
+ alGetSourcei (sink->default_source, AL_BYTE_OFFSET, &offset);
+ alGetSourcei (sink->default_source, AL_SOURCE_STATE, &state);
+
+ /* Note: state=stopped is an underrun, meaning all buffers are processed
+ * and there's no delay when writing the next buffer. Pre-buffering is
+ * state=initial, which will introduce a delay while writing. */
+ if (checkALError () == AL_NO_ERROR && state != AL_STOPPED)
+ delay =
+ ((queued * sink->buffer_length) -
+ offset) / sink->bytes_per_sample / sink->channels;
+
+ popContext (old, sink->default_context);
+ GST_OPENAL_SINK_UNLOCK (sink);
+
+ if (G_UNLIKELY (delay < 0)) {
+ /* make sure we never return a negative delay */
+ GST_WARNING_OBJECT (sink, "negative delay");
+ delay = 0;
+ }
+
+ return delay;
+ }
+
+ static void
+ gst_openal_sink_reset (GstAudioSink * audiosink)
+ {
+ GstOpenALSink *sink = GST_OPENAL_SINK (audiosink);
+ ALCcontext *old;
+
+ GST_OPENAL_SINK_LOCK (sink);
+ old = pushContext (sink->default_context);
+
+ sink->write_reset = AL_TRUE;
+ alSourceStop (sink->default_source);
+ alSourceRewind (sink->default_source);
+ alSourcei (sink->default_source, AL_BUFFER, 0);
+ checkALError ();
+
+ popContext (old, sink->default_context);
+ GST_OPENAL_SINK_UNLOCK (sink);
+ }
++
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++void
++set_angles (GstElement * element, float abs_angle_x, float abs_angle_y,
++ float abs_angle_z)
++{
++ GstOpenALSink *sink = GST_OPENAL_SINK (element);
++ sink->source_rotation_x = abs_angle_x;
++ sink->source_rotation_y = abs_angle_y;
++ sink->source_rotation_z = abs_angle_z;
++ GST_DEBUG_OBJECT (sink, "\nAbsolute angles have been set directly:\n"
++ "x = %g (%g deg.)\n"
++ "y = %g (%g deg.)\n"
++ "z = %g (%g deg.)",
++ sink->source_rotation_x, sink->source_rotation_x / M_PI * 180,
++ sink->source_rotation_y, sink->source_rotation_y / M_PI * 180,
++ sink->source_rotation_z, sink->source_rotation_z / M_PI * 180);
++}
++
++void
++apply_rotation (GstElement * element, float rotation_x, float rotation_y,
++ float rotation_z)
++{
++ GstOpenALSink *sink = GST_OPENAL_SINK (element);
++ sink->source_rotation_x += rotation_x;
++ sink->source_rotation_y += rotation_y;
++ sink->source_rotation_z += rotation_z;
++
++ GST_DEBUG_OBJECT (sink, "\nAbsolute angles have been changed to:\n"
++ "x = %g (%g deg.) delta = %g (%g deg.)\n"
++ "y = %g (%g deg.) delta = %g (%g deg.)\n"
++ "z = %g (%g deg.) delta = %g (%g deg.)",
++ sink->source_rotation_x, sink->source_rotation_x / M_PI * 180,
++ rotation_x, rotation_x / M_PI * 180,
++ sink->source_rotation_y, sink->source_rotation_y / M_PI * 180,
++ rotation_y, rotation_y / M_PI * 180,
++ sink->source_rotation_z, sink->source_rotation_z / M_PI * 180,
++ rotation_z, rotation_z / M_PI * 180);
++}
++
++static gboolean
++gst_openal_sink_src_event (GstElement * element, GstEvent * event)
++{
++ GstOpenALSink *sink = GST_OPENAL_SINK (element);
++ gdouble yaw, pitch, roll;
++
++
++ if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
++ if (gst_navigation_event_get_type (event) == GST_NAVIGATION_EVENT_MOUSE_MOVE) {
++ GstStructure *structure = (GstStructure *) gst_event_get_structure (event);
++ if (gst_structure_get_double (structure, "yaw", &yaw) &&
++ gst_structure_get_double (structure, "pitch", &pitch) &&
++ gst_structure_get_double (structure, "roll", &roll)) {
++
++ GST_DEBUG_OBJECT (sink, "yaw = %g (%g deg.)\n", yaw, yaw / M_PI * 180);
++ GST_DEBUG_OBJECT (sink, "pitch = %g (%g deg.)\n", pitch, pitch / M_PI * 180);
++ GST_DEBUG_OBJECT (sink, "roll = %g (%g deg.)\n", roll, roll / M_PI * 180);
++
++ set_angles(element, pitch, yaw, -roll);
++ }
++ }
++ }
++
++ return GST_ELEMENT_CLASS (gst_openal_sink_parent_class)->send_event (element,
++ event);
++}
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
--- /dev/null
+ /*
+ * GStreamer
+ *
+ * Copyright (C) 2005 Thomas Vander Stichele <thomas@apestaart.org>
+ * Copyright (C) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2009-2010 Chris Robinson <chris.kcat@gmail.com>
+ * Copyright (C) 2013 Juan Manuel Borges Caño <juanmabcmail@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_OPENALSINK_H__
+ #define __GST_OPENALSINK_H__
+
+ #include <gst/gst.h>
+ #include <gst/audio/audio.h>
+
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++#include <gst/video/navigation.h>
++#include <sound_manager.h>
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ #ifdef _WIN32
+ #include <al.h>
+ #include <alc.h>
+ #include <alext.h>
+ #else
+ #include <AL/al.h>
+ #include <AL/alc.h>
+ #include <AL/alext.h>
+ #endif
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_OPENAL_SINK \
+ (gst_openal_sink_get_type())
+ #define GST_OPENAL_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPENAL_SINK,GstOpenALSink))
+ #define GST_OPENAL_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPENAL_SINK,GstOpenALSinkClass))
+ #define GST_IS_OPENAL_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPENAL_SINK))
+ #define GST_IS_OPENAL_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPENAL_SINK))
+ #define GST_OPENAL_SINK_CAST(obj) \
+ ((GstOpenALSink*)obj)
+
+ #if 1
+ #define GST_ALC_ERROR(Device) ("ALC error: %s", alcGetString((Device), alcGetError((Device))))
+ #else
+ #define GST_ALC_ERROR(Device) ("ALC error: 0x%x", alcGetError((Device)))
+ #endif
+
+ typedef struct _GstOpenALSink GstOpenALSink;
+ typedef struct _GstOpenALSinkClass GstOpenALSinkClass;
+
+ #define GST_OPENAL_SINK_GET_LOCK(obj) (&GST_OPENAL_SINK_CAST(obj)->openal_lock)
+ #define GST_OPENAL_SINK_LOCK(obj) (g_mutex_lock(GST_OPENAL_SINK_GET_LOCK(obj)))
+ #define GST_OPENAL_SINK_UNLOCK(obj) (g_mutex_unlock(GST_OPENAL_SINK_GET_LOCK(obj)))
+
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++typedef enum {
++ AMBISONICS_TYPE_UNKNOWN = 0,
++ AMBISONICS_TYPE_PERIPHONIC = 1, /**< To comply with Google's Spatial Audio RFC*/
++ AMBISONICS_TYPE_NON_PERIPHONIC = 2,
++} AmbisonicsType;
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ struct _GstOpenALSink
+ {
+ GstAudioSink sink;
+
+ gchar *device_name;
+
+ ALCdevice *default_device;
+ /* When set, device is not owned */
+ ALCdevice *user_device;
+
+ ALCcontext *default_context;
+ /* When set, device or context is not owned */
+ ALCcontext *user_context;
+
+ ALuint default_source;
+ /* When set, source is not owned */
+ ALuint user_source;
+
+ ALuint buffer_idx;
+ ALuint buffer_count;
+ ALuint *buffers;
+ ALuint buffer_length;
+
+ ALenum format;
+ ALuint rate;
+ ALuint channels;
+ ALuint bytes_per_sample;
+
+ ALboolean write_reset;
+
+ GstCaps *probed_caps;
+
+ GMutex openal_lock;
++
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++ ALuint use_stream_info;
++ sound_stream_info_h stream_info;
++
++ AmbisonicsType ambisonic_type;
++ ALfloat source_rotation_x;
++ ALfloat source_rotation_y;
++ ALfloat source_rotation_z;
++
++ gulong signal_id;
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
++
+ };
+
+ struct _GstOpenALSinkClass
+ {
+ GstAudioSinkClass parent_class;
++
++#ifdef TIZEN_FEATURE_OALSINK_MODIFICATION
++ void (*rotate) (GstElement * element, float rotation_x, float rotation_y, float rotation_z);
++ void (*event) (GstElement * element, GstEvent * event);
++#endif /* TIZEN_FEATURE_OALSINK_MODIFICATION */
+ };
+
+ GType gst_openal_sink_get_type (void);
+
+ G_END_DECLS
+
+ #endif /* __GST_OPENALSINK_H__ */
--- /dev/null
- dependencies: [gstaudio_dep, openal_dep],
+ openal_dep = dependency('openal', method: 'pkg-config', version: '>= 1.14', required: get_option('openal'))
+
+ if openal_dep.found()
+ gstopenal = library('gstopenal',
+ 'gstopenal.c', 'gstopenalelement.c', 'gstopenalsink.c', 'gstopenalsrc.c',
+ c_args: gst_plugins_bad_args,
+ include_directories: [configinc, libsinc],
++ dependencies: [gstaudio_dep, gstvideo_dep, openal_dep],
+ install: true,
+ install_dir: plugins_install_dir,
+ )
+ pkgconfig.generate(gstopenal, install_dir: plugins_pkgconfig_install_dir)
+ plugins += [gstopenal]
+ endif
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) <2013> Luciana Fujii <luciana.fujii@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:element-rsvgdec
+ * @title: rsvgdec
+ *
+ * This elements renders SVG graphics.
+ *
+ * ## Example launch lines
+ * |[
+ * gst-launch-1.0 filesrc location=image.svg ! rsvgdec ! imagefreeze ! videoconvert ! autovideosink
+ * ]| render and show a svg image.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstrsvgdec.h"
+
+ #include <string.h>
+
+ GST_DEBUG_CATEGORY_STATIC (rsvgdec_debug);
+ #define GST_CAT_DEFAULT rsvgdec_debug
+
+ static GstStaticPadTemplate sink_factory =
+ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/svg+xml; image/svg"));
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ #define GST_RSVG_VIDEO_CAPS GST_VIDEO_CAPS_MAKE ("BGRA")
+ #define GST_RSVG_VIDEO_FORMAT GST_VIDEO_FORMAT_BGRA
+ #else
+ #define GST_RSVG_VIDEO_CAPS GST_VIDEO_CAPS_MAKE ("ARGB")
+ #define GST_RSVG_VIDEO_FORMAT GST_VIDEO_FORMAT_ARGB
+ #endif
+
+ static GstStaticPadTemplate src_factory =
+ GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_RSVG_VIDEO_CAPS));
+
+ #define gst_rsvg_dec_parent_class parent_class
+ G_DEFINE_TYPE (GstRsvgDec, gst_rsvg_dec, GST_TYPE_VIDEO_DECODER);
+ GST_ELEMENT_REGISTER_DEFINE (rsvgdec, "rsvgdec", GST_RANK_PRIMARY,
+ GST_TYPE_RSVG_DEC);
+
+ static gboolean gst_rsvg_dec_stop (GstVideoDecoder * decoder);
+ static gboolean gst_rsvg_dec_set_format (GstVideoDecoder * decoder,
+ GstVideoCodecState * state);
+ static GstFlowReturn gst_rsvg_dec_parse (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
+ static GstFlowReturn gst_rsvg_dec_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame);
+ static GstFlowReturn gst_rsvg_decode_image (GstRsvgDec * rsvg,
+ GstBuffer * buffer, GstVideoCodecFrame * frame);
+
+ static void gst_rsvg_dec_finalize (GObject * object);
+
+ static void
+ gst_rsvg_dec_class_init (GstRsvgDecClass * klass)
+ {
+ GstVideoDecoderClass *video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (rsvgdec_debug, "rsvgdec", 0, "RSVG decoder");
+
+ gst_element_class_set_static_metadata (element_class,
+ "SVG image decoder", "Codec/Decoder/Image",
+ "Uses librsvg to decode SVG images",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_static_pad_template (element_class, &sink_factory);
+ gst_element_class_add_static_pad_template (element_class, &src_factory);
+
+ gobject_class->finalize = gst_rsvg_dec_finalize;
+ video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_rsvg_dec_stop);
+ video_decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_rsvg_dec_set_format);
+ video_decoder_class->parse = GST_DEBUG_FUNCPTR (gst_rsvg_dec_parse);
+ video_decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_rsvg_dec_handle_frame);
+ }
+
+ static void
+ gst_rsvg_dec_init (GstRsvgDec * rsvg)
+ {
+ GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
+ gst_video_decoder_set_packetized (decoder, FALSE);
+ gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
+ (rsvg), TRUE);
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (rsvg));
+ }
+
+ static void
+ gst_rsvg_dec_finalize (GObject * object)
+ {
+ G_OBJECT_CLASS (gst_rsvg_dec_parent_class)->finalize (object);
+ }
+
+
+ #define CAIRO_UNPREMULTIPLY(a,r,g,b) G_STMT_START { \
+ b = (a > 0) ? MIN ((b * 255 + a / 2) / a, 255) : 0; \
+ g = (a > 0) ? MIN ((g * 255 + a / 2) / a, 255) : 0; \
+ r = (a > 0) ? MIN ((r * 255 + a / 2) / a, 255) : 0; \
+ } G_STMT_END
+
+ static void
+ gst_rsvg_decode_unpremultiply (guint8 * data, gint width, gint height)
+ {
+ gint i, j;
+ guint a;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ a = data[3];
+ data[0] = (a > 0) ? MIN ((data[0] * 255 + a / 2) / a, 255) : 0;
+ data[1] = (a > 0) ? MIN ((data[1] * 255 + a / 2) / a, 255) : 0;
+ data[2] = (a > 0) ? MIN ((data[2] * 255 + a / 2) / a, 255) : 0;
+ #else
+ a = data[0];
+ data[1] = (a > 0) ? MIN ((data[1] * 255 + a / 2) / a, 255) : 0;
+ data[2] = (a > 0) ? MIN ((data[2] * 255 + a / 2) / a, 255) : 0;
+ data[3] = (a > 0) ? MIN ((data[3] * 255 + a / 2) / a, 255) : 0;
+ #endif
+ data += 4;
+ }
+ }
+ }
+
+ static GstFlowReturn
+ gst_rsvg_decode_image (GstRsvgDec * rsvg, GstBuffer * buffer,
+ GstVideoCodecFrame * frame)
+ {
+ GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
+ GstFlowReturn ret = GST_FLOW_OK;
+ cairo_t *cr;
+ cairo_surface_t *surface;
+ RsvgHandle *handle;
+ GError *error = NULL;
+ RsvgDimensionData dimension;
+ gdouble scalex, scaley;
+ GstMapInfo minfo;
+ GstVideoFrame vframe;
+ GstVideoCodecState *output_state;
+
+ GST_LOG_OBJECT (rsvg, "parsing svg");
+
+ if (!gst_buffer_map (buffer, &minfo, GST_MAP_READ)) {
+ GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");
+ return GST_FLOW_ERROR;
+ }
+ handle = rsvg_handle_new_from_data (minfo.data, minfo.size, &error);
+ if (!handle) {
+ GST_ERROR_OBJECT (rsvg, "Failed to parse SVG image: %s", error->message);
+ g_error_free (error);
++ gst_buffer_unmap (buffer, &minfo);
+ return GST_FLOW_ERROR;
+ }
+
+ rsvg_handle_get_dimensions (handle, &dimension);
+
+ output_state = gst_video_decoder_get_output_state (decoder);
+ if ((output_state == NULL)
+ || GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width
+ || GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {
+
+ /* Create the output state */
+ if (output_state)
+ gst_video_codec_state_unref (output_state);
+ output_state =
+ gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,
+ dimension.width, dimension.height, rsvg->input_state);
+ }
+
+ ret = gst_video_decoder_allocate_output_frame (decoder, frame);
+
+ if (ret != GST_FLOW_OK) {
+ g_object_unref (handle);
+ gst_video_codec_state_unref (output_state);
+ GST_ERROR_OBJECT (rsvg, "Buffer allocation failed %s",
+ gst_flow_get_name (ret));
++ gst_buffer_unmap (buffer, &minfo);
+ return ret;
+ }
+
+ GST_LOG_OBJECT (rsvg, "render image at %d x %d",
+ GST_VIDEO_INFO_HEIGHT (&output_state->info),
+ GST_VIDEO_INFO_WIDTH (&output_state->info));
+
+
+ if (!gst_video_frame_map (&vframe,
+ &output_state->info, frame->output_buffer, GST_MAP_READWRITE)) {
+ GST_ERROR_OBJECT (rsvg, "Failed to get SVG image");
+ g_object_unref (handle);
+ gst_video_codec_state_unref (output_state);
++ gst_buffer_unmap (buffer, &minfo);
+ return GST_FLOW_ERROR;
+ }
+ surface =
+ cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (&vframe,
+ 0), CAIRO_FORMAT_ARGB32, GST_VIDEO_FRAME_WIDTH (&vframe),
+ GST_VIDEO_FRAME_HEIGHT (&vframe), GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
+ 0));
+
+ cr = cairo_create (surface);
+ cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR);
+ cairo_set_source_rgba (cr, 1.0, 1.0, 1.0, 0.0);
+ cairo_paint (cr);
+ cairo_set_operator (cr, CAIRO_OPERATOR_OVER);
+ cairo_set_source_rgba (cr, 0.0, 0.0, 0.0, 1.0);
+
+ scalex = scaley = 1.0;
+ if (GST_VIDEO_INFO_WIDTH (&output_state->info) != dimension.width) {
+ scalex =
+ ((gdouble) GST_VIDEO_INFO_WIDTH (&output_state->info)) /
+ ((gdouble) dimension.width);
+ }
+ if (GST_VIDEO_INFO_HEIGHT (&output_state->info) != dimension.height) {
+ scaley =
+ ((gdouble) GST_VIDEO_INFO_HEIGHT (&output_state->info)) /
+ ((gdouble) dimension.height);
+ }
+ cairo_scale (cr, scalex, scaley);
+ rsvg_handle_render_cairo (handle, cr);
+
+ g_object_unref (handle);
+ cairo_destroy (cr);
+ cairo_surface_destroy (surface);
+
+ /* Now unpremultiply Cairo's ARGB to match GStreamer's */
+ gst_rsvg_decode_unpremultiply (GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0),
+ GST_VIDEO_FRAME_WIDTH (&vframe), GST_VIDEO_FRAME_HEIGHT (&vframe));
+
+ gst_video_codec_state_unref (output_state);
+ gst_buffer_unmap (buffer, &minfo);
+ gst_video_frame_unmap (&vframe);
+
+ return ret;
+ }
+
+
+ static gboolean
+ gst_rsvg_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
+ {
+ GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
+ GstVideoInfo *info = &state->info;
+
+ if (rsvg->input_state)
+ gst_video_codec_state_unref (rsvg->input_state);
+ rsvg->input_state = gst_video_codec_state_ref (state);
+
+ /* Create the output state */
+ state = gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,
+ GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info),
+ rsvg->input_state);
+ gst_video_codec_state_unref (state);
+
+ return TRUE;
+ }
+
+ static GstFlowReturn
+ gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
+ GstAdapter * adapter, gboolean at_eos)
+ {
+ gboolean completed = FALSE;
+ const guint8 *data;
+ guint size;
+ guint i;
+
+ GST_LOG_OBJECT (decoder, "parse start");
+ size = gst_adapter_available (adapter);
+
+ /* "<svg></svg>" */
+ if (size < 5 + 6)
+ return GST_VIDEO_DECODER_FLOW_NEED_DATA;
+
+ data = gst_adapter_map (adapter, size);
+ if (data == NULL) {
+ GST_ERROR_OBJECT (decoder, "Unable to map memory");
+ return GST_FLOW_ERROR;
+ }
+ for (i = 0; i < size - 4; i++) {
+ if (memcmp (data + i, "<svg", 4) == 0) {
+ gst_adapter_flush (adapter, i);
+
+ size = gst_adapter_available (adapter);
+ if (size < 5 + 6)
+ return GST_VIDEO_DECODER_FLOW_NEED_DATA;
+ data = gst_adapter_map (adapter, size);
+ if (data == NULL) {
+ GST_ERROR_OBJECT (decoder, "Unable to map memory");
+ return GST_FLOW_ERROR;
+ }
+ break;
+ }
+ }
+ /* If start wasn't found: */
+ if (i == size - 4) {
+ gst_adapter_flush (adapter, size - 4);
+ return GST_VIDEO_DECODER_FLOW_NEED_DATA;
+ }
+
+ for (i = size - 6; i >= 5; i--) {
+ if (memcmp (data + i, "</svg>", 6) == 0) {
+ completed = TRUE;
+ size = i + 6;
+ break;
+ }
+ if (i + 10 <= size && memcmp (data + i, "</svg:svg>", 10) == 0) {
+ completed = TRUE;
+ size = i + 10;
+ break;
+ }
+ }
+
+ if (completed) {
+
+ GST_LOG_OBJECT (decoder, "have complete svg of %u bytes", size);
+
+ gst_video_decoder_add_to_frame (decoder, size);
+ return gst_video_decoder_have_frame (decoder);
+ }
+ return GST_VIDEO_DECODER_FLOW_NEED_DATA;
+ }
+
+ static GstFlowReturn
+ gst_rsvg_dec_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame)
+ {
+ GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
+ GstFlowReturn ret;
+
+ ret = gst_rsvg_decode_image (rsvg, frame->input_buffer, frame);
+ switch (ret) {
+ case GST_FLOW_OK:
+ ret = gst_video_decoder_finish_frame (decoder, frame);
+ break;
+ default:
+ gst_video_codec_frame_unref (frame);
+ break;
+ }
+
+ GST_LOG_OBJECT (rsvg, "Handle frame done");
+ return ret;
+ }
+
+ static gboolean
+ gst_rsvg_dec_stop (GstVideoDecoder * decoder)
+ {
+ GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
+
+ if (rsvg->input_state) {
+ gst_video_codec_state_unref (rsvg->input_state);
+ rsvg->input_state = NULL;
+ }
+
+ return TRUE;
+ }
--- /dev/null
-/* generic convert function based on caps, no rate
- * used here
+ /* GStreamer pitch controller element
+ * Copyright (C) 2006 Wouter Paesen <wouter@blue-gate.be>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include <config.h>
+ #endif
+
+ /* FIXME: workaround for SoundTouch.h of version 1.3.1 defining those
+ * variables while it shouldn't. */
+ #undef VERSION
+ #undef PACKAGE_VERSION
+ #undef PACKAGE_TARNAME
+ #undef PACKAGE_STRING
+ #undef PACKAGE_NAME
+ #undef PACKAGE_BUGREPORT
+ #undef PACKAGE
+
+ #include <soundtouch/SoundTouch.h>
+
+ #include <gst/gst.h>
+ #include <gst/audio/audio.h>
+
+ #include "gstpitch.hh"
+ #include <math.h>
+
+ GST_DEBUG_CATEGORY_STATIC (pitch_debug);
+ #define GST_CAT_DEFAULT pitch_debug
+
+ #define GST_PITCH_GET_PRIVATE(o) (o->priv)
+ struct _GstPitchPrivate
+ {
+ gfloat stream_time_ratio;
+
+ GstEvent *pending_segment;
+
+ soundtouch::SoundTouch * st;
+ };
+
+ enum
+ {
+ ARG_0,
+ ARG_OUT_RATE,
+ ARG_RATE,
+ ARG_TEMPO,
+ ARG_PITCH
+ };
+
+ /* For soundtouch 1.4 */
+ #if defined(INTEGER_SAMPLES)
+ #define SOUNDTOUCH_INTEGER_SAMPLES 1
+ #elif defined(FLOAT_SAMPLES)
+ #define SOUNDTOUCH_FLOAT_SAMPLES 1
+ #endif
+
+ #if defined(SOUNDTOUCH_FLOAT_SAMPLES)
+ #define SUPPORTED_CAPS \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (F32) ", " \
+ "rate = (int) [ 8000, MAX ], " \
+ "channels = (int) [ 1, MAX ]"
+ #elif defined(SOUNDTOUCH_INTEGER_SAMPLES)
+ #define SUPPORTED_CAPS \
+ "audio/x-raw, " \
+ "format = (string) " GST_AUDIO_NE (S16) ", " \
+ "rate = (int) [ 8000, MAX ], " \
+ "channels = (int) [ 1, MAX ]"
+ #else
+ #error "Only integer or float samples are supported"
+ #endif
+
+ static GstStaticPadTemplate gst_pitch_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (SUPPORTED_CAPS));
+
+ static GstStaticPadTemplate gst_pitch_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (SUPPORTED_CAPS));
+
+ static void gst_pitch_dispose (GObject * object);
+ static void gst_pitch_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_pitch_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+
+ static gboolean gst_pitch_setcaps (GstPitch * pitch, GstCaps * caps);
+ static GstFlowReturn gst_pitch_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+ static GstStateChangeReturn gst_pitch_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean gst_pitch_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_pitch_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static gboolean gst_pitch_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+ #define gst_pitch_parent_class parent_class
+ G_DEFINE_TYPE_WITH_PRIVATE (GstPitch, gst_pitch, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE (pitch, "pitch", GST_RANK_NONE,
+ GST_TYPE_PITCH);
+
+ static void
+ gst_pitch_class_init (GstPitchClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (pitch_debug, "pitch", 0,
+ "audio pitch control element");
+
+ gobject_class->set_property = gst_pitch_set_property;
+ gobject_class->get_property = gst_pitch_get_property;
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_pitch_dispose);
+
+ g_object_class_install_property (gobject_class, ARG_PITCH,
+ g_param_spec_float ("pitch", "Pitch",
+ "Audio stream pitch", 0.1, 10.0, 1.0,
+ (GParamFlags) (G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE |
+ G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property (gobject_class, ARG_TEMPO,
+ g_param_spec_float ("tempo", "Tempo",
+ "Audio stream tempo", 0.1, 10.0, 1.0,
+ (GParamFlags) (G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE |
+ G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property (gobject_class, ARG_RATE,
+ g_param_spec_float ("rate", "Rate",
+ "Audio stream rate", 0.1, 10.0, 1.0,
+ (GParamFlags) (G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE |
+ G_PARAM_STATIC_STRINGS)));
+
+ g_object_class_install_property (gobject_class, ARG_OUT_RATE,
+ g_param_spec_float ("output-rate", "Output Rate",
+ "Output rate on downstream segment events", 0.1, 10.0, 1.0,
+ (GParamFlags) (G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE |
+ G_PARAM_STATIC_STRINGS)));
+
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_pitch_change_state);
+
+ gst_element_class_add_static_pad_template (element_class, &gst_pitch_src_template);
+ gst_element_class_add_static_pad_template (element_class, &gst_pitch_sink_template);
+
+ gst_element_class_set_static_metadata (element_class, "Pitch controller",
+ "Filter/Effect/Audio", "Control the pitch of an audio stream",
+ "Wouter Paesen <wouter@blue-gate.be>");
+ }
+
+ static void
+ gst_pitch_init (GstPitch * pitch)
+ {
+ pitch->priv = (GstPitchPrivate *) gst_pitch_get_instance_private (pitch);
+
+ pitch->sinkpad =
+ gst_pad_new_from_static_template (&gst_pitch_sink_template, "sink");
+ gst_pad_set_chain_function (pitch->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pitch_chain));
+ gst_pad_set_event_function (pitch->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pitch_sink_event));
+ GST_PAD_SET_PROXY_CAPS (pitch->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (pitch), pitch->sinkpad);
+
+ pitch->srcpad =
+ gst_pad_new_from_static_template (&gst_pitch_src_template, "src");
+ gst_pad_set_event_function (pitch->srcpad,
+ GST_DEBUG_FUNCPTR (gst_pitch_src_event));
+ gst_pad_set_query_function (pitch->srcpad,
+ GST_DEBUG_FUNCPTR (gst_pitch_src_query));
+ GST_PAD_SET_PROXY_CAPS (pitch->srcpad);
+ gst_element_add_pad (GST_ELEMENT (pitch), pitch->srcpad);
+
+ pitch->priv->st = new soundtouch::SoundTouch ();
+
+ pitch->tempo = 1.0;
+ pitch->rate = 1.0;
+ pitch->out_seg_rate = 1.0;
+ pitch->seg_arate = 1.0;
+ pitch->pitch = 1.0;
+ pitch->next_buffer_time = GST_CLOCK_TIME_NONE;
+ pitch->next_buffer_offset = 0;
+
+ pitch->priv->st->setRate (pitch->rate);
+ pitch->priv->st->setTempo (pitch->tempo * pitch->seg_arate);
+ pitch->priv->st->setPitch (pitch->pitch);
+
+ pitch->priv->stream_time_ratio = 1.0;
+ pitch->min_latency = pitch->max_latency = 0;
+ }
+
+
+ static void
+ gst_pitch_dispose (GObject * object)
+ {
+ GstPitch *pitch = GST_PITCH (object);
+
+ if (pitch->priv->st) {
+ delete pitch->priv->st;
+
+ pitch->priv->st = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_pitch_update_duration (GstPitch * pitch)
+ {
+ GstMessage *m;
+
+ m = gst_message_new_duration_changed (GST_OBJECT (pitch));
+ gst_element_post_message (GST_ELEMENT (pitch), m);
+ }
+
+ static void
+ gst_pitch_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstPitch *pitch = GST_PITCH (object);
+
+ GST_OBJECT_LOCK (pitch);
+ switch (prop_id) {
+ case ARG_TEMPO:
+ pitch->tempo = g_value_get_float (value);
+ pitch->priv->stream_time_ratio =
+ pitch->tempo * pitch->rate * pitch->seg_arate;
+ pitch->priv->st->setTempo (pitch->tempo * pitch->seg_arate);
+ GST_OBJECT_UNLOCK (pitch);
+ gst_pitch_update_duration (pitch);
+ break;
+ case ARG_RATE:
+ pitch->rate = g_value_get_float (value);
+ pitch->priv->stream_time_ratio =
+ pitch->tempo * pitch->rate * pitch->seg_arate;
+ pitch->priv->st->setRate (pitch->rate);
+ GST_OBJECT_UNLOCK (pitch);
+ gst_pitch_update_duration (pitch);
+ break;
+ case ARG_OUT_RATE:
+ /* Has no effect until the next input segment */
+ pitch->out_seg_rate = g_value_get_float (value);
+ GST_OBJECT_UNLOCK (pitch);
+ break;
+ case ARG_PITCH:
+ pitch->pitch = g_value_get_float (value);
+ pitch->priv->st->setPitch (pitch->pitch);
+ GST_OBJECT_UNLOCK (pitch);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ GST_OBJECT_UNLOCK (pitch);
+ break;
+ }
+ }
+
+ static void
+ gst_pitch_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstPitch *pitch = GST_PITCH (object);
+
+ GST_OBJECT_LOCK (pitch);
+ switch (prop_id) {
+ case ARG_TEMPO:
+ g_value_set_float (value, pitch->tempo);
+ break;
+ case ARG_RATE:
+ g_value_set_float (value, pitch->rate);
+ break;
+ case ARG_OUT_RATE:
+ g_value_set_float (value, pitch->out_seg_rate);
+ break;
+ case ARG_PITCH:
+ g_value_set_float (value, pitch->pitch);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (pitch);
+ }
+
+ static gboolean
+ gst_pitch_setcaps (GstPitch * pitch, GstCaps * caps)
+ {
+ GstPitchPrivate *priv;
+
+ priv = GST_PITCH_GET_PRIVATE (pitch);
+
+ if (!gst_audio_info_from_caps (&pitch->info, caps))
+ return FALSE;
+
+ GST_OBJECT_LOCK (pitch);
+
+ /* notify the soundtouch instance of this change */
+ priv->st->setSampleRate (pitch->info.rate);
+ priv->st->setChannels (pitch->info.channels);
+
+ GST_OBJECT_UNLOCK (pitch);
+
+ return TRUE;
+ }
+
+ /* send a buffer out */
+ static GstFlowReturn
+ gst_pitch_forward_buffer (GstPitch * pitch, GstBuffer * buffer)
+ {
+ gint samples;
+
+ GST_BUFFER_TIMESTAMP (buffer) = pitch->next_buffer_time;
+ pitch->next_buffer_time += GST_BUFFER_DURATION (buffer);
+
+ samples = GST_BUFFER_OFFSET (buffer);
+ GST_BUFFER_OFFSET (buffer) = pitch->next_buffer_offset;
+ pitch->next_buffer_offset += samples;
+ GST_BUFFER_OFFSET_END (buffer) = pitch->next_buffer_offset;
+
+ GST_LOG ("pushing buffer [%" GST_TIME_FORMAT "]-[%" GST_TIME_FORMAT
+ "] (%d samples)", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+ GST_TIME_ARGS (pitch->next_buffer_time), samples);
+
+ return gst_pad_push (pitch->srcpad, buffer);
+ }
+
+ /* extract a buffer from soundtouch */
+ static GstBuffer *
+ gst_pitch_prepare_buffer (GstPitch * pitch)
+ {
+ GstPitchPrivate *priv;
+ guint samples;
+ GstBuffer *buffer;
+ GstMapInfo info;
+
+ priv = GST_PITCH_GET_PRIVATE (pitch);
+
+ GST_LOG_OBJECT (pitch, "preparing buffer");
+
+ samples = pitch->priv->st->numSamples ();
+ if (samples == 0)
+ return NULL;
+
+ buffer = gst_buffer_new_and_alloc (samples * pitch->info.bpf);
+
+ gst_buffer_map (buffer, &info, (GstMapFlags) GST_MAP_READWRITE);
+ samples = priv->st->receiveSamples ((soundtouch::SAMPLETYPE *) info.data, samples);
+ gst_buffer_unmap (buffer, &info);
+
+ if (samples <= 0) {
+ gst_buffer_unref (buffer);
+ return NULL;
+ }
+
+ GST_BUFFER_DURATION (buffer) =
+ gst_util_uint64_scale (samples, GST_SECOND, pitch->info.rate);
+ /* temporary store samples here, to avoid having to recalculate this */
+ GST_BUFFER_OFFSET (buffer) = (gint64) samples;
+
+ return buffer;
+ }
+
+ /* process the last samples, in a later stage we should make sure no more
+ * samples are sent out here as strictly necessary, because soundtouch could
+ * append zero samples, which could disturb looping. */
+ static GstFlowReturn
+ gst_pitch_flush_buffer (GstPitch * pitch, gboolean send)
+ {
+ GstBuffer *buffer;
+
+ /* only ask soundtouch to flush if it actually holds unprocessed input */
+ if (pitch->priv->st->numUnprocessedSamples() != 0) {
+ GST_DEBUG_OBJECT (pitch, "flushing buffer");
+ pitch->priv->st->flush ();
+ }
+
+ /* send == FALSE: discard whatever was buffered (e.g. on FLUSH_STOP) */
+ if (!send)
+ return GST_FLOW_OK;
+
+ buffer = gst_pitch_prepare_buffer (pitch);
+
+ if (!buffer)
+ return GST_FLOW_OK;
+
+ return gst_pitch_forward_buffer (pitch, buffer);
+ }
+
+ /* src pad event handler: rewrites SEEK events so that positions expressed
+ * in output (stretched) time are scaled back to upstream time using the
+ * current stream_time_ratio; everything else is forwarded unchanged */
+ static gboolean
+ gst_pitch_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstPitch *pitch;
+ gboolean res;
+
+ pitch = GST_PITCH (parent);
+
+ GST_DEBUG_OBJECT (pad, "received %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:{
+ /* transform the event upstream, according to the playback rate */
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 cur, stop;
+ gfloat stream_time_ratio;
+ guint32 seqnum;
+
+ GST_OBJECT_LOCK (pitch);
+ stream_time_ratio = pitch->priv->stream_time_ratio;
+ GST_OBJECT_UNLOCK (pitch);
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+
+ /* keep the seqnum so the rewritten seek stays correlated upstream */
+ seqnum = gst_event_get_seqnum (event);
+
+ gst_event_unref (event);
+
+ if (format == GST_FORMAT_TIME || format == GST_FORMAT_DEFAULT) {
+ /* scale seek positions from output time into upstream time */
+ cur = (gint64) (cur * stream_time_ratio);
+ if (stop != -1)
+ stop = (gint64) (stop * stream_time_ratio);
+
+ event = gst_event_new_seek (rate, format, flags,
+ cur_type, cur, stop_type, stop);
+ gst_event_set_seqnum (event, seqnum);
+ res = gst_pad_event_default (pad, parent, event);
+ } else {
+ GST_WARNING_OBJECT (pitch,
+ "Seeking only supported in TIME or DEFAULT format");
+ res = FALSE;
+ }
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ return res;
+ }
+
- if (out_buffer)
++/* generic convert function based on caps, no rate
++ * used here
+ */
+ /* convert a value between BYTES/TIME/DEFAULT formats using the negotiated
+ * sample size (bytes per frame) and sample rate; no rate scaling is applied
+ * here. Returns FALSE if caps are not known yet or the format pair is
+ * unsupported. */
+ static gboolean
+ gst_pitch_convert (GstPitch * pitch,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dst_format, gint64 * dst_value)
+ {
+ gboolean res = TRUE;
+ guint sample_size;
+ gint samplerate;
+
+ g_return_val_if_fail (dst_format && dst_value, FALSE);
+
+ GST_OBJECT_LOCK (pitch);
+ sample_size = pitch->info.bpf;
+ samplerate = pitch->info.rate;
+ GST_OBJECT_UNLOCK (pitch);
+
+ /* caps not negotiated yet: nothing sensible to compute */
+ if (sample_size == 0 || samplerate == 0) {
+ return FALSE;
+ }
+
+ /* same format, or unknown (-1) value: pass through unchanged */
+ if (src_format == *dst_format || src_value == -1) {
+ *dst_value = src_value;
+ return TRUE;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_BYTES:
+ switch (*dst_format) {
+ case GST_FORMAT_TIME:
+ *dst_value =
+ gst_util_uint64_scale_int (src_value, GST_SECOND,
+ sample_size * samplerate);
+ break;
+ case GST_FORMAT_DEFAULT:
+ *dst_value = gst_util_uint64_scale_int (src_value, 1, sample_size);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_TIME:
+ switch (*dst_format) {
+ case GST_FORMAT_BYTES:
+ *dst_value =
+ gst_util_uint64_scale_int (src_value, samplerate * sample_size,
+ GST_SECOND);
+ break;
+ case GST_FORMAT_DEFAULT:
+ *dst_value =
+ gst_util_uint64_scale_int (src_value, samplerate, GST_SECOND);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_DEFAULT:
+ switch (*dst_format) {
+ case GST_FORMAT_BYTES:
+ *dst_value = gst_util_uint64_scale_int (src_value, sample_size, 1);
+ break;
+ case GST_FORMAT_TIME:
+ *dst_value =
+ gst_util_uint64_scale_int (src_value, GST_SECOND, samplerate);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+ return res;
+ }
+
+ /* src pad query handler: DURATION is taken from upstream and scaled by the
+ * inverse stream_time_ratio; POSITION is answered from our own running
+ * output counters; CONVERT uses gst_pitch_convert(); LATENCY adds our own
+ * min/max latency on top of the peer's */
+ static gboolean
+ gst_pitch_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstPitch *pitch;
+ gboolean res = FALSE;
+ gfloat stream_time_ratio;
+ gint64 next_buffer_offset;
+ GstClockTime next_buffer_time;
+
+ pitch = GST_PITCH (parent);
+
+ GST_LOG ("%s query", GST_QUERY_TYPE_NAME (query));
+
+ /* snapshot shared state once, under the object lock */
+ GST_OBJECT_LOCK (pitch);
+ stream_time_ratio = pitch->priv->stream_time_ratio;
+ next_buffer_time = pitch->next_buffer_time;
+ next_buffer_offset = pitch->next_buffer_offset;
+ GST_OBJECT_UNLOCK (pitch);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:{
+ GstFormat format;
+ gint64 duration;
+
+ if (!gst_pad_query_default (pad, parent, query)) {
+ GST_DEBUG_OBJECT (pitch, "upstream provided no duration");
+ break;
+ }
+
+ gst_query_parse_duration (query, &format, &duration);
+
+ if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
+ GST_DEBUG_OBJECT (pitch, "not TIME or DEFAULT format");
+ break;
+ }
+ GST_LOG_OBJECT (pitch, "upstream duration: %" G_GINT64_FORMAT, duration);
+ /* upstream time divided by the ratio gives our stretched duration */
+ duration = (gint64) (duration / stream_time_ratio);
+ GST_LOG_OBJECT (pitch, "our duration: %" G_GINT64_FORMAT, duration);
+ gst_query_set_duration (query, format, duration);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_POSITION:{
+ GstFormat dst_format;
+ gint64 dst_value;
+
+ gst_query_parse_position (query, &dst_format, &dst_value);
+
+ if (dst_format != GST_FORMAT_TIME && dst_format != GST_FORMAT_DEFAULT) {
+ GST_DEBUG_OBJECT (pitch, "not TIME or DEFAULT format");
+ break;
+ }
+
+ /* position is whatever we will stamp on the next outgoing buffer */
+ if (dst_format == GST_FORMAT_TIME) {
+ dst_value = next_buffer_time;
+ res = TRUE;
+ } else {
+ dst_value = next_buffer_offset;
+ res = TRUE;
+ }
+
+ if (res) {
+ GST_LOG_OBJECT (pitch, "our position: %" G_GINT64_FORMAT, dst_value);
+ gst_query_set_position (query, dst_format, dst_value);
+ }
+ break;
+ }
+ case GST_QUERY_CONVERT:{
+ GstFormat src_format, dst_format;
+ gint64 src_value, dst_value;
+
+ gst_query_parse_convert (query, &src_format, &src_value,
+ &dst_format, NULL);
+
+ res = gst_pitch_convert (pitch, src_format, src_value,
+ &dst_format, &dst_value);
+
+ if (res) {
+ gst_query_set_convert (query, src_format, src_value,
+ dst_format, dst_value);
+ }
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ {
+ GstClockTime min, max;
+ gboolean live;
+ GstPad *peer;
+
+ if ((peer = gst_pad_get_peer (pitch->sinkpad))) {
+ if ((res = gst_pad_query (peer, query))) {
+ gst_query_parse_latency (query, &live, &min, &max);
+
+ GST_DEBUG ("Peer latency: min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min), GST_TIME_ARGS (max));
+
+ /* add our own latency */
+
+ GST_DEBUG ("Our latency: min %" GST_TIME_FORMAT
+ ", max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (pitch->min_latency),
+ GST_TIME_ARGS (pitch->max_latency));
+
+ min += pitch->min_latency;
+ /* NONE means "unbounded"; keep it that way */
+ if (max != GST_CLOCK_TIME_NONE)
+ max += pitch->max_latency;
+
+ GST_DEBUG ("Calculated total latency : min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min), GST_TIME_ARGS (max));
+ gst_query_set_latency (query, live, min, max);
+ }
+ gst_object_unref (peer);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+ }
+
+ /* this function returns FALSE if not enough data is known to transform the
+ * segment into proper downstream values. If the function does return false
+ * the segment should be stalled until enough information is available.
+ * If the function returns TRUE, event will be replaced by the new downstream
+ * compatible event.
+ */
+ static gboolean
+ gst_pitch_process_segment (GstPitch * pitch, GstEvent ** event)
+ {
+ guint32 seqnum;
+ gdouble out_seg_rate, our_arate;
+ gfloat stream_time_ratio;
+ GstSegment seg;
+
+ g_return_val_if_fail (event, FALSE);
+
+ GST_OBJECT_LOCK (pitch);
+ stream_time_ratio = pitch->priv->stream_time_ratio;
+ out_seg_rate = pitch->out_seg_rate;
+ GST_OBJECT_UNLOCK (pitch);
+
+ gst_event_copy_segment (*event, &seg);
+
+ if (seg.format != GST_FORMAT_TIME && seg.format != GST_FORMAT_DEFAULT) {
+ GST_WARNING_OBJECT (pitch,
+ "Only NEWSEGMENT in TIME or DEFAULT format supported, sending "
+ "open ended NEWSEGMENT in TIME format.");
+ seg.format = GST_FORMAT_TIME;
+ seg.start = 0;
+ seg.stop = -1;
+ seg.time = 0;
+ }
+
+ /* Figure out how much of the incoming 'rate' we'll apply ourselves */
+ our_arate = seg.rate / out_seg_rate;
+ /* update the output rate variables */
+ seg.rate = out_seg_rate;
+ seg.applied_rate *= our_arate;
+
+ GST_LOG_OBJECT (pitch->sinkpad, "in segment %" GST_SEGMENT_FORMAT, &seg);
+
+ stream_time_ratio = pitch->tempo * pitch->rate * pitch->seg_arate;
+
+ if (stream_time_ratio == 0) {
+ GST_LOG_OBJECT (pitch->sinkpad, "stream_time_ratio is zero");
+ return FALSE;
+ }
+
+ /* Update the playback rate */
+ GST_OBJECT_LOCK (pitch);
+ pitch->seg_arate = our_arate;
+ pitch->priv->stream_time_ratio = stream_time_ratio;
+ pitch->priv->st->setTempo (pitch->tempo * pitch->seg_arate);
+ GST_OBJECT_UNLOCK (pitch);
+
+ /* scale segment boundaries into output (stretched) time */
+ seg.start = (gint64) (seg.start / stream_time_ratio);
+ seg.position = (gint64) (seg.position / stream_time_ratio);
+ if (seg.stop != (guint64) - 1)
+ seg.stop = (gint64) (seg.stop / stream_time_ratio);
+ seg.time = (gint64) (seg.time / stream_time_ratio);
+
+ GST_LOG_OBJECT (pitch->sinkpad, "out segment %" GST_SEGMENT_FORMAT, &seg);
+
+ /* preserve the seqnum so the rewritten segment stays correlated with the
+ * original; gst_event_get_seqnum() returns guint32 (was stored in gint,
+ * an implicit sign conversion) */
+ seqnum = gst_event_get_seqnum (*event);
+ gst_event_unref (*event);
+ *event = gst_event_new_segment (&seg);
+ gst_event_set_seqnum (*event, seqnum);
+
+ return TRUE;
+ }
+
+ /* sink pad event handler: flushes/clears soundtouch state on FLUSH_STOP
+ * and EOS, rewrites SEGMENT events (possibly stalling them until caps are
+ * known), applies CAPS, and forwards everything else downstream */
+ static gboolean
+ gst_pitch_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ gboolean res = TRUE;
+ GstPitch *pitch;
+
+ pitch = GST_PITCH (parent);
+
+ GST_LOG_OBJECT (pad, "received %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ /* drop buffered output and reset the running counters */
+ gst_pitch_flush_buffer (pitch, FALSE);
+ pitch->priv->st->clear ();
+ pitch->next_buffer_offset = 0;
+ pitch->next_buffer_time = GST_CLOCK_TIME_NONE;
+ pitch->min_latency = pitch->max_latency = 0;
+ break;
+ case GST_EVENT_EOS:
+ /* push out whatever soundtouch still holds before EOS goes downstream */
+ gst_pitch_flush_buffer (pitch, TRUE);
+ pitch->priv->st->clear ();
+ pitch->min_latency = pitch->max_latency = 0;
+ break;
+ case GST_EVENT_SEGMENT:
+ if (!gst_pitch_process_segment (pitch, &event)) {
+ GST_LOG_OBJECT (pad, "not enough data known, stalling segment");
+ /* keep the segment pending; it is replayed from the chain function
+ * once caps/ratio are known. event = NULL prevents forwarding. */
+ if (GST_PITCH_GET_PRIVATE (pitch)->pending_segment)
+ gst_event_unref (GST_PITCH_GET_PRIVATE (pitch)->pending_segment);
+ GST_PITCH_GET_PRIVATE (pitch)->pending_segment = event;
+ event = NULL;
+ }
+ pitch->priv->st->clear ();
+ pitch->min_latency = pitch->max_latency = 0;
+ break;
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ res = gst_pitch_setcaps (pitch, caps);
+ if (!res) {
+ gst_event_unref (event);
+ goto done;
+ }
+ }
+ /* fall through: on success the caps event is forwarded below */
+ default:
+ break;
+ }
+
+ /* and forward it */
+ if (event)
+ res = gst_pad_event_default (pad, parent, event);
+
+ done:
+ return res;
+ }
+
+ /* track the element's min/max latency: compare the (ratio-scaled) input
+ * timestamp against the time of the next buffer we will output, and post
+ * a latency message when the window widens */
+ static void
+ gst_pitch_update_latency (GstPitch * pitch, GstClockTime timestamp)
+ {
+ GstClockTimeDiff current_latency, min_latency, max_latency;
+
+ current_latency =
+ (GstClockTimeDiff) (timestamp / pitch->priv->stream_time_ratio) -
+ pitch->next_buffer_time;
+
+ min_latency = MIN (pitch->min_latency, current_latency);
+ max_latency = MAX (pitch->max_latency, current_latency);
+
+ /* only post a message when something actually changed */
+ if (pitch->min_latency != min_latency || pitch->max_latency != max_latency) {
+ pitch->min_latency = min_latency;
+ pitch->max_latency = max_latency;
+
+ /* FIXME: what about the LATENCY event? It only has
+ * one latency value, should it be current, min or max?
+ * Should it include upstream latencies?
+ */
+
+ gst_element_post_message (GST_ELEMENT (pitch),
+ gst_message_new_latency (GST_OBJECT (pitch)));
+ }
+ }
+
+ /* chain function: feed incoming samples to soundtouch, replay any stalled
+ * segment first, update latency bookkeeping, and push out whatever output
+ * soundtouch has produced so far */
+ static GstFlowReturn
+ gst_pitch_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+ GstPitch *pitch;
+ GstPitchPrivate *priv;
+ GstClockTime timestamp;
+ GstMapInfo info;
+
+ pitch = GST_PITCH (parent);
+ priv = GST_PITCH_GET_PRIVATE (pitch);
+
+ timestamp = GST_BUFFER_TIMESTAMP (buffer);
+
+ // Remember the first time and corresponding offset
+ if (!GST_CLOCK_TIME_IS_VALID (pitch->next_buffer_time)) {
+ gfloat stream_time_ratio;
+ GstFormat out_format = GST_FORMAT_DEFAULT;
+
+ GST_OBJECT_LOCK (pitch);
+ stream_time_ratio = priv->stream_time_ratio;
+ GST_OBJECT_UNLOCK (pitch);
+
+ pitch->next_buffer_time = timestamp / stream_time_ratio;
+ gst_pitch_convert (pitch, GST_FORMAT_TIME, timestamp, &out_format,
+ &pitch->next_buffer_offset);
+ }
+
+ gst_object_sync_values (GST_OBJECT (pitch), pitch->next_buffer_time);
+
+ /* push the received samples on the soundtouch buffer */
+ GST_LOG_OBJECT (pitch, "incoming buffer (%d samples) %" GST_TIME_FORMAT,
+ (gint) (gst_buffer_get_size (buffer) / pitch->info.bpf),
+ GST_TIME_ARGS (timestamp));
+
+ /* a segment stalled in sink_event can now be processed, since caps and
+ * hence stream_time_ratio are known; a copy is processed so the pending
+ * original survives a second failure */
+ if (GST_PITCH_GET_PRIVATE (pitch)->pending_segment) {
+ GstEvent *event =
+ gst_event_copy (GST_PITCH_GET_PRIVATE (pitch)->pending_segment);
+
+ GST_LOG_OBJECT (pitch, "processing stalled segment");
+ if (!gst_pitch_process_segment (pitch, &event)) {
+ gst_buffer_unref (buffer);
+ gst_event_unref (event);
+ return GST_FLOW_ERROR;
+ }
+
+ if (!gst_pad_event_default (pitch->sinkpad, parent, event)) {
+ gst_buffer_unref (buffer);
+ gst_event_unref (event);
+ return GST_FLOW_ERROR;
+ }
+
+ gst_event_unref (GST_PITCH_GET_PRIVATE (pitch)->pending_segment);
+ GST_PITCH_GET_PRIVATE (pitch)->pending_segment = NULL;
+ }
+
+ gst_buffer_map (buffer, &info, GST_MAP_READ);
+ GST_OBJECT_LOCK (pitch);
+ priv->st->putSamples ((soundtouch::SAMPLETYPE *) info.data, info.size / pitch->info.bpf);
+ GST_OBJECT_UNLOCK (pitch);
+ gst_buffer_unmap (buffer, &info);
+ gst_buffer_unref (buffer);
+
+ /* Calculate latency */
+
+ gst_pitch_update_latency (pitch, timestamp);
+ /* and try to extract some samples from the soundtouch buffer */
+ if (!priv->st->isEmpty ()) {
+ GstBuffer *out_buffer;
+
+ out_buffer = gst_pitch_prepare_buffer (pitch);
++
++ if (out_buffer) {
++#ifdef TIZEN_FEATURE_PITCH_AUDIO_META
++ gint samples = GST_BUFFER_OFFSET (out_buffer);
++
++ if (GST_AUDIO_INFO_LAYOUT (&pitch->info) ==
++ GST_AUDIO_LAYOUT_NON_INTERLEAVED) {
++ gst_buffer_add_audio_meta (out_buffer, &pitch->info, samples, NULL);
++ }
++#endif
+ return gst_pitch_forward_buffer (pitch, out_buffer);
++ }
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ /* state change handler: resets soundtouch and the running counters when
+ * going READY->PAUSED, and drops any stalled segment on PAUSED->READY */
+ static GstStateChangeReturn
+ gst_pitch_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstPitch *pitch = GST_PITCH (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* start from a clean slate for the new stream */
+ pitch->next_buffer_time = GST_CLOCK_TIME_NONE;
+ pitch->next_buffer_offset = 0;
+ pitch->priv->st->clear ();
+ pitch->min_latency = pitch->max_latency = 0;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret != GST_STATE_CHANGE_SUCCESS)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* release a segment that never got replayed */
+ if (GST_PITCH_GET_PRIVATE (pitch)->pending_segment) {
+ gst_event_unref (GST_PITCH_GET_PRIVATE (pitch)->pending_segment);
+ GST_PITCH_GET_PRIVATE (pitch)->pending_segment = NULL;
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ default:
+ break;
+ }
+
+ return ret;
+ }
--- /dev/null
+ /* GStreamer Wayland video sink
+ *
+ * Copyright (C) 2014 Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301 USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include "wldisplay.h"
+ #include "wlbuffer.h"
+ #include "wlvideoformat.h"
+
+ #include <errno.h>
+
+ GST_DEBUG_CATEGORY_EXTERN (gstwayland_debug);
+ #define GST_CAT_DEFAULT gstwayland_debug
+
+ G_DEFINE_TYPE (GstWlDisplay, gst_wl_display, G_TYPE_OBJECT);
+
+ static void gst_wl_display_finalize (GObject * gobject);
+
+ /* GObject class init: only hook up finalize for teardown */
+ static void
+ gst_wl_display_class_init (GstWlDisplayClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ gobject_class->finalize = gst_wl_display_finalize;
+ }
+
+ /* instance init: allocate the format arrays, the fd poll used by the event
+ * thread, and the GstMemory->GstWlBuffer registry with its mutex */
+ static void
+ gst_wl_display_init (GstWlDisplay * self)
+ {
+ self->shm_formats = g_array_new (FALSE, FALSE, sizeof (uint32_t));
+ self->dmabuf_formats = g_array_new (FALSE, FALSE, sizeof (uint32_t));
+ self->wl_fd_poll = gst_poll_new (TRUE);
+ self->buffers = g_hash_table_new (g_direct_hash, g_direct_equal);
+ g_mutex_init (&self->buffers_mutex);
+ }
+
+ /* GHFunc: take a ref on each registered GstWlBuffer (used during finalize
+ * to claim ownership of all buffers at once) */
+ static void
+ gst_wl_ref_wl_buffer (gpointer key, gpointer value, gpointer user_data)
+ {
+ g_object_ref (value);
+ }
+
+ /* finalize: stop the event thread, force-release all registered buffers,
+ * then destroy every bound wayland proxy and, if we own it, the display
+ * connection itself */
+ static void
+ gst_wl_display_finalize (GObject * gobject)
+ {
+ GstWlDisplay *self = GST_WL_DISPLAY (gobject);
+
+ /* flushing the poll wakes up and terminates the event thread */
+ gst_poll_set_flushing (self->wl_fd_poll, TRUE);
+ if (self->thread)
+ g_thread_join (self->thread);
+
+ /* to avoid buffers being unregistered from another thread
+ * at the same time, take their ownership */
+ g_mutex_lock (&self->buffers_mutex);
+ self->shutting_down = TRUE;
+ g_hash_table_foreach (self->buffers, gst_wl_ref_wl_buffer, NULL);
+ g_mutex_unlock (&self->buffers_mutex);
+
+ g_hash_table_foreach (self->buffers,
+ (GHFunc) gst_wl_buffer_force_release_and_unref, NULL);
+ g_hash_table_remove_all (self->buffers);
+
+ g_array_unref (self->shm_formats);
+ g_array_unref (self->dmabuf_formats);
+ gst_poll_free (self->wl_fd_poll);
+ g_hash_table_unref (self->buffers);
+ g_mutex_clear (&self->buffers_mutex);
+
+ /* destroy proxies before disconnecting the display */
+ if (self->viewporter)
+ wp_viewporter_destroy (self->viewporter);
+
+ if (self->shm)
+ wl_shm_destroy (self->shm);
+
+ if (self->dmabuf)
+ zwp_linux_dmabuf_v1_destroy (self->dmabuf);
+
+ if (self->wl_shell)
+ wl_shell_destroy (self->wl_shell);
+
+ if (self->xdg_wm_base)
+ xdg_wm_base_destroy (self->xdg_wm_base);
+
+ if (self->fullscreen_shell)
+ zwp_fullscreen_shell_v1_release (self->fullscreen_shell);
+
+ if (self->compositor)
+ wl_compositor_destroy (self->compositor);
+
+ if (self->subcompositor)
+ wl_subcompositor_destroy (self->subcompositor);
+
+ if (self->registry)
+ wl_registry_destroy (self->registry);
+
+ if (self->display_wrapper)
+ wl_proxy_wrapper_destroy (self->display_wrapper);
+
+ if (self->queue)
+ wl_event_queue_destroy (self->queue);
+
+ /* only disconnect if we created the connection ourselves (own_display) */
+ if (self->own_display) {
+ wl_display_flush (self->display);
+ wl_display_disconnect (self->display);
+ }
+
+ G_OBJECT_CLASS (gst_wl_display_parent_class)->finalize (gobject);
+ }
+
+ /* wl_shm listener callback: record every SHM pixel format the compositor
+ * advertises */
+ static void
+ shm_format (void *data, struct wl_shm *wl_shm, uint32_t format)
+ {
+ GstWlDisplay *self = data;
+
+ g_array_append_val (self->shm_formats, format);
+ }
+
+ static const struct wl_shm_listener shm_listener = {
+ shm_format
+ };
+
+ /* zwp_linux_dmabuf listener callback: record only the dmabuf formats we
+ * can map to a GstVideoFormat */
+ static void
+ dmabuf_format (void *data, struct zwp_linux_dmabuf_v1 *zwp_linux_dmabuf,
+ uint32_t format)
+ {
+ GstWlDisplay *self = data;
+
+ if (gst_wl_dmabuf_format_to_video_format (format) != GST_VIDEO_FORMAT_UNKNOWN)
+ g_array_append_val (self->dmabuf_formats, format);
+ }
+
+ static const struct zwp_linux_dmabuf_v1_listener dmabuf_listener = {
+ dmabuf_format,
+ };
+
+ /* return TRUE if 'format' maps to an SHM format that the compositor
+ * advertised via the wl_shm listener */
+ gboolean
+ gst_wl_display_check_format_for_shm (GstWlDisplay * display,
+ GstVideoFormat format)
+ {
+ enum wl_shm_format shm_fmt;
+ GArray *formats;
+ guint i;
+
+ shm_fmt = gst_video_format_to_wl_shm_format (format);
+ /* -1 is the "no mapping" sentinel from the converter */
+ if (shm_fmt == (enum wl_shm_format) -1)
+ return FALSE;
+
+ formats = display->shm_formats;
+ for (i = 0; i < formats->len; i++) {
+ if (g_array_index (formats, uint32_t, i) == shm_fmt)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ /* return TRUE if 'format' maps to a dmabuf format that the compositor
+ * advertised (requires the zwp_linux_dmabuf_v1 interface to be bound) */
+ gboolean
+ gst_wl_display_check_format_for_dmabuf (GstWlDisplay * display,
+ GstVideoFormat format)
+ {
+ GArray *formats;
+ guint i, dmabuf_fmt;
+
+ if (!display->dmabuf)
+ return FALSE;
+
+ dmabuf_fmt = gst_video_format_to_wl_dmabuf_format (format);
+ /* '(guint) - 1' is gst-indent spacing for (guint)-1, the sentinel */
+ if (dmabuf_fmt == (guint) - 1)
+ return FALSE;
+
+ formats = display->dmabuf_formats;
+ for (i = 0; i < formats->len; i++) {
+ if (g_array_index (formats, uint32_t, i) == dmabuf_fmt)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ /* xdg_wm_base ping handler: answer the compositor's liveness check so it
+ * does not consider us unresponsive */
+ static void
+ handle_xdg_wm_base_ping (void *user_data, struct xdg_wm_base *xdg_wm_base,
+ uint32_t serial)
+ {
+ xdg_wm_base_pong (xdg_wm_base, serial);
+ }
+
+ static const struct xdg_wm_base_listener xdg_wm_base_listener = {
+ handle_xdg_wm_base_ping
+ };
+
+ /* wl_registry global announcement: bind every interface we know how to
+ * use, attaching listeners where the interface reports state (shm/dmabuf
+ * formats, xdg ping) */
+ static void
+ registry_handle_global (void *data, struct wl_registry *registry,
+ uint32_t id, const char *interface, uint32_t version)
+ {
+ GstWlDisplay *self = data;
+
+ if (g_strcmp0 (interface, "wl_compositor") == 0) {
+ /* cap at version 3, the highest this code was written against */
+ self->compositor = wl_registry_bind (registry, id, &wl_compositor_interface,
+ MIN (version, 3));
+ } else if (g_strcmp0 (interface, "wl_subcompositor") == 0) {
+ self->subcompositor =
+ wl_registry_bind (registry, id, &wl_subcompositor_interface, 1);
+ } else if (g_strcmp0 (interface, "wl_shell") == 0) {
+ self->wl_shell = wl_registry_bind (registry, id, &wl_shell_interface, 1);
+ } else if (g_strcmp0 (interface, "xdg_wm_base") == 0) {
+ self->xdg_wm_base =
+ wl_registry_bind (registry, id, &xdg_wm_base_interface, 1);
+ xdg_wm_base_add_listener (self->xdg_wm_base, &xdg_wm_base_listener, self);
+ } else if (g_strcmp0 (interface, "zwp_fullscreen_shell_v1") == 0) {
+ self->fullscreen_shell = wl_registry_bind (registry, id,
+ &zwp_fullscreen_shell_v1_interface, 1);
+ } else if (g_strcmp0 (interface, "wl_shm") == 0) {
+ self->shm = wl_registry_bind (registry, id, &wl_shm_interface, 1);
+ wl_shm_add_listener (self->shm, &shm_listener, self);
+ } else if (g_strcmp0 (interface, "wp_viewporter") == 0) {
+ self->viewporter =
+ wl_registry_bind (registry, id, &wp_viewporter_interface, 1);
+ } else if (g_strcmp0 (interface, "zwp_linux_dmabuf_v1") == 0) {
+ self->dmabuf =
+ wl_registry_bind (registry, id, &zwp_linux_dmabuf_v1_interface, 1);
+ zwp_linux_dmabuf_v1_add_listener (self->dmabuf, &dmabuf_listener, self);
+ }
+ }
+
+ static void
+ registry_handle_global_remove (void *data, struct wl_registry *registry,
+ uint32_t name)
+ {
+ /* temporarily do nothing */
++ GST_LOG ("Removed global object: name(%d)", name);
+ }
+
+ static const struct wl_registry_listener registry_listener = {
+ registry_handle_global,
+ registry_handle_global_remove
+ };
+
+ /* event thread: standard wayland prepare-read/poll/read/dispatch loop on
+ * our private queue. gst_poll_set_flushing() from finalize makes
+ * gst_poll_wait() fail with EBUSY, which is the normal exit path. */
+ static gpointer
+ gst_wl_display_thread_run (gpointer data)
+ {
+ GstWlDisplay *self = data;
+ GstPollFD pollfd = GST_POLL_FD_INIT;
+
+ pollfd.fd = wl_display_get_fd (self->display);
+ gst_poll_add_fd (self->wl_fd_poll, &pollfd);
+ gst_poll_fd_ctl_read (self->wl_fd_poll, &pollfd, TRUE);
+
+ /* main loop */
+ while (1) {
+ /* prepare_read fails while events are pending; dispatch until clear */
+ while (wl_display_prepare_read_queue (self->display, self->queue) != 0)
+ wl_display_dispatch_queue_pending (self->display, self->queue);
+ wl_display_flush (self->display);
+
+ if (gst_poll_wait (self->wl_fd_poll, GST_CLOCK_TIME_NONE) < 0) {
+ /* EBUSY means the poll was flushed on purpose (shutdown) */
+ gboolean normal = (errno == EBUSY);
+ wl_display_cancel_read (self->display);
+ if (normal)
+ break;
+ else
+ goto error;
+ }
+ if (wl_display_read_events (self->display) == -1)
+ goto error;
+ wl_display_dispatch_queue_pending (self->display, self->queue);
+ }
+
+ return NULL;
+
+ error:
+ GST_ERROR ("Error communicating with the wayland server");
+ return NULL;
+ }
+
+ /* connect to the wayland display 'name' (NULL for the default) and wrap it
+ * in a GstWlDisplay that owns the connection. Returns NULL and sets
+ * 'error' on failure. */
+ GstWlDisplay *
+ gst_wl_display_new (const gchar * name, GError ** error)
+ {
+ struct wl_display *display;
+
+ display = wl_display_connect (name);
+
+ if (!display) {
+ /* g_set_error() is NULL-safe; writing '*error' directly would crash
+ * callers that pass a NULL GError** (allowed by GLib convention) */
+ g_set_error (error, g_quark_from_static_string ("GstWlDisplay"), 0,
+ "Failed to connect to the wayland display '%s'",
+ name ? name : "(default)");
+ return NULL;
+ } else {
+ return gst_wl_display_new_existing (display, TRUE, error);
+ }
+ }
+
+ /* wrap an already-connected wl_display: create a private event queue,
+ * bind the registry globals (2 roundtrips), verify the mandatory
+ * interfaces exist and start the event thread. Takes ownership of the
+ * connection when take_ownership is TRUE. Returns NULL and sets 'error'
+ * on failure. */
+ GstWlDisplay *
+ gst_wl_display_new_existing (struct wl_display * display,
+ gboolean take_ownership, GError ** error)
+ {
+ GstWlDisplay *self;
+ GError *err = NULL;
+ gint i;
+
+ g_return_val_if_fail (display != NULL, NULL);
+
+ self = g_object_new (GST_TYPE_WL_DISPLAY, NULL);
+ self->display = display;
+ self->display_wrapper = wl_proxy_create_wrapper (display);
+ self->own_display = take_ownership;
+
+ self->queue = wl_display_create_queue (self->display);
+ wl_proxy_set_queue ((struct wl_proxy *) self->display_wrapper, self->queue);
+ self->registry = wl_display_get_registry (self->display_wrapper);
+ /* fixed mojibake: '&registry_listener' had been corrupted to the
+ * HTML entity '®istry_listener', which does not compile */
+ wl_registry_add_listener (self->registry, &registry_listener, self);
+
+ /* we need exactly 2 roundtrips to discover global objects and their state */
+ for (i = 0; i < 2; i++) {
+ if (wl_display_roundtrip_queue (self->display, self->queue) < 0) {
+ /* g_set_error() is NULL-safe, unlike the previous direct '*error =' */
+ g_set_error (error, g_quark_from_static_string ("GstWlDisplay"), 0,
+ "Error communicating with the wayland display");
+ g_object_unref (self);
+ return NULL;
+ }
+ }
+
+ /* verify we got all the required interfaces */
+ #define VERIFY_INTERFACE_EXISTS(var, interface) \
+ if (!self->var) { \
+ g_set_error (error, g_quark_from_static_string ("GstWlDisplay"), 0, \
+ "Could not bind to " interface ". Either it is not implemented in " \
+ "the compositor, or the implemented version doesn't match"); \
+ g_object_unref (self); \
+ return NULL; \
+ }
+
+ VERIFY_INTERFACE_EXISTS (compositor, "wl_compositor");
+ VERIFY_INTERFACE_EXISTS (subcompositor, "wl_subcompositor");
+ VERIFY_INTERFACE_EXISTS (shm, "wl_shm");
+
+ #undef VERIFY_INTERFACE_EXISTS
+
+ /* We make the viewporter optional even though it may cause bad display.
+ * This is so one can test wayland display on older compositor or on
+ * compositor that don't implement this extension. */
+ if (!self->viewporter) {
+ g_warning ("Wayland compositor is missing the ability to scale, video "
+ "display may not work properly.");
+ }
+
+ if (!self->dmabuf) {
+ g_warning ("Could not bind to zwp_linux_dmabuf_v1");
+ }
+
+ if (!self->wl_shell && !self->xdg_wm_base && !self->fullscreen_shell) {
+ /* If wl_surface and wl_display are passed via GstContext
+ * wl_shell, xdg_shell and zwp_fullscreen_shell are not used.
+ * In this case is correct to continue.
+ */
+ g_warning ("Could not bind to either wl_shell, xdg_wm_base or "
+ "zwp_fullscreen_shell, video display may not work properly.");
+ }
+
+ self->thread = g_thread_try_new ("GstWlDisplay", gst_wl_display_thread_run,
+ self, &err);
+ if (err) {
+ g_propagate_prefixed_error (error, err,
+ "Failed to start thread for the display's events");
+ g_object_unref (self);
+ return NULL;
+ }
+
+ return self;
+ }
+
+ /* associate a GstWlBuffer with the GstMemory it wraps; must not be called
+ * once finalize has started tearing the table down */
+ void
+ gst_wl_display_register_buffer (GstWlDisplay * self, gpointer gstmem,
+ gpointer wlbuffer)
+ {
+ g_assert (!self->shutting_down);
+
+ GST_TRACE_OBJECT (self, "registering GstWlBuffer %p to GstMem %p",
+ wlbuffer, gstmem);
+
+ g_mutex_lock (&self->buffers_mutex);
+ g_hash_table_replace (self->buffers, gstmem, wlbuffer);
+ g_mutex_unlock (&self->buffers_mutex);
+ }
+
+ /* look up the GstWlBuffer registered for 'gstmem', or NULL; thread-safe */
+ gpointer
+ gst_wl_display_lookup_buffer (GstWlDisplay * self, gpointer gstmem)
+ {
+ gpointer wlbuffer;
+ g_mutex_lock (&self->buffers_mutex);
+ wlbuffer = g_hash_table_lookup (self->buffers, gstmem);
+ g_mutex_unlock (&self->buffers_mutex);
+ return wlbuffer;
+ }
+
+ /* drop the GstMemory->GstWlBuffer association; a no-op during shutdown,
+ * when finalize has already taken ownership of all entries */
+ void
+ gst_wl_display_unregister_buffer (GstWlDisplay * self, gpointer gstmem)
+ {
+ GST_TRACE_OBJECT (self, "unregistering GstWlBuffer owned by %p", gstmem);
+
+ g_mutex_lock (&self->buffers_mutex);
+ if (G_LIKELY (!self->shutting_down))
+ g_hash_table_remove (self->buffers, gstmem);
+ g_mutex_unlock (&self->buffers_mutex);
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2017 Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include "gstwebrtcbin.h"
+ #include "gstwebrtcstats.h"
+ #include "transportstream.h"
+ #include "transportreceivebin.h"
+ #include "utils.h"
+ #include "webrtcsdp.h"
+ #include "webrtctransceiver.h"
+ #include "webrtcdatachannel.h"
+ #include "webrtcsctptransport.h"
+
+ #include "gst/webrtc/webrtc-priv.h"
+
+ #include <gst/rtp/rtp.h>
+
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+
+ #define RANDOM_SESSION_ID \
+ ((((((guint64) g_random_int()) << 32) | \
+ (guint64) g_random_int ())) & \
+ G_GUINT64_CONSTANT (0x7fffffffffffffff))
+
+ #define PC_GET_LOCK(w) (&w->priv->pc_lock)
+ #define PC_LOCK(w) (g_mutex_lock (PC_GET_LOCK(w)))
+ #define PC_UNLOCK(w) (g_mutex_unlock (PC_GET_LOCK(w)))
+
+ #define PC_GET_COND(w) (&w->priv->pc_cond)
+ #define PC_COND_WAIT(w) (g_cond_wait(PC_GET_COND(w), PC_GET_LOCK(w)))
+ #define PC_COND_BROADCAST(w) (g_cond_broadcast(PC_GET_COND(w)))
+ #define PC_COND_SIGNAL(w) (g_cond_signal(PC_GET_COND(w)))
+
+ #define ICE_GET_LOCK(w) (&w->priv->ice_lock)
+ #define ICE_LOCK(w) (g_mutex_lock (ICE_GET_LOCK(w)))
+ #define ICE_UNLOCK(w) (g_mutex_unlock (ICE_GET_LOCK(w)))
+
+ #define DC_GET_LOCK(w) (&w->priv->dc_lock)
+ #define DC_LOCK(w) (g_mutex_lock (DC_GET_LOCK(w)))
+ #define DC_UNLOCK(w) (g_mutex_unlock (DC_GET_LOCK(w)))
+
+ /* The extra time for the rtpstorage compared to the RTP jitterbuffer (in ms) */
+ #define RTPSTORAGE_EXTRA_TIME (50)
+
+ #define DEFAULT_JB_LATENCY 200
+
+ /**
+ * SECTION: element-webrtcbin
+ * title: webrtcbin
+ *
+ * This webrtcbin implements the majority of the W3's peerconnection API and
+ * implementation guide where possible. Generating offers, answers and setting
+ * local and remote SDP's are all supported. Both media descriptions and
+ * descriptions involving data channels are supported.
+ *
+ * Each input/output pad is equivalent to a Track in W3 parlance which are
+ * added/removed from the bin. The number of requested sink pads is the number
+ * of streams that will be sent to the receiver and will be associated with a
+ * GstWebRTCRTPTransceiver (very similar to W3 RTPTransceiver's).
+ *
+ * On the receiving side, RTPTransceiver's are created in response to setting
+ * a remote description. Output pads for the receiving streams in the set
+ * description are also created when data is received.
+ *
+ * A TransportStream is created when needed in order to transport the data over
+ * the necessary DTLS/ICE channel to the peer. The exact configuration depends
+ * on the negotiated SDP's between the peers based on the bundle and rtcp
+ * configuration. Some cases are outlined below for a simple single
+ * audio/video/data session:
+ *
+ * - max-bundle uses a single transport for all
+ * media/data transported. Renegotiation involves adding/removing the
+ * necessary streams to the existing transports.
+ * - max-compat involves two TransportStream per media stream
+ * to transport the rtp and the rtcp packets and a single TransportStream for
+ * all data channels. Each stream change involves modifying the associated
+ * TransportStream/s as necessary.
+ */
+
+ /*
+ * TODO:
+ * assert sending payload type matches the stream
+ * reconfiguration (of anything)
+ * LS groups
+ * balanced bundle policy
+ * setting custom DTLS certificates
+ *
+ * separate session id's from mlineindex properly
+ * how to deal with replacing a input/output track/stream
+ */
+
+ static void _update_need_negotiation (GstWebRTCBin * webrtc);
+
+ #define GST_CAT_DEFAULT gst_webrtc_bin_debug
+ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+ enum
+ {
+ PROP_PAD_TRANSCEIVER = 1,
+ };
+
+ static gboolean
+ _have_nice_elements (GstWebRTCBin * webrtc)
+ {
+ GstPluginFeature *feature;
+
+ feature = gst_registry_lookup_feature (gst_registry_get (), "nicesrc");
+ if (feature) {
+ gst_object_unref (feature);
+ } else {
+ GST_ELEMENT_ERROR (webrtc, CORE, MISSING_PLUGIN, NULL,
+ ("%s", "libnice elements are not available"));
+ return FALSE;
+ }
+
+ feature = gst_registry_lookup_feature (gst_registry_get (), "nicesink");
+ if (feature) {
+ gst_object_unref (feature);
+ } else {
+ GST_ELEMENT_ERROR (webrtc, CORE, MISSING_PLUGIN, NULL,
+ ("%s", "libnice elements are not available"));
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ _have_sctp_elements (GstWebRTCBin * webrtc)
+ {
+ GstPluginFeature *feature;
+
+ feature = gst_registry_lookup_feature (gst_registry_get (), "sctpdec");
+ if (feature) {
+ gst_object_unref (feature);
+ } else {
+ GST_ELEMENT_ERROR (webrtc, CORE, MISSING_PLUGIN, NULL,
+ ("%s", "sctp elements are not available"));
+ return FALSE;
+ }
+
+ feature = gst_registry_lookup_feature (gst_registry_get (), "sctpenc");
+ if (feature) {
+ gst_object_unref (feature);
+ } else {
+ GST_ELEMENT_ERROR (webrtc, CORE, MISSING_PLUGIN, NULL,
+ ("%s", "sctp elements are not available"));
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ _have_dtls_elements (GstWebRTCBin * webrtc)
+ {
+ GstPluginFeature *feature;
+
+ feature = gst_registry_lookup_feature (gst_registry_get (), "dtlsdec");
+ if (feature) {
+ gst_object_unref (feature);
+ } else {
+ GST_ELEMENT_ERROR (webrtc, CORE, MISSING_PLUGIN, NULL,
+ ("%s", "dtls elements are not available"));
+ return FALSE;
+ }
+
+ feature = gst_registry_lookup_feature (gst_registry_get (), "dtlsenc");
+ if (feature) {
+ gst_object_unref (feature);
+ } else {
+ GST_ELEMENT_ERROR (webrtc, CORE, MISSING_PLUGIN, NULL,
+ ("%s", "dtls elements are not available"));
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ G_DEFINE_TYPE (GstWebRTCBinPad, gst_webrtc_bin_pad, GST_TYPE_GHOST_PAD);
+
+ static void
+ gst_webrtc_bin_pad_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (object);
+
+ switch (prop_id) {
+ case PROP_PAD_TRANSCEIVER:
+ g_value_set_object (value, pad->trans);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_webrtc_bin_pad_finalize (GObject * object)
+ {
+ GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (object);
+
+ if (pad->trans)
+ gst_object_unref (pad->trans);
+ pad->trans = NULL;
+
+ if (pad->received_caps)
+ gst_caps_unref (pad->received_caps);
+ pad->received_caps = NULL;
+
+ G_OBJECT_CLASS (gst_webrtc_bin_pad_parent_class)->finalize (object);
+ }
+
+ static void
+ gst_webrtc_bin_pad_class_init (GstWebRTCBinPadClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->get_property = gst_webrtc_bin_pad_get_property;
+ gobject_class->finalize = gst_webrtc_bin_pad_finalize;
+
+ g_object_class_install_property (gobject_class,
+ PROP_PAD_TRANSCEIVER,
+ g_param_spec_object ("transceiver", "Transceiver",
+ "Transceiver associated with this pad",
+ GST_TYPE_WEBRTC_RTP_TRANSCEIVER,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ }
+
+ static void
+ gst_webrtc_bin_pad_update_ssrc_event (GstWebRTCBinPad * wpad)
+ {
+ if (wpad->received_caps) {
+ WebRTCTransceiver *trans = (WebRTCTransceiver *) wpad->trans;
+ GstPad *pad = GST_PAD (wpad);
+
+ trans->ssrc_event =
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY,
+ gst_structure_new ("GstWebRtcBinUpdateTos", "ssrc", G_TYPE_UINT,
+ trans->current_ssrc, NULL));
+ gst_pad_send_event (pad, gst_event_ref (trans->ssrc_event));
+ }
+ }
+
+ static gboolean
+ gst_webrtcbin_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstWebRTCBinPad *wpad = GST_WEBRTC_BIN_PAD (pad);
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (parent);
+ gboolean check_negotiation = FALSE;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ check_negotiation = (!wpad->received_caps
+ || gst_caps_is_equal (wpad->received_caps, caps));
+ gst_caps_replace (&wpad->received_caps, caps);
+
+ GST_DEBUG_OBJECT (parent,
+ "On %" GST_PTR_FORMAT " checking negotiation? %u, caps %"
+ GST_PTR_FORMAT, pad, check_negotiation, caps);
+
+ if (check_negotiation) {
+ WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (wpad->trans);
+ const GstStructure *s;
+
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_get_uint (s, "ssrc", &trans->current_ssrc);
+ gst_webrtc_bin_pad_update_ssrc_event (wpad);
+ }
+ } else if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ check_negotiation = TRUE;
+ }
+
+ if (check_negotiation) {
+ PC_LOCK (webrtc);
+ _update_need_negotiation (webrtc);
+ PC_UNLOCK (webrtc);
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+ }
+
+ static gboolean
+ gst_webrtcbin_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstWebRTCBinPad *wpad = GST_WEBRTC_BIN_PAD (pad);
+ gboolean ret = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_ACCEPT_CAPS:
+ GST_OBJECT_LOCK (wpad->trans);
+ if (wpad->trans->codec_preferences) {
+ GstCaps *caps;
+
+ gst_query_parse_accept_caps (query, &caps);
+
+ gst_query_set_accept_caps_result (query,
+ gst_caps_can_intersect (caps, wpad->trans->codec_preferences));
+ ret = TRUE;
+ }
+ GST_OBJECT_UNLOCK (wpad->trans);
+ break;
+
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *codec_preferences = NULL;
+
+ GST_OBJECT_LOCK (wpad->trans);
+ if (wpad->trans->codec_preferences)
+ codec_preferences = gst_caps_ref (wpad->trans->codec_preferences);
+ GST_OBJECT_UNLOCK (wpad->trans);
+
+ if (codec_preferences) {
+ GstCaps *filter = NULL;
+ GstCaps *filter_prefs = NULL;
+ GstPad *target;
+
+ gst_query_parse_caps (query, &filter);
+
+ if (filter) {
+ filter_prefs = gst_caps_intersect_full (filter, codec_preferences,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (codec_preferences);
+ } else {
+ filter_prefs = codec_preferences;
+ }
+
+ target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad));
+ if (target) {
+ GstCaps *result;
+
+ result = gst_pad_query_caps (target, filter_prefs);
+ gst_query_set_caps_result (query, result);
+ gst_caps_unref (result);
+
+ gst_object_unref (target);
+ } else {
+ gst_query_set_caps_result (query, filter_prefs);
+ }
+
+ gst_caps_unref (filter_prefs);
+ ret = TRUE;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (ret)
+ return TRUE;
+
+ return gst_pad_query_default (pad, parent, query);
+ }
+
+
+ static void
+ gst_webrtc_bin_pad_init (GstWebRTCBinPad * pad)
+ {
+ }
+
+ static GstWebRTCBinPad *
+ gst_webrtc_bin_pad_new (const gchar * name, GstPadDirection direction)
+ {
+ GstWebRTCBinPad *pad;
+ GstPadTemplate *template;
+
+ if (direction == GST_PAD_SINK)
+ template = gst_static_pad_template_get (&sink_template);
+ else if (direction == GST_PAD_SRC)
+ template = gst_static_pad_template_get (&src_template);
+ else
+ g_assert_not_reached ();
+
+ pad =
+ g_object_new (gst_webrtc_bin_pad_get_type (), "name", name, "direction",
+ direction, "template", template, NULL);
+ gst_object_unref (template);
+
+ gst_pad_set_event_function (GST_PAD (pad), gst_webrtcbin_sink_event);
+ gst_pad_set_query_function (GST_PAD (pad), gst_webrtcbin_sink_query);
+
+ GST_DEBUG_OBJECT (pad, "new visible pad with direction %s",
+ direction == GST_PAD_SRC ? "src" : "sink");
+ return pad;
+ }
+
+ #define gst_webrtc_bin_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstWebRTCBin, gst_webrtc_bin, GST_TYPE_BIN,
+ G_ADD_PRIVATE (GstWebRTCBin)
+ GST_DEBUG_CATEGORY_INIT (gst_webrtc_bin_debug, "webrtcbin", 0,
+ "webrtcbin element"););
+
+ static GstPad *_connect_input_stream (GstWebRTCBin * webrtc,
+ GstWebRTCBinPad * pad);
+
+ enum
+ {
+ SIGNAL_0,
+ CREATE_OFFER_SIGNAL,
+ CREATE_ANSWER_SIGNAL,
+ SET_LOCAL_DESCRIPTION_SIGNAL,
+ SET_REMOTE_DESCRIPTION_SIGNAL,
+ ADD_ICE_CANDIDATE_SIGNAL,
+ ON_NEGOTIATION_NEEDED_SIGNAL,
+ ON_ICE_CANDIDATE_SIGNAL,
+ ON_NEW_TRANSCEIVER_SIGNAL,
+ GET_STATS_SIGNAL,
+ ADD_TRANSCEIVER_SIGNAL,
+ GET_TRANSCEIVER_SIGNAL,
+ GET_TRANSCEIVERS_SIGNAL,
+ ADD_TURN_SERVER_SIGNAL,
+ CREATE_DATA_CHANNEL_SIGNAL,
+ ON_DATA_CHANNEL_SIGNAL,
+ LAST_SIGNAL,
+ };
+
+ enum
+ {
+ PROP_0,
+ PROP_CONNECTION_STATE,
+ PROP_SIGNALING_STATE,
+ PROP_ICE_GATHERING_STATE,
+ PROP_ICE_CONNECTION_STATE,
+ PROP_LOCAL_DESCRIPTION,
+ PROP_CURRENT_LOCAL_DESCRIPTION,
+ PROP_PENDING_LOCAL_DESCRIPTION,
+ PROP_REMOTE_DESCRIPTION,
+ PROP_CURRENT_REMOTE_DESCRIPTION,
+ PROP_PENDING_REMOTE_DESCRIPTION,
+ PROP_STUN_SERVER,
+ PROP_TURN_SERVER,
+ PROP_BUNDLE_POLICY,
+ PROP_ICE_TRANSPORT_POLICY,
+ PROP_ICE_AGENT,
+ PROP_LATENCY,
+ PROP_SCTP_TRANSPORT,
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ PROP_NETSIM,
++ PROP_DROP_PROBABILITY_SENDER,
++ PROP_DROP_PROBABILITY_RECEIVER
++#endif
+ };
+
+ static guint gst_webrtc_bin_signals[LAST_SIGNAL] = { 0 };
+
+ typedef struct
+ {
+ guint session_id;
+ GstWebRTCICEStream *stream;
+ } IceStreamItem;
+
+ /* FIXME: locking? */
+ GstWebRTCICEStream *
+ _find_ice_stream_for_session (GstWebRTCBin * webrtc, guint session_id)
+ {
+ int i;
+
+ for (i = 0; i < webrtc->priv->ice_stream_map->len; i++) {
+ IceStreamItem *item =
+ &g_array_index (webrtc->priv->ice_stream_map, IceStreamItem, i);
+
+ if (item->session_id == session_id) {
+ GST_TRACE_OBJECT (webrtc, "Found ice stream id %" GST_PTR_FORMAT " for "
+ "session %u", item->stream, session_id);
+ return item->stream;
+ }
+ }
+
+ GST_TRACE_OBJECT (webrtc, "No ice stream available for session %u",
+ session_id);
+ return NULL;
+ }
+
+ void
+ _add_ice_stream_item (GstWebRTCBin * webrtc, guint session_id,
+ GstWebRTCICEStream * stream)
+ {
+ IceStreamItem item = { session_id, stream };
+
+ GST_TRACE_OBJECT (webrtc, "adding ice stream %" GST_PTR_FORMAT " for "
+ "session %u", stream, session_id);
+ g_array_append_val (webrtc->priv->ice_stream_map, item);
+ }
+
+ typedef gboolean (*FindTransceiverFunc) (GstWebRTCRTPTransceiver * p1,
+ gconstpointer data);
+
+ static GstWebRTCRTPTransceiver *
+ _find_transceiver (GstWebRTCBin * webrtc, gconstpointer data,
+ FindTransceiverFunc func)
+ {
+ int i;
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *transceiver =
+ g_ptr_array_index (webrtc->priv->transceivers, i);
+
+ if (func (transceiver, data))
+ return transceiver;
+ }
+
+ return NULL;
+ }
+
+ static gboolean
+ match_for_mid (GstWebRTCRTPTransceiver * trans, const gchar * mid)
+ {
+ return g_strcmp0 (trans->mid, mid) == 0;
+ }
+
+ static gboolean
+ transceiver_match_for_mline (GstWebRTCRTPTransceiver * trans, guint * mline)
+ {
+ if (trans->stopped)
+ return FALSE;
+
+ return trans->mline == *mline;
+ }
+
+ static GstWebRTCRTPTransceiver *
+ _find_transceiver_for_mline (GstWebRTCBin * webrtc, guint mlineindex)
+ {
+ GstWebRTCRTPTransceiver *trans;
+
+ trans = _find_transceiver (webrtc, &mlineindex,
+ (FindTransceiverFunc) transceiver_match_for_mline);
+
+ GST_TRACE_OBJECT (webrtc,
+ "Found transceiver %" GST_PTR_FORMAT " for mlineindex %u", trans,
+ mlineindex);
+
+ return trans;
+ }
+
+ typedef gboolean (*FindTransportFunc) (TransportStream * p1,
+ gconstpointer data);
+
+ static TransportStream *
+ _find_transport (GstWebRTCBin * webrtc, gconstpointer data,
+ FindTransportFunc func)
+ {
+ int i;
+
+ for (i = 0; i < webrtc->priv->transports->len; i++) {
+ TransportStream *stream = g_ptr_array_index (webrtc->priv->transports, i);
+
+ if (func (stream, data))
+ return stream;
+ }
+
+ return NULL;
+ }
+
+ static gboolean
+ match_stream_for_session (TransportStream * trans, guint * session)
+ {
+ return trans->session_id == *session;
+ }
+
+ static TransportStream *
+ _find_transport_for_session (GstWebRTCBin * webrtc, guint session_id)
+ {
+ TransportStream *stream;
+
+ stream = _find_transport (webrtc, &session_id,
+ (FindTransportFunc) match_stream_for_session);
+
+ GST_TRACE_OBJECT (webrtc,
+ "Found transport %" GST_PTR_FORMAT " for session %u", stream, session_id);
+
+ return stream;
+ }
+
+ typedef gboolean (*FindPadFunc) (GstWebRTCBinPad * p1, gconstpointer data);
+
+ static GstWebRTCBinPad *
+ _find_pad (GstWebRTCBin * webrtc, gconstpointer data, FindPadFunc func)
+ {
+ GstElement *element = GST_ELEMENT (webrtc);
+ GList *l;
+
+ GST_OBJECT_LOCK (webrtc);
+ l = element->pads;
+ for (; l; l = g_list_next (l)) {
+ if (!GST_IS_WEBRTC_BIN_PAD (l->data))
+ continue;
+ if (func (l->data, data)) {
+ gst_object_ref (l->data);
+ GST_OBJECT_UNLOCK (webrtc);
+ return l->data;
+ }
+ }
+
+ l = webrtc->priv->pending_pads;
+ for (; l; l = g_list_next (l)) {
+ if (!GST_IS_WEBRTC_BIN_PAD (l->data))
+ continue;
+ if (func (l->data, data)) {
+ gst_object_ref (l->data);
+ GST_OBJECT_UNLOCK (webrtc);
+ return l->data;
+ }
+ }
+ GST_OBJECT_UNLOCK (webrtc);
+
+ return NULL;
+ }
+
+ typedef gboolean (*FindDataChannelFunc) (WebRTCDataChannel * p1,
+ gconstpointer data);
+
+ static WebRTCDataChannel *
+ _find_data_channel (GstWebRTCBin * webrtc, gconstpointer data,
+ FindDataChannelFunc func)
+ {
+ int i;
+
+ for (i = 0; i < webrtc->priv->data_channels->len; i++) {
+ WebRTCDataChannel *channel =
+ g_ptr_array_index (webrtc->priv->data_channels, i);
+
+ if (func (channel, data))
+ return channel;
+ }
+
+ return NULL;
+ }
+
+ static gboolean
+ data_channel_match_for_id (WebRTCDataChannel * channel, gint * id)
+ {
+ return channel->parent.id == *id;
+ }
+
+ /* always called with dc_lock held */
+ static WebRTCDataChannel *
+ _find_data_channel_for_id (GstWebRTCBin * webrtc, gint id)
+ {
+ WebRTCDataChannel *channel;
+
+ channel = _find_data_channel (webrtc, &id,
+ (FindDataChannelFunc) data_channel_match_for_id);
+
+ GST_TRACE_OBJECT (webrtc,
+ "Found data channel %" GST_PTR_FORMAT " for id %i", channel, id);
+
+ return channel;
+ }
+
+ static void
+ _add_pad_to_list (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad)
+ {
+ GST_OBJECT_LOCK (webrtc);
+ webrtc->priv->pending_pads = g_list_prepend (webrtc->priv->pending_pads, pad);
+ GST_OBJECT_UNLOCK (webrtc);
+ }
+
+ static void
+ _remove_pending_pad (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad)
+ {
+ GST_OBJECT_LOCK (webrtc);
+ webrtc->priv->pending_pads = g_list_remove (webrtc->priv->pending_pads, pad);
+ GST_OBJECT_UNLOCK (webrtc);
+ }
+
+ static void
+ _add_pad (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad)
+ {
+ _remove_pending_pad (webrtc, pad);
+
+ if (webrtc->priv->running)
+ gst_pad_set_active (GST_PAD (pad), TRUE);
+ gst_element_add_pad (GST_ELEMENT (webrtc), GST_PAD (pad));
+ }
+
+ static void
+ _remove_pad (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad)
+ {
+ _remove_pending_pad (webrtc, pad);
+
+ gst_element_remove_pad (GST_ELEMENT (webrtc), GST_PAD (pad));
++#ifdef __TIZEN__
++ GST_OBJECT_LOCK (webrtc);
++ webrtc->priv->max_sink_pad_serial--;
++ GST_OBJECT_UNLOCK (webrtc);
++#endif
+ }
+
+ typedef struct
+ {
+ GstPadDirection direction;
+ guint mline;
+ } MLineMatch;
+
+ static gboolean
+ pad_match_for_mline (GstWebRTCBinPad * pad, const MLineMatch * match)
+ {
+ return GST_PAD_DIRECTION (pad) == match->direction
+ && pad->trans->mline == match->mline;
+ }
+
+ static GstWebRTCBinPad *
+ _find_pad_for_mline (GstWebRTCBin * webrtc, GstPadDirection direction,
+ guint mline)
+ {
+ MLineMatch m = { direction, mline };
+
+ return _find_pad (webrtc, &m, (FindPadFunc) pad_match_for_mline);
+ }
+
+ typedef struct
+ {
+ GstPadDirection direction;
+ GstWebRTCRTPTransceiver *trans;
+ } TransMatch;
+
+ static gboolean
+ pad_match_for_transceiver (GstWebRTCBinPad * pad, TransMatch * m)
+ {
+ return GST_PAD_DIRECTION (pad) == m->direction && pad->trans == m->trans;
+ }
+
+ static GstWebRTCBinPad *
+ _find_pad_for_transceiver (GstWebRTCBin * webrtc, GstPadDirection direction,
+ GstWebRTCRTPTransceiver * trans)
+ {
+ TransMatch m = { direction, trans };
+
+ return _find_pad (webrtc, &m, (FindPadFunc) pad_match_for_transceiver);
+ }
+
+ #if 0
+ static gboolean
+ match_for_ssrc (GstWebRTCBinPad * pad, guint * ssrc)
+ {
+ return pad->ssrc == *ssrc;
+ }
+
+ static gboolean
+ match_for_pad (GstWebRTCBinPad * pad, GstWebRTCBinPad * other)
+ {
+ return pad == other;
+ }
+ #endif
+
+ static gboolean
+ _unlock_pc_thread (GMutex * lock)
+ {
+ g_mutex_unlock (lock);
+ return G_SOURCE_REMOVE;
+ }
+
+ static gpointer
+ _gst_pc_thread (GstWebRTCBin * webrtc)
+ {
+ PC_LOCK (webrtc);
+ webrtc->priv->main_context = g_main_context_new ();
+ webrtc->priv->loop = g_main_loop_new (webrtc->priv->main_context, FALSE);
+
+ PC_COND_BROADCAST (webrtc);
+ g_main_context_invoke (webrtc->priv->main_context,
+ (GSourceFunc) _unlock_pc_thread, PC_GET_LOCK (webrtc));
+
+ /* Having the thread be the thread default GMainContext will break the
+ * required queue-like ordering (from W3's peerconnection spec) of re-entrant
+ * tasks */
+ g_main_loop_run (webrtc->priv->loop);
+
+ GST_OBJECT_LOCK (webrtc);
+ g_main_context_unref (webrtc->priv->main_context);
+ webrtc->priv->main_context = NULL;
+ GST_OBJECT_UNLOCK (webrtc);
+
+ PC_LOCK (webrtc);
+ g_main_loop_unref (webrtc->priv->loop);
+ webrtc->priv->loop = NULL;
+ PC_COND_BROADCAST (webrtc);
+ PC_UNLOCK (webrtc);
+
+ return NULL;
+ }
+
+ static void
+ _start_thread (GstWebRTCBin * webrtc)
+ {
+ gchar *name;
+
+ PC_LOCK (webrtc);
+ name = g_strdup_printf ("%s:pc", GST_OBJECT_NAME (webrtc));
+ webrtc->priv->thread = g_thread_new (name, (GThreadFunc) _gst_pc_thread,
+ webrtc);
+ g_free (name);
+
+ while (!webrtc->priv->loop)
+ PC_COND_WAIT (webrtc);
+ webrtc->priv->is_closed = FALSE;
+ PC_UNLOCK (webrtc);
+ }
+
+ static void
+ _stop_thread (GstWebRTCBin * webrtc)
+ {
+ GST_OBJECT_LOCK (webrtc);
+ webrtc->priv->is_closed = TRUE;
+ GST_OBJECT_UNLOCK (webrtc);
+
+ PC_LOCK (webrtc);
+ g_main_loop_quit (webrtc->priv->loop);
+ while (webrtc->priv->loop)
+ PC_COND_WAIT (webrtc);
+ PC_UNLOCK (webrtc);
+
+ g_thread_unref (webrtc->priv->thread);
+ }
+
+ static gboolean
+ _execute_op (GstWebRTCBinTask * op)
+ {
+ GstStructure *s;
+
+ PC_LOCK (op->webrtc);
+ if (op->webrtc->priv->is_closed) {
+ PC_UNLOCK (op->webrtc);
+
+ if (op->promise) {
+ GError *error =
+ g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED,
+ "webrtcbin is closed. aborting execution.");
+ GstStructure *s =
+ gst_structure_new ("application/x-gstwebrtcbin-promise-error",
+ "error", G_TYPE_ERROR, error, NULL);
+
+ gst_promise_reply (op->promise, s);
+
+ g_clear_error (&error);
+ }
+ GST_DEBUG_OBJECT (op->webrtc,
+ "Peerconnection is closed, aborting execution");
+ goto out;
+ }
+
+ s = op->op (op->webrtc, op->data);
+
+ PC_UNLOCK (op->webrtc);
+
+ if (op->promise)
+ gst_promise_reply (op->promise, s);
+ else if (s)
+ gst_structure_free (s);
+
+ out:
+ return G_SOURCE_REMOVE;
+ }
+
+ static void
+ _free_op (GstWebRTCBinTask * op)
+ {
+ if (op->notify)
+ op->notify (op->data);
+ if (op->promise)
+ gst_promise_unref (op->promise);
+ g_free (op);
+ }
+
+ /*
+ * @promise is for correctly signalling the failure case to the caller when
+ * the user supplies it. Without passing it in, the promise would never
+ * be replied to in the case that @webrtc becomes closed between the idle
+ * source addition and the the execution of the idle source.
+ */
+ gboolean
+ gst_webrtc_bin_enqueue_task (GstWebRTCBin * webrtc, GstWebRTCBinFunc func,
+ gpointer data, GDestroyNotify notify, GstPromise * promise)
+ {
+ GstWebRTCBinTask *op;
+ GMainContext *ctx;
+ GSource *source;
+
+ g_return_val_if_fail (GST_IS_WEBRTC_BIN (webrtc), FALSE);
+
+ GST_OBJECT_LOCK (webrtc);
+ if (webrtc->priv->is_closed) {
+ GST_OBJECT_UNLOCK (webrtc);
+ GST_DEBUG_OBJECT (webrtc, "Peerconnection is closed, aborting execution");
+ if (notify)
+ notify (data);
+ return FALSE;
+ }
+ ctx = g_main_context_ref (webrtc->priv->main_context);
+ GST_OBJECT_UNLOCK (webrtc);
+
+ op = g_new0 (GstWebRTCBinTask, 1);
+ op->webrtc = webrtc;
+ op->op = func;
+ op->data = data;
+ op->notify = notify;
+ if (promise)
+ op->promise = gst_promise_ref (promise);
+
+ source = g_idle_source_new ();
+ g_source_set_priority (source, G_PRIORITY_DEFAULT);
+ g_source_set_callback (source, (GSourceFunc) _execute_op, op,
+ (GDestroyNotify) _free_op);
+ g_source_attach (source, ctx);
+ g_source_unref (source);
+ g_main_context_unref (ctx);
+
+ return TRUE;
+ }
+
+ /* https://www.w3.org/TR/webrtc/#dom-rtciceconnectionstate */
+ static GstWebRTCICEConnectionState
+ _collate_ice_connection_states (GstWebRTCBin * webrtc)
+ {
+ #define STATE(val) GST_WEBRTC_ICE_CONNECTION_STATE_ ## val
+ GstWebRTCICEConnectionState any_state = 0;
+ gboolean all_new_or_closed = TRUE;
+ gboolean all_completed_or_closed = TRUE;
+ gboolean all_connected_completed_or_closed = TRUE;
+ int i;
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *rtp_trans =
+ g_ptr_array_index (webrtc->priv->transceivers, i);
+ GstWebRTCICETransport *transport;
+ GstWebRTCICEConnectionState ice_state;
+
+ if (rtp_trans->stopped) {
+ GST_TRACE_OBJECT (webrtc, "transceiver %p stopped", rtp_trans);
+ continue;
+ }
+
+ if (!rtp_trans->mid) {
+ GST_TRACE_OBJECT (webrtc, "transceiver %p has no mid", rtp_trans);
+ continue;
+ }
+
+ transport = webrtc_transceiver_get_dtls_transport (rtp_trans)->transport;
+
+ /* get transport state */
+ g_object_get (transport, "state", &ice_state, NULL);
+ GST_TRACE_OBJECT (webrtc, "transceiver %p state 0x%x", rtp_trans,
+ ice_state);
+ any_state |= (1 << ice_state);
+
+ if (ice_state != STATE (NEW) && ice_state != STATE (CLOSED))
+ all_new_or_closed = FALSE;
+ if (ice_state != STATE (COMPLETED) && ice_state != STATE (CLOSED))
+ all_completed_or_closed = FALSE;
+ if (ice_state != STATE (CONNECTED) && ice_state != STATE (COMPLETED)
+ && ice_state != STATE (CLOSED))
+ all_connected_completed_or_closed = FALSE;
+ }
+
+ GST_TRACE_OBJECT (webrtc, "ICE connection state: 0x%x", any_state);
+
+ if (webrtc->priv->is_closed) {
+ GST_TRACE_OBJECT (webrtc, "returning closed");
+ return STATE (CLOSED);
+ }
+ /* Any of the RTCIceTransports are in the failed state. */
+ if (any_state & (1 << STATE (FAILED))) {
+ GST_TRACE_OBJECT (webrtc, "returning failed");
+ return STATE (FAILED);
+ }
+ /* Any of the RTCIceTransports are in the disconnected state. */
+ if (any_state & (1 << STATE (DISCONNECTED))) {
+ GST_TRACE_OBJECT (webrtc, "returning disconnected");
+ return STATE (DISCONNECTED);
+ }
+ /* All of the RTCIceTransports are in the new or closed state, or there are
+ * no transports. */
+ if (all_new_or_closed || webrtc->priv->transceivers->len == 0) {
+ GST_TRACE_OBJECT (webrtc, "returning new");
+ return STATE (NEW);
+ }
+ /* Any of the RTCIceTransports are in the checking or new state. */
+ if ((any_state & (1 << STATE (CHECKING))) || (any_state & (1 << STATE (NEW)))) {
+ GST_TRACE_OBJECT (webrtc, "returning checking");
+ return STATE (CHECKING);
+ }
+ /* All RTCIceTransports are in the completed or closed state. */
+ if (all_completed_or_closed) {
+ GST_TRACE_OBJECT (webrtc, "returning completed");
+ return STATE (COMPLETED);
+ }
+ /* All RTCIceTransports are in the connected, completed or closed state. */
+ if (all_connected_completed_or_closed) {
+ GST_TRACE_OBJECT (webrtc, "returning connected");
+ return STATE (CONNECTED);
+ }
+
+ GST_FIXME ("unspecified situation, returning old state");
+ return webrtc->ice_connection_state;
+ #undef STATE
+ }
+
+ /* https://www.w3.org/TR/webrtc/#dom-rtcicegatheringstate */
+ static GstWebRTCICEGatheringState
+ _collate_ice_gathering_states (GstWebRTCBin * webrtc)
+ {
+ #define STATE(val) GST_WEBRTC_ICE_GATHERING_STATE_ ## val
+ GstWebRTCICEGatheringState any_state = 0;
+ gboolean all_completed = webrtc->priv->transceivers->len > 0;
+ int i;
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *rtp_trans =
+ g_ptr_array_index (webrtc->priv->transceivers, i);
+ WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans);
+ TransportStream *stream = trans->stream;
+ GstWebRTCDTLSTransport *dtls_transport;
+ GstWebRTCICETransport *transport;
+ GstWebRTCICEGatheringState ice_state;
+
+ if (rtp_trans->stopped || stream == NULL) {
+ GST_TRACE_OBJECT (webrtc, "transceiver %p stopped or unassociated",
+ rtp_trans);
+ continue;
+ }
+
+ /* We only have a mid in the transceiver after we got the SDP answer,
+ * which is usually long after gathering has finished */
+ if (!rtp_trans->mid) {
+ GST_TRACE_OBJECT (webrtc, "transceiver %p has no mid", rtp_trans);
+ }
+
+ dtls_transport = webrtc_transceiver_get_dtls_transport (rtp_trans);
+ if (dtls_transport == NULL) {
+ GST_WARNING ("Transceiver %p has no DTLS transport", rtp_trans);
+ continue;
+ }
+
+ transport = dtls_transport->transport;
+
+ /* get gathering state */
+ g_object_get (transport, "gathering-state", &ice_state, NULL);
+ GST_TRACE_OBJECT (webrtc, "transceiver %p gathering state: 0x%x", rtp_trans,
+ ice_state);
+ any_state |= (1 << ice_state);
+ if (ice_state != STATE (COMPLETE))
+ all_completed = FALSE;
+ }
+
+ GST_TRACE_OBJECT (webrtc, "ICE gathering state: 0x%x", any_state);
+
+ /* Any of the RTCIceTransport s are in the gathering state. */
+ if (any_state & (1 << STATE (GATHERING))) {
+ GST_TRACE_OBJECT (webrtc, "returning gathering");
+ return STATE (GATHERING);
+ }
+ /* At least one RTCIceTransport exists, and all RTCIceTransport s are in
+ * the completed gathering state. */
+ if (all_completed) {
+ GST_TRACE_OBJECT (webrtc, "returning complete");
+ return STATE (COMPLETE);
+ }
+
+ /* Any of the RTCIceTransport s are in the new gathering state and none
+ * of the transports are in the gathering state, or there are no transports. */
+ GST_TRACE_OBJECT (webrtc, "returning new");
+ return STATE (NEW);
+ #undef STATE
+ }
+
+ /* https://www.w3.org/TR/webrtc/#rtcpeerconnectionstate-enum */
+ static GstWebRTCPeerConnectionState
+ _collate_peer_connection_states (GstWebRTCBin * webrtc)
+ {
+ #define STATE(v) GST_WEBRTC_PEER_CONNECTION_STATE_ ## v
+ #define ICE_STATE(v) GST_WEBRTC_ICE_CONNECTION_STATE_ ## v
+ #define DTLS_STATE(v) GST_WEBRTC_DTLS_TRANSPORT_STATE_ ## v
+ GstWebRTCICEConnectionState any_ice_state = 0;
+ GstWebRTCDTLSTransportState any_dtls_state = 0;
+ gboolean ice_all_new_or_closed = TRUE;
+ gboolean dtls_all_new_or_closed = TRUE;
+ gboolean ice_all_new_connecting_or_checking = TRUE;
+ gboolean dtls_all_new_connecting_or_checking = TRUE;
+ gboolean ice_all_connected_completed_or_closed = TRUE;
+ gboolean dtls_all_connected_completed_or_closed = TRUE;
+ int i;
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *rtp_trans =
+ g_ptr_array_index (webrtc->priv->transceivers, i);
+ GstWebRTCDTLSTransport *transport;
+ GstWebRTCICEConnectionState ice_state;
+ GstWebRTCDTLSTransportState dtls_state;
+
+ if (rtp_trans->stopped) {
+ GST_TRACE_OBJECT (webrtc, "transceiver %p stopped", rtp_trans);
+ continue;
+ }
+ if (!rtp_trans->mid) {
+ GST_TRACE_OBJECT (webrtc, "transceiver %p has no mid", rtp_trans);
+ continue;
+ }
+
+ transport = webrtc_transceiver_get_dtls_transport (rtp_trans);
+
+ /* get transport state */
+ g_object_get (transport, "state", &dtls_state, NULL);
+ GST_TRACE_OBJECT (webrtc, "transceiver %p DTLS state: 0x%x", rtp_trans,
+ dtls_state);
+ any_dtls_state |= (1 << dtls_state);
+
+ if (dtls_state != DTLS_STATE (NEW) && dtls_state != DTLS_STATE (CLOSED))
+ dtls_all_new_or_closed = FALSE;
+ if (dtls_state != DTLS_STATE (NEW) && dtls_state != DTLS_STATE (CONNECTING))
+ dtls_all_new_connecting_or_checking = FALSE;
+ if (dtls_state != DTLS_STATE (CONNECTED)
+ && dtls_state != DTLS_STATE (CLOSED))
+ dtls_all_connected_completed_or_closed = FALSE;
+
+ g_object_get (transport->transport, "state", &ice_state, NULL);
+ GST_TRACE_OBJECT (webrtc, "transceiver %p ICE state: 0x%x", rtp_trans,
+ ice_state);
+ any_ice_state |= (1 << ice_state);
+
+ if (ice_state != ICE_STATE (NEW) && ice_state != ICE_STATE (CLOSED))
+ ice_all_new_or_closed = FALSE;
+ if (ice_state != ICE_STATE (NEW) && ice_state != ICE_STATE (CHECKING))
+ ice_all_new_connecting_or_checking = FALSE;
+ if (ice_state != ICE_STATE (CONNECTED) && ice_state != ICE_STATE (COMPLETED)
+ && ice_state != ICE_STATE (CLOSED))
+ ice_all_connected_completed_or_closed = FALSE;
+ }
+
+ GST_TRACE_OBJECT (webrtc, "ICE connection state: 0x%x. DTLS connection "
+ "state: 0x%x", any_ice_state, any_dtls_state);
+
+ /* The RTCPeerConnection object's [[ isClosed]] slot is true. */
+ if (webrtc->priv->is_closed) {
+ GST_TRACE_OBJECT (webrtc, "returning closed");
+ return STATE (CLOSED);
+ }
+
+ /* Any of the RTCIceTransport s or RTCDtlsTransport s are in a failed state. */
+ if (any_ice_state & (1 << ICE_STATE (FAILED))) {
+ GST_TRACE_OBJECT (webrtc, "returning failed");
+ return STATE (FAILED);
+ }
+ if (any_dtls_state & (1 << DTLS_STATE (FAILED))) {
+ GST_TRACE_OBJECT (webrtc, "returning failed");
+ return STATE (FAILED);
+ }
+
+ /* Any of the RTCIceTransport's or RTCDtlsTransport's are in the disconnected
+ * state. */
+ if (any_ice_state & (1 << ICE_STATE (DISCONNECTED))) {
+ GST_TRACE_OBJECT (webrtc, "returning disconnected");
+ return STATE (DISCONNECTED);
+ }
+
+ /* All RTCIceTransports and RTCDtlsTransports are in the new or closed
+ * state, or there are no transports. */
+ if ((dtls_all_new_or_closed && ice_all_new_or_closed)
+ || webrtc->priv->transceivers->len == 0) {
+ GST_TRACE_OBJECT (webrtc, "returning new");
+ return STATE (NEW);
+ }
+
+ /* All RTCIceTransports and RTCDtlsTransports are in the new, connecting
+ * or checking state. */
+ if (dtls_all_new_connecting_or_checking && ice_all_new_connecting_or_checking) {
+ GST_TRACE_OBJECT (webrtc, "returning connecting");
+ return STATE (CONNECTING);
+ }
+
+ /* All RTCIceTransports and RTCDtlsTransports are in the connected,
+ * completed or closed state. */
+ if (dtls_all_connected_completed_or_closed
+ && ice_all_connected_completed_or_closed) {
+ GST_TRACE_OBJECT (webrtc, "returning connected");
+ return STATE (CONNECTED);
+ }
+
+ /* FIXME: Unspecified state that happens for us */
+ if ((dtls_all_new_connecting_or_checking
+ || dtls_all_connected_completed_or_closed)
+ && (ice_all_new_connecting_or_checking
+ || ice_all_connected_completed_or_closed)) {
+ GST_TRACE_OBJECT (webrtc, "returning connecting");
+ return STATE (CONNECTING);
+ }
+
+ GST_FIXME_OBJECT (webrtc,
+ "Undefined situation detected, returning old state");
+ return webrtc->peer_connection_state;
+ #undef DTLS_STATE
+ #undef ICE_STATE
+ #undef STATE
+ }
+
++#ifdef __TIZEN__
++static void
++_update_and_notify_ice_gathering_state (GstWebRTCBin * webrtc, GstWebRTCICEGatheringState state)
++{
++ GstWebRTCICEGatheringState old_state = webrtc->ice_gathering_state;
++
++ if (state != webrtc->ice_gathering_state) {
++ gchar *old_s, *new_s;
++
++ old_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_GATHERING_STATE,
++ old_state);
++ new_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_GATHERING_STATE,
++ state);
++ GST_INFO_OBJECT (webrtc, "ICE gathering state change from %s(%u) to %s(%u)",
++ old_s, old_state, new_s, state);
++ g_free (old_s);
++ g_free (new_s);
++
++ webrtc->ice_gathering_state = state;
++ PC_UNLOCK (webrtc);
++ g_object_notify (G_OBJECT (webrtc), "ice-gathering-state");
++ PC_LOCK (webrtc);
++ }
++}
++#endif
++
+ static GstStructure *
+ _update_ice_gathering_state_task (GstWebRTCBin * webrtc, gpointer data)
+ {
++#ifndef __TIZEN__
+ GstWebRTCICEGatheringState old_state = webrtc->ice_gathering_state;
++#endif
+ GstWebRTCICEGatheringState new_state;
+
+ new_state = _collate_ice_gathering_states (webrtc);
+
+ /* If the new state is complete, before we update the public state,
+ * check if anyone published more ICE candidates while we were collating
+ * and stop if so, because it means there's a new later
+ * ice_gathering_state_task queued */
+ if (new_state == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
+ ICE_LOCK (webrtc);
+ if (webrtc->priv->pending_local_ice_candidates->len != 0) {
+ /* ICE candidates queued for emissiong -> we're gathering, not complete */
++#ifdef __TIZEN__
++ webrtc->pending_ice_gathering_state = GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE;
++ GST_INFO_OBJECT (webrtc, "set pending_ice_gathering_state to (%u)",
++ webrtc->pending_ice_gathering_state);
++ ICE_UNLOCK (webrtc);
++ return NULL;
++ }
++#else
+ new_state = GST_WEBRTC_ICE_GATHERING_STATE_GATHERING;
+ }
++#endif
+ ICE_UNLOCK (webrtc);
+ }
+
++#ifdef __TIZEN__
++ _update_and_notify_ice_gathering_state (webrtc, new_state);
++#else
+ if (new_state != webrtc->ice_gathering_state) {
+ gchar *old_s, *new_s;
+
+ old_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_GATHERING_STATE,
+ old_state);
+ new_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_GATHERING_STATE,
+ new_state);
+ GST_INFO_OBJECT (webrtc, "ICE gathering state change from %s(%u) to %s(%u)",
+ old_s, old_state, new_s, new_state);
+ g_free (old_s);
+ g_free (new_s);
+
+ webrtc->ice_gathering_state = new_state;
+ PC_UNLOCK (webrtc);
+ g_object_notify (G_OBJECT (webrtc), "ice-gathering-state");
+ PC_LOCK (webrtc);
+ }
+
++#endif
+ return NULL;
+ }
+
+ static void
+ _update_ice_gathering_state (GstWebRTCBin * webrtc)
+ {
+ gst_webrtc_bin_enqueue_task (webrtc, _update_ice_gathering_state_task, NULL,
+ NULL, NULL);
+ }
+
+ static GstStructure *
+ _update_ice_connection_state_task (GstWebRTCBin * webrtc, gpointer data)
+ {
+ GstWebRTCICEConnectionState old_state = webrtc->ice_connection_state;
+ GstWebRTCICEConnectionState new_state;
+
+ new_state = _collate_ice_connection_states (webrtc);
+
+ if (new_state != old_state) {
+ gchar *old_s, *new_s;
+
+ old_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_CONNECTION_STATE,
+ old_state);
+ new_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_CONNECTION_STATE,
+ new_state);
+ GST_INFO_OBJECT (webrtc,
+ "ICE connection state change from %s(%u) to %s(%u)", old_s, old_state,
+ new_s, new_state);
+ g_free (old_s);
+ g_free (new_s);
+
+ webrtc->ice_connection_state = new_state;
+ PC_UNLOCK (webrtc);
+ g_object_notify (G_OBJECT (webrtc), "ice-connection-state");
+ PC_LOCK (webrtc);
+ }
+
+ return NULL;
+ }
+
+ static void
+ _update_ice_connection_state (GstWebRTCBin * webrtc)
+ {
+ gst_webrtc_bin_enqueue_task (webrtc, _update_ice_connection_state_task, NULL,
+ NULL, NULL);
+ }
+
+ static GstStructure *
+ _update_peer_connection_state_task (GstWebRTCBin * webrtc, gpointer data)
+ {
+ GstWebRTCPeerConnectionState old_state = webrtc->peer_connection_state;
+ GstWebRTCPeerConnectionState new_state;
+
+ new_state = _collate_peer_connection_states (webrtc);
+
+ if (new_state != old_state) {
+ gchar *old_s, *new_s;
+
+ old_s = _enum_value_to_string (GST_TYPE_WEBRTC_PEER_CONNECTION_STATE,
+ old_state);
+ new_s = _enum_value_to_string (GST_TYPE_WEBRTC_PEER_CONNECTION_STATE,
+ new_state);
+ GST_INFO_OBJECT (webrtc,
+ "Peer connection state change from %s(%u) to %s(%u)", old_s, old_state,
+ new_s, new_state);
+ g_free (old_s);
+ g_free (new_s);
+
+ webrtc->peer_connection_state = new_state;
+ PC_UNLOCK (webrtc);
+ g_object_notify (G_OBJECT (webrtc), "connection-state");
+ PC_LOCK (webrtc);
+ }
+
+ return NULL;
+ }
+
+ static void
+ _update_peer_connection_state (GstWebRTCBin * webrtc)
+ {
+ gst_webrtc_bin_enqueue_task (webrtc, _update_peer_connection_state_task,
+ NULL, NULL, NULL);
+ }
+
+ static gboolean
+ _all_sinks_have_caps (GstWebRTCBin * webrtc)
+ {
+ GList *l;
+ gboolean res = FALSE;
+
+ GST_OBJECT_LOCK (webrtc);
+ l = GST_ELEMENT (webrtc)->pads;
+ for (; l; l = g_list_next (l)) {
+ GstWebRTCBinPad *wpad;
+
+ if (!GST_IS_WEBRTC_BIN_PAD (l->data))
+ continue;
+
+ wpad = GST_WEBRTC_BIN_PAD (l->data);
+ if (GST_PAD_DIRECTION (l->data) == GST_PAD_SINK && !wpad->received_caps
+ && (!wpad->trans || !wpad->trans->stopped)) {
+ goto done;
+ }
+ }
+
+ l = webrtc->priv->pending_pads;
+ for (; l; l = g_list_next (l)) {
+ if (!GST_IS_WEBRTC_BIN_PAD (l->data)) {
+ goto done;
+ }
+ }
+
+ res = TRUE;
+
+ done:
+ GST_OBJECT_UNLOCK (webrtc);
+ return res;
+ }
+
+ /* http://w3c.github.io/webrtc-pc/#dfn-check-if-negotiation-is-needed */
+ static gboolean
+ _check_if_negotiation_is_needed (GstWebRTCBin * webrtc)
+ {
+ int i;
+
+ GST_LOG_OBJECT (webrtc, "checking if negotiation is needed");
+
+ /* We can't negotiate until we have received caps on all our sink pads,
+ * as we will need the ssrcs in our offer / answer */
+ if (!_all_sinks_have_caps (webrtc)) {
+ GST_LOG_OBJECT (webrtc,
+ "no negotiation possible until caps have been received on all sink pads");
+ return FALSE;
+ }
+
+ /* If any implementation-specific negotiation is required, as described at
+ * the start of this section, return "true".
+ * FIXME */
+ /* FIXME: emit when input caps/format changes? */
+
+ if (!webrtc->current_local_description) {
+ GST_LOG_OBJECT (webrtc, "no local description set");
+ return TRUE;
+ }
+
+ if (!webrtc->current_remote_description) {
+ GST_LOG_OBJECT (webrtc, "no remote description set");
+ return TRUE;
+ }
+
+ /* If connection has created any RTCDataChannel's, and no m= section has
+ * been negotiated yet for data, return "true". */
+ if (webrtc->priv->data_channels->len > 0) {
+ if (_message_get_datachannel_index (webrtc->current_local_description->
+ sdp) >= G_MAXUINT) {
+ GST_LOG_OBJECT (webrtc,
+ "no data channel media section and have %u " "transports",
+ webrtc->priv->data_channels->len);
+ return TRUE;
+ }
+ }
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *trans;
+
+ trans = g_ptr_array_index (webrtc->priv->transceivers, i);
+
+ if (trans->stopped) {
+ /* FIXME: If t is stopped and is associated with an m= section according to
+ * [JSEP] (section 3.4.1.), but the associated m= section is not yet
+ * rejected in connection's currentLocalDescription or
+ * currentRemoteDescription , return "true". */
+ GST_FIXME_OBJECT (webrtc,
+ "check if the transceiver is rejected in descriptions");
+ } else {
+ const GstSDPMedia *media;
+ GstWebRTCRTPTransceiverDirection local_dir, remote_dir;
+
+ if (trans->mline == -1 || trans->mid == NULL) {
+ GST_LOG_OBJECT (webrtc, "unassociated transceiver %i %" GST_PTR_FORMAT
+ " mid %s", i, trans, trans->mid);
+ return TRUE;
+ }
+ /* internal inconsistency */
+ g_assert (trans->mline <
+ gst_sdp_message_medias_len (webrtc->current_local_description->sdp));
+ g_assert (trans->mline <
+ gst_sdp_message_medias_len (webrtc->current_remote_description->sdp));
+
+ /* FIXME: msid handling
+ * If t's direction is "sendrecv" or "sendonly", and the associated m=
+ * section in connection's currentLocalDescription doesn't contain an
+ * "a=msid" line, return "true". */
+
+ media =
+ gst_sdp_message_get_media (webrtc->current_local_description->sdp,
+ trans->mline);
+ local_dir = _get_direction_from_media (media);
+
+ media =
+ gst_sdp_message_get_media (webrtc->current_remote_description->sdp,
+ trans->mline);
+ remote_dir = _get_direction_from_media (media);
+
+ if (webrtc->current_local_description->type == GST_WEBRTC_SDP_TYPE_OFFER) {
+ /* If connection's currentLocalDescription if of type "offer", and
+ * the direction of the associated m= section in neither the offer
+ * nor answer matches t's direction, return "true". */
+
+ if (local_dir != trans->direction && remote_dir != trans->direction) {
+ gchar *local_str, *remote_str, *dir_str;
+
+ local_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ local_dir);
+ remote_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ remote_dir);
+ dir_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ trans->direction);
+
+ GST_LOG_OBJECT (webrtc, "transceiver direction (%s) doesn't match "
+ "description (local %s remote %s)", dir_str, local_str,
+ remote_str);
+
+ g_free (dir_str);
+ g_free (local_str);
+ g_free (remote_str);
+
+ return TRUE;
+ }
+ } else if (webrtc->current_local_description->type ==
+ GST_WEBRTC_SDP_TYPE_ANSWER) {
+ GstWebRTCRTPTransceiverDirection intersect_dir;
+
+ /* If connection's currentLocalDescription if of type "answer", and
+ * the direction of the associated m= section in the answer does not
+ * match t's direction intersected with the offered direction (as
+ * described in [JSEP] (section 5.3.1.)), return "true". */
+
+ /* remote is the offer, local is the answer */
+ intersect_dir = _intersect_answer_directions (remote_dir, local_dir);
+
+ if (intersect_dir != trans->direction) {
+ gchar *local_str, *remote_str, *inter_str, *dir_str;
+
+ local_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ local_dir);
+ remote_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ remote_dir);
+ dir_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ trans->direction);
+ inter_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ intersect_dir);
+
+ GST_LOG_OBJECT (webrtc, "transceiver direction (%s) doesn't match "
+ "description intersected direction %s (local %s remote %s)",
+ dir_str, local_str, inter_str, remote_str);
+
+ g_free (dir_str);
+ g_free (local_str);
+ g_free (remote_str);
+ g_free (inter_str);
+
+ return TRUE;
+ }
+ }
+ }
+ }
+
+ GST_LOG_OBJECT (webrtc, "no negotiation needed");
+ return FALSE;
+ }
+
+ static GstStructure *
+ _check_need_negotiation_task (GstWebRTCBin * webrtc, gpointer unused)
+ {
+ if (webrtc->priv->need_negotiation) {
+ GST_TRACE_OBJECT (webrtc, "emitting on-negotiation-needed");
+ PC_UNLOCK (webrtc);
+ g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_NEGOTIATION_NEEDED_SIGNAL],
+ 0);
+ PC_LOCK (webrtc);
+ }
+
+ return NULL;
+ }
+
+ /* http://w3c.github.io/webrtc-pc/#dfn-update-the-negotiation-needed-flag */
+ static void
+ _update_need_negotiation (GstWebRTCBin * webrtc)
+ {
+ /* If connection's [[isClosed]] slot is true, abort these steps. */
+ if (webrtc->priv->is_closed)
+ return;
+ /* If connection's signaling state is not "stable", abort these steps. */
+ if (webrtc->signaling_state != GST_WEBRTC_SIGNALING_STATE_STABLE)
+ return;
+
+ /* If the result of checking if negotiation is needed is "false", clear the
+ * negotiation-needed flag by setting connection's [[ needNegotiation]] slot
+ * to false, and abort these steps. */
+ if (!_check_if_negotiation_is_needed (webrtc)) {
+ webrtc->priv->need_negotiation = FALSE;
+ return;
+ }
+ /* If connection's [[needNegotiation]] slot is already true, abort these steps. */
+ if (webrtc->priv->need_negotiation)
+ return;
+ /* Set connection's [[needNegotiation]] slot to true. */
+ webrtc->priv->need_negotiation = TRUE;
+ /* Queue a task to check connection's [[ needNegotiation]] slot and, if still
+ * true, fire a simple event named negotiationneeded at connection. */
+ gst_webrtc_bin_enqueue_task (webrtc, _check_need_negotiation_task, NULL,
+ NULL, NULL);
+ }
+
+ static GstCaps *
+ _query_pad_caps (GstWebRTCBin * webrtc, GstWebRTCRTPTransceiver * rtp_trans,
+ GstWebRTCBinPad * pad, GstCaps * filter, GError ** error)
+ {
+ GstCaps *caps;
+
+ caps = gst_pad_peer_query_caps (GST_PAD (pad), filter);
+ GST_LOG_OBJECT (webrtc, "Using peer query caps: %" GST_PTR_FORMAT, caps);
+
+ if (gst_caps_is_empty (caps)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_CAPS_NEGOTIATION_FAILED,
+ "Caps negotiation on pad %s failed", GST_PAD_NAME (pad));
+ gst_clear_caps (&caps);
+ } else if (!gst_caps_is_fixed (caps) || gst_caps_is_equal (caps, filter)
+ || gst_caps_is_empty (caps) || gst_caps_is_any (caps)) {
+ gst_clear_caps (&caps);
+ }
+
+ gst_caps_unref (filter);
+
+ return caps;
+ }
+
+ static GstCaps *
+ _find_codec_preferences (GstWebRTCBin * webrtc,
+ GstWebRTCRTPTransceiver * rtp_trans, guint media_idx, GError ** error)
+ {
+ WebRTCTransceiver *trans = (WebRTCTransceiver *) rtp_trans;
+ GstCaps *ret = NULL;
+ GstCaps *codec_preferences = NULL;
+ GstWebRTCBinPad *pad = NULL;
+ GstPadDirection direction;
+
+ g_assert (rtp_trans);
+ g_assert (error && *error == NULL);
+
+ GST_LOG_OBJECT (webrtc, "retrieving codec preferences from %" GST_PTR_FORMAT,
+ trans);
+
+ GST_OBJECT_LOCK (rtp_trans);
+ if (rtp_trans->codec_preferences) {
+ GST_LOG_OBJECT (webrtc, "Using codec preferences: %" GST_PTR_FORMAT,
+ rtp_trans->codec_preferences);
+ codec_preferences = gst_caps_ref (rtp_trans->codec_preferences);
+ }
+ GST_OBJECT_UNLOCK (rtp_trans);
+
+ if (rtp_trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY)
+ direction = GST_PAD_SRC;
+ else
+ direction = GST_PAD_SINK;
+
+ pad = _find_pad_for_transceiver (webrtc, direction, rtp_trans);
+
+ /* try to find a pad */
+ if (!pad)
+ pad = _find_pad_for_mline (webrtc, direction, media_idx);
+
+ /* For the case where we have set our transceiver to sendrecv, but the
+ * sink pad has not been requested yet.
+ */
+ if (!pad &&
+ rtp_trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) {
+
+ pad = _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans);
+
+ /* try to find a pad */
+ if (!pad)
+ pad = _find_pad_for_mline (webrtc, GST_PAD_SRC, media_idx);
+ }
+
+ if (pad) {
+ GstCaps *caps = NULL;
+
+ if (pad->received_caps) {
+ caps = gst_caps_ref (pad->received_caps);
+ } else {
+ static GstStaticCaps static_filter =
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) { audio, video }, payload = (int) [ 0, 127 ]");
+ GstCaps *filter = gst_static_caps_get (&static_filter);
+
+ filter = gst_caps_make_writable (filter);
+
+ if (rtp_trans->kind == GST_WEBRTC_KIND_AUDIO)
+ gst_caps_set_simple (filter, "media", G_TYPE_STRING, "audio", NULL);
+ else if (rtp_trans->kind == GST_WEBRTC_KIND_VIDEO)
+ gst_caps_set_simple (filter, "media", G_TYPE_STRING, "video", NULL);
+
+ caps = _query_pad_caps (webrtc, rtp_trans, pad, filter, error);
+ }
+ gst_object_unref (pad);
+
+ if (*error)
+ goto out;
+
+ if (caps &&
+ rtp_trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) {
+ GstWebRTCBinPad *srcpad =
+ _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans);
+
+ if (srcpad) {
+ caps = _query_pad_caps (webrtc, rtp_trans, srcpad, caps, error);
+ gst_object_unref (srcpad);
+
+ if (*error)
+ goto out;
+ }
+ }
+
+ if (caps && codec_preferences) {
+ GstCaps *intersection;
+
+ intersection = gst_caps_intersect_full (codec_preferences, caps,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_clear_caps (&caps);
+
+ if (gst_caps_is_empty (intersection)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_CAPS_NEGOTIATION_FAILED,
+ "Caps negotiation on pad %s failed againt codec preferences",
+ GST_PAD_NAME (pad));
+ gst_clear_caps (&intersection);
+ } else {
+ caps = intersection;
+ }
+ }
+
+ if (caps) {
+ if (trans)
+ gst_caps_replace (&trans->last_configured_caps, caps);
+
+ ret = caps;
+ }
+ }
+
+ if (!ret) {
+ if (codec_preferences)
+ ret = gst_caps_ref (codec_preferences);
+ else if (trans->last_configured_caps)
+ ret = gst_caps_ref (trans->last_configured_caps);
+ }
+
+ out:
+
+ if (codec_preferences)
+ gst_caps_unref (codec_preferences);
+
+ if (!ret)
+ GST_DEBUG_OBJECT (trans, "Could not find caps for mline %u", media_idx);
+
+ return ret;
+ }
+
+ static GstCaps *
+ _add_supported_attributes_to_caps (GstWebRTCBin * webrtc,
+ WebRTCTransceiver * trans, const GstCaps * caps)
+ {
+ GstWebRTCKind kind;
+ GstCaps *ret;
+ guint i;
+
+ if (caps == NULL)
+ return NULL;
+
+ ret = gst_caps_make_writable (caps);
+
+ kind = webrtc_kind_from_caps (ret);
+ for (i = 0; i < gst_caps_get_size (ret); i++) {
+ GstStructure *s = gst_caps_get_structure (ret, i);
+
+ if (trans->do_nack)
+ if (!gst_structure_has_field (s, "rtcp-fb-nack"))
+ gst_structure_set (s, "rtcp-fb-nack", G_TYPE_BOOLEAN, TRUE, NULL);
+
+ if (kind == GST_WEBRTC_KIND_VIDEO
+ && !gst_structure_has_field (s, "rtcp-fb-nack-pli"))
+ gst_structure_set (s, "rtcp-fb-nack-pli", G_TYPE_BOOLEAN, TRUE, NULL);
+ if (!gst_structure_has_field (s, "rtcp-fb-transport-cc"))
+ gst_structure_set (s, "rtcp-fb-transport-cc", G_TYPE_BOOLEAN, TRUE, NULL);
+
+ /* FIXME: codec-specific parameters? */
+ }
+
+ return ret;
+ }
+
+ static void
+ _on_ice_transport_notify_state (GstWebRTCICETransport * transport,
+ GParamSpec * pspec, GstWebRTCBin * webrtc)
+ {
+ _update_ice_connection_state (webrtc);
+ _update_peer_connection_state (webrtc);
+ }
+
+ static void
+ _on_ice_transport_notify_gathering_state (GstWebRTCICETransport * transport,
+ GParamSpec * pspec, GstWebRTCBin * webrtc)
+ {
+ _update_ice_gathering_state (webrtc);
+ }
+
+ static void
+ _on_dtls_transport_notify_state (GstWebRTCDTLSTransport * transport,
+ GParamSpec * pspec, GstWebRTCBin * webrtc)
+ {
+ _update_peer_connection_state (webrtc);
+ }
+
+ static gboolean
+ match_ssrc (GstWebRTCRTPTransceiver * rtp_trans, gconstpointer data)
+ {
+ WebRTCTransceiver *trans = (WebRTCTransceiver *) rtp_trans;
+
+ return (trans->current_ssrc == GPOINTER_TO_UINT (data));
+ }
+
+ static gboolean
+ _on_sending_rtcp (GObject * internal_session, GstBuffer * buffer,
+ gboolean early, gpointer user_data)
+ {
+ GstWebRTCBin *webrtc = user_data;
+ GstRTCPBuffer rtcp = GST_RTCP_BUFFER_INIT;
+ GstRTCPPacket packet;
+
+ if (!gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp))
+ goto done;
+
+ if (gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
+ if (gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_SR) {
+ guint32 ssrc;
+ GstWebRTCRTPTransceiver *rtp_trans;
+ WebRTCTransceiver *trans;
+
+ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL,
+ NULL);
+
+ rtp_trans = _find_transceiver (webrtc, GUINT_TO_POINTER (ssrc),
+ match_ssrc);
+ trans = (WebRTCTransceiver *) rtp_trans;
+
+ if (rtp_trans && rtp_trans->sender && trans->ssrc_event) {
+ GstPad *pad;
+ gchar *pad_name = NULL;
+
+ pad_name =
+ g_strdup_printf ("send_rtcp_src_%u",
+ rtp_trans->sender->transport->session_id);
+ pad = gst_element_get_static_pad (webrtc->rtpbin, pad_name);
+ g_free (pad_name);
+ if (pad) {
+ gst_pad_push_event (pad, gst_event_ref (trans->ssrc_event));
+ gst_object_unref (pad);
+ }
+ }
+ }
+ }
+
+ gst_rtcp_buffer_unmap (&rtcp);
+
+ done:
+ /* False means we don't care about suppression */
+ return FALSE;
+ }
+
+ static void
+ gst_webrtc_bin_attach_tos_to_session (GstWebRTCBin * webrtc, guint session_id)
+ {
+ GObject *internal_session = NULL;
+
+ g_signal_emit_by_name (webrtc->rtpbin, "get-internal-session",
+ session_id, &internal_session);
+
+ if (internal_session) {
+ g_signal_connect (internal_session, "on-sending-rtcp",
+ G_CALLBACK (_on_sending_rtcp), webrtc);
+ g_object_unref (internal_session);
+ }
+ }
+
+ static GstPadProbeReturn
+ _nicesink_pad_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+ {
+ GstWebRTCBin *webrtc = user_data;
+
+ if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info))
+ == GST_EVENT_CUSTOM_DOWNSTREAM_STICKY) {
+ const GstStructure *s =
+ gst_event_get_structure (GST_PAD_PROBE_INFO_EVENT (info));
+
+ if (gst_structure_has_name (s, "GstWebRtcBinUpdateTos")) {
+ guint ssrc;
+ gint priority;
+
+ if (gst_structure_get_uint (s, "ssrc", &ssrc)) {
+ GstWebRTCRTPTransceiver *rtp_trans;
+
+ rtp_trans = _find_transceiver (webrtc, GUINT_TO_POINTER (ssrc),
+ match_ssrc);
+ if (rtp_trans) {
+ WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans);
+ GstWebRTCICEStream *stream = _find_ice_stream_for_session (webrtc,
+ trans->stream->session_id);
+ guint8 dscp = 0;
+
+ /* Set DSCP field based on
+ * https://tools.ietf.org/html/draft-ietf-tsvwg-rtcweb-qos-18#section-5
+ */
+ switch (rtp_trans->sender->priority) {
+ case GST_WEBRTC_PRIORITY_TYPE_VERY_LOW:
+ dscp = 8; /* CS1 */
+ break;
+ case GST_WEBRTC_PRIORITY_TYPE_LOW:
+ dscp = 0; /* DF */
+ break;
+ case GST_WEBRTC_PRIORITY_TYPE_MEDIUM:
+ switch (rtp_trans->kind) {
+ case GST_WEBRTC_KIND_AUDIO:
+ dscp = 46; /* EF */
+ break;
+ case GST_WEBRTC_KIND_VIDEO:
+ dscp = 38; /* AF43 *//* TODO: differentiate non-interactive */
+ break;
+ case GST_WEBRTC_KIND_UNKNOWN:
+ dscp = 0;
+ break;
+ }
+ break;
+ case GST_WEBRTC_PRIORITY_TYPE_HIGH:
+ switch (rtp_trans->kind) {
+ case GST_WEBRTC_KIND_AUDIO:
+ dscp = 46; /* EF */
+ break;
+ case GST_WEBRTC_KIND_VIDEO:
+ dscp = 36; /* AF42 *//* TODO: differentiate non-interactive */
+ break;
+ case GST_WEBRTC_KIND_UNKNOWN:
+ dscp = 0;
+ break;
+ }
+ break;
+ }
+
+ gst_webrtc_ice_set_tos (webrtc->priv->ice, stream, dscp << 2);
+ }
+ } else if (gst_structure_get_enum (s, "sctp-priority",
+ GST_TYPE_WEBRTC_PRIORITY_TYPE, &priority)) {
+ guint8 dscp = 0;
+
+ /* Set DSCP field based on
+ * https://tools.ietf.org/html/draft-ietf-tsvwg-rtcweb-qos-18#section-5
+ */
+ switch (priority) {
+ case GST_WEBRTC_PRIORITY_TYPE_VERY_LOW:
+ dscp = 8; /* CS1 */
+ break;
+ case GST_WEBRTC_PRIORITY_TYPE_LOW:
+ dscp = 0; /* DF */
+ break;
+ case GST_WEBRTC_PRIORITY_TYPE_MEDIUM:
+ dscp = 10; /* AF11 */
+ break;
+ case GST_WEBRTC_PRIORITY_TYPE_HIGH:
+ dscp = 18; /* AF21 */
+ break;
+ }
+ if (webrtc->priv->data_channel_transport)
+ gst_webrtc_ice_set_tos (webrtc->priv->ice,
+ webrtc->priv->data_channel_transport->stream, dscp << 2);
+ }
+ }
+ }
+ return GST_PAD_PROBE_OK;
+ }
+
+ static void gst_webrtc_bin_attach_tos (GstWebRTCBin * webrtc);
+
+ static void
+ gst_webrtc_bin_update_sctp_priority (GstWebRTCBin * webrtc)
+ {
+ GstWebRTCPriorityType sctp_priority = 0;
+ guint i;
+
+ if (!webrtc->priv->sctp_transport)
+ return;
+
+ DC_LOCK (webrtc);
+ for (i = 0; i < webrtc->priv->data_channels->len; i++) {
+ GstWebRTCDataChannel *channel
+ = g_ptr_array_index (webrtc->priv->data_channels, i);
+
+ sctp_priority = MAX (sctp_priority, channel->priority);
+ }
+ DC_UNLOCK (webrtc);
+
+ /* Default priority is low means DSCP field is left as 0 */
+ if (sctp_priority == 0)
+ sctp_priority = GST_WEBRTC_PRIORITY_TYPE_LOW;
+
+ /* Nobody asks for DSCP, leave it as-is */
+ if (sctp_priority == GST_WEBRTC_PRIORITY_TYPE_LOW &&
+ !webrtc->priv->tos_attached)
+ return;
+
+ /* If one stream has a non-default priority, then everyone else does too */
+ gst_webrtc_bin_attach_tos (webrtc);
+
+ webrtc_sctp_transport_set_priority (webrtc->priv->sctp_transport,
+ sctp_priority);
+ }
+
+ static void
+ gst_webrtc_bin_attach_probe_to_ice_sink (GstWebRTCBin * webrtc,
+ GstWebRTCICETransport * transport)
+ {
+ GstPad *pad;
+
+ pad = gst_element_get_static_pad (transport->sink, "sink");
+ gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+ _nicesink_pad_probe, g_object_ref (webrtc),
+ (GDestroyNotify) gst_object_unref);
+ gst_object_unref (pad);
+ }
+
+ static void
+ gst_webrtc_bin_attach_tos (GstWebRTCBin * webrtc)
+ {
+ guint i;
+
+ if (webrtc->priv->tos_attached)
+ return;
+ webrtc->priv->tos_attached = TRUE;
+
+ for (i = 0; i < webrtc->priv->transports->len; i++) {
+ TransportStream *stream = g_ptr_array_index (webrtc->priv->transports, i);
+
+ gst_webrtc_bin_attach_tos_to_session (webrtc, stream->session_id);
+
+ gst_webrtc_bin_attach_probe_to_ice_sink (webrtc,
+ stream->transport->transport);
+ }
+
+ gst_webrtc_bin_update_sctp_priority (webrtc);
+ }
+
+ static WebRTCTransceiver *
+ _create_webrtc_transceiver (GstWebRTCBin * webrtc,
+ GstWebRTCRTPTransceiverDirection direction, guint mline, GstWebRTCKind kind,
+ GstCaps * codec_preferences)
+ {
+ WebRTCTransceiver *trans;
+ GstWebRTCRTPTransceiver *rtp_trans;
+ GstWebRTCRTPSender *sender;
+ GstWebRTCRTPReceiver *receiver;
+
+ sender = gst_webrtc_rtp_sender_new ();
+ receiver = gst_webrtc_rtp_receiver_new ();
+ trans = webrtc_transceiver_new (webrtc, sender, receiver);
+ rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans);
+ rtp_trans->direction = direction;
+ rtp_trans->mline = mline;
+ rtp_trans->kind = kind;
+ rtp_trans->codec_preferences =
+ codec_preferences ? gst_caps_ref (codec_preferences) : NULL;
+ /* FIXME: We don't support stopping transceiver yet so they're always not stopped */
+ rtp_trans->stopped = FALSE;
+
+ g_signal_connect_object (sender, "notify::priority",
+ G_CALLBACK (gst_webrtc_bin_attach_tos), webrtc, G_CONNECT_SWAPPED);
+
+ g_ptr_array_add (webrtc->priv->transceivers, trans);
+
+ gst_object_unref (sender);
+ gst_object_unref (receiver);
+
+ g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL],
+ 0, trans);
+
+ return trans;
+ }
+
++#ifdef __TIZEN__
++/* Detach @trans from the bin's transceiver array and drop a reference.
++ *
++ * NOTE(review): if priv->transceivers was created with a GDestroyNotify
++ * free function (upstream webrtcbin uses gst_object_unref there), then
++ * g_ptr_array_remove() already releases the array's reference and the
++ * explicit gst_object_unref() below would over-release @trans — verify
++ * against the array's construction before relying on this helper. */
++static void
++_remove_webrtc_transceiver (GstWebRTCBin * webrtc,
++ GstWebRTCRTPTransceiver * trans)
++{
++ g_ptr_array_remove (webrtc->priv->transceivers, trans);
++ gst_object_unref (trans);
++}
++#endif
++
+ static TransportStream *
+ _create_transport_channel (GstWebRTCBin * webrtc, guint session_id)
+ {
+ GstWebRTCDTLSTransport *transport;
+ TransportStream *ret;
+ gchar *pad_name;
+
+ /* FIXME: how to parametrize the sender and the receiver */
+ ret = transport_stream_new (webrtc, session_id);
+ transport = ret->transport;
+
+ g_signal_connect (G_OBJECT (transport->transport), "notify::state",
+ G_CALLBACK (_on_ice_transport_notify_state), webrtc);
+ g_signal_connect (G_OBJECT (transport->transport),
+ "notify::gathering-state",
+ G_CALLBACK (_on_ice_transport_notify_gathering_state), webrtc);
+ g_signal_connect (G_OBJECT (transport), "notify::state",
+ G_CALLBACK (_on_dtls_transport_notify_state), webrtc);
+ if (webrtc->priv->tos_attached)
+ gst_webrtc_bin_attach_probe_to_ice_sink (webrtc, transport->transport);
+
+ gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (ret->send_bin));
+ gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (ret->receive_bin));
+ g_ptr_array_add (webrtc->priv->transports, ret);
+
+ pad_name = g_strdup_printf ("recv_rtcp_sink_%u", ret->session_id);
+ if (!gst_element_link_pads (GST_ELEMENT (ret->receive_bin), "rtcp_src",
+ GST_ELEMENT (webrtc->rtpbin), pad_name))
+ g_warn_if_reached ();
+ g_free (pad_name);
+
+ pad_name = g_strdup_printf ("send_rtcp_src_%u", ret->session_id);
+ if (!gst_element_link_pads (GST_ELEMENT (webrtc->rtpbin), pad_name,
+ GST_ELEMENT (ret->send_bin), "rtcp_sink"))
+ g_warn_if_reached ();
+ g_free (pad_name);
+
+ GST_TRACE_OBJECT (webrtc,
+ "Create transport %" GST_PTR_FORMAT " for session %u", ret, session_id);
+
+ return ret;
+ }
+
+ static TransportStream *
+ _get_or_create_rtp_transport_channel (GstWebRTCBin * webrtc, guint session_id)
+ {
+ TransportStream *ret;
+
+ ret = _find_transport_for_session (webrtc, session_id);
+
+ if (!ret)
+ ret = _create_transport_channel (webrtc, session_id);
+
+ gst_element_sync_state_with_parent (GST_ELEMENT (ret->send_bin));
+ gst_element_sync_state_with_parent (GST_ELEMENT (ret->receive_bin));
+
+ return ret;
+ }
+
+ /* this is called from the webrtc thread with the pc lock held */
+ static void
+ _on_data_channel_ready_state (WebRTCDataChannel * channel,
+ GParamSpec * pspec, GstWebRTCBin * webrtc)
+ {
+ GstWebRTCDataChannelState ready_state;
+
+ g_object_get (channel, "ready-state", &ready_state, NULL);
+
+ if (ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_OPEN) {
+ gboolean found;
+
+ DC_LOCK (webrtc);
+ found = g_ptr_array_remove (webrtc->priv->pending_data_channels, channel);
+ if (found == FALSE) {
+ GST_FIXME_OBJECT (webrtc, "Received open for unknown data channel");
+ DC_UNLOCK (webrtc);
+ return;
+ }
+
+ g_ptr_array_add (webrtc->priv->data_channels, gst_object_ref (channel));
+ DC_UNLOCK (webrtc);
+
+ gst_webrtc_bin_update_sctp_priority (webrtc);
+
+ g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_DATA_CHANNEL_SIGNAL], 0,
+ channel);
+ } else if (ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED) {
+ gboolean found;
+
+ DC_LOCK (webrtc);
+ found = g_ptr_array_remove (webrtc->priv->pending_data_channels, channel)
+ || g_ptr_array_remove (webrtc->priv->data_channels, channel);
+
+ if (found == FALSE) {
+ GST_FIXME_OBJECT (webrtc, "Received close for unknown data channel");
+ }
+ DC_UNLOCK (webrtc);
+ }
+ }
+
+ static void
+ _on_sctpdec_pad_added (GstElement * sctpdec, GstPad * pad,
+ GstWebRTCBin * webrtc)
+ {
+ WebRTCDataChannel *channel;
+ guint stream_id;
+ GstPad *sink_pad;
+
+ if (sscanf (GST_PAD_NAME (pad), "src_%u", &stream_id) != 1)
+ return;
+
+ DC_LOCK (webrtc);
+ channel = _find_data_channel_for_id (webrtc, stream_id);
+ if (!channel) {
+ channel = g_object_new (WEBRTC_TYPE_DATA_CHANNEL, NULL);
+ channel->parent.id = stream_id;
+ channel->webrtcbin = webrtc;
+
+ gst_bin_add (GST_BIN (webrtc), channel->appsrc);
+ gst_bin_add (GST_BIN (webrtc), channel->appsink);
+
+ gst_element_sync_state_with_parent (channel->appsrc);
+ gst_element_sync_state_with_parent (channel->appsink);
+
+ webrtc_data_channel_link_to_sctp (channel, webrtc->priv->sctp_transport);
+
+ g_ptr_array_add (webrtc->priv->pending_data_channels, channel);
+ }
+ DC_UNLOCK (webrtc);
+
+ g_signal_connect (channel, "notify::ready-state",
+ G_CALLBACK (_on_data_channel_ready_state), webrtc);
+
+ sink_pad = gst_element_get_static_pad (channel->appsink, "sink");
+ if (gst_pad_link (pad, sink_pad) != GST_PAD_LINK_OK)
+ GST_WARNING_OBJECT (channel, "Failed to link sctp pad %s with channel %"
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), channel);
+ gst_object_unref (sink_pad);
+ }
+
+ static void
+ _on_sctp_state_notify (WebRTCSCTPTransport * sctp, GParamSpec * pspec,
+ GstWebRTCBin * webrtc)
+ {
+ GstWebRTCSCTPTransportState state;
+
+ g_object_get (sctp, "state", &state, NULL);
+
+ if (state == GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED) {
+ int i;
+
+ GST_DEBUG_OBJECT (webrtc, "SCTP association established");
+
+ DC_LOCK (webrtc);
+ for (i = 0; i < webrtc->priv->data_channels->len; i++) {
+ WebRTCDataChannel *channel;
+
+ channel = g_ptr_array_index (webrtc->priv->data_channels, i);
+
+ webrtc_data_channel_link_to_sctp (channel, webrtc->priv->sctp_transport);
+
+ if (!channel->parent.negotiated && !channel->opened)
+ webrtc_data_channel_start_negotiation (channel);
+ }
+ DC_UNLOCK (webrtc);
+ }
+ }
+
+ /* Forward declaration so we can easily disconnect the signal handler */
+ static void _on_sctp_notify_dtls_state (GstWebRTCDTLSTransport * transport,
+ GParamSpec * pspec, GstWebRTCBin * webrtc);
+
+ static GstStructure *
+ _sctp_check_dtls_state_task (GstWebRTCBin * webrtc, gpointer unused)
+ {
+ TransportStream *stream;
+ GstWebRTCDTLSTransport *transport;
+ GstWebRTCDTLSTransportState dtls_state;
+ WebRTCSCTPTransport *sctp_transport;
+
+ stream = webrtc->priv->data_channel_transport;
+ transport = stream->transport;
+
+ g_object_get (transport, "state", &dtls_state, NULL);
+ /* Not connected yet so just return */
+ if (dtls_state != GST_WEBRTC_DTLS_TRANSPORT_STATE_CONNECTED) {
+ GST_DEBUG_OBJECT (webrtc,
+ "Data channel DTLS connection is not ready yet: %d", dtls_state);
+ return NULL;
+ }
+
+ GST_DEBUG_OBJECT (webrtc, "Data channel DTLS connection is now ready");
+ sctp_transport = webrtc->priv->sctp_transport;
+
+ /* Not locked state anymore so this was already taken care of before */
+ if (!gst_element_is_locked_state (sctp_transport->sctpdec))
+ return NULL;
+
+ /* Start up the SCTP elements now that the DTLS connection is established */
+ gst_element_set_locked_state (sctp_transport->sctpdec, FALSE);
+ gst_element_set_locked_state (sctp_transport->sctpenc, FALSE);
+
+ gst_element_sync_state_with_parent (GST_ELEMENT (sctp_transport->sctpdec));
+ gst_element_sync_state_with_parent (GST_ELEMENT (sctp_transport->sctpenc));
+
+ if (sctp_transport->sctpdec_block_id) {
+ GstPad *receive_srcpad;
+
+ receive_srcpad =
+ gst_element_get_static_pad (GST_ELEMENT (stream->receive_bin),
+ "data_src");
+ gst_pad_remove_probe (receive_srcpad, sctp_transport->sctpdec_block_id);
+
+ sctp_transport->sctpdec_block_id = 0;
+ gst_object_unref (receive_srcpad);
+ }
+
+ g_signal_handlers_disconnect_by_func (transport, _on_sctp_notify_dtls_state,
+ webrtc);
+
+ return NULL;
+ }
+
+ static void
+ _on_sctp_notify_dtls_state (GstWebRTCDTLSTransport * transport,
+ GParamSpec * pspec, GstWebRTCBin * webrtc)
+ {
+ GstWebRTCDTLSTransportState dtls_state;
+
+ g_object_get (transport, "state", &dtls_state, NULL);
+
+ GST_TRACE_OBJECT (webrtc, "Data channel DTLS state changed to %d",
+ dtls_state);
+
+ /* Connected now, so schedule a task to update the state of the SCTP
+ * elements */
+ if (dtls_state == GST_WEBRTC_DTLS_TRANSPORT_STATE_CONNECTED) {
+ gst_webrtc_bin_enqueue_task (webrtc,
+ (GstWebRTCBinFunc) _sctp_check_dtls_state_task, NULL, NULL, NULL);
+ }
+ }
+
+ /* Pad probe installed upstream of sctpdec while the DTLS connection is
+  * still being established: drops events and blocks buffers/buffer-lists. */
+ static GstPadProbeReturn
+ sctp_pad_block (GstPad * pad, GstPadProbeInfo * info, gpointer unused)
+ {
+   if (!GST_IS_EVENT (info->data)) {
+     /* Hold back actual data-flow so it never reaches a pad that is not
+      * ready yet, which would cause GST_FLOW_FLUSHING and everything to
+      * silently stop. */
+     GST_LOG_OBJECT (pad, "blocking pad with data %" GST_PTR_FORMAT,
+         info->data);
+     return GST_PAD_PROBE_OK;
+   }
+
+   /* Events are dropped rather than blocked on: sticky events are forwarded
+    * again later once we unblock, and forwarding them now might cause a
+    * spurious GST_FLOW_FLUSHING. */
+   return GST_PAD_PROBE_DROP;
+ }
+
+ /* Lazily create the transport stream + SCTP transport pair used for data
+  * channels.  On first call this builds the SCTP elements, keeps them
+  * locked (they may only start once DTLS is connected), blocks data flow
+  * into sctpdec with a pad probe, links everything and arranges for
+  * _sctp_check_dtls_state_task to unlock the elements when DTLS reaches
+  * CONNECTED.  Subsequent calls return the cached transport.
+  * NOTE(review): caller is expected to hold the appropriate webrtc state
+  * lock — confirm against call sites. */
+ static TransportStream *
+ _get_or_create_data_channel_transports (GstWebRTCBin * webrtc, guint session_id)
+ {
+ if (!webrtc->priv->data_channel_transport) {
+ TransportStream *stream;
+ WebRTCSCTPTransport *sctp_transport;
+
+ stream = _find_transport_for_session (webrtc, session_id);
+
+ if (!stream)
+ stream = _create_transport_channel (webrtc, session_id);
+
+ webrtc->priv->data_channel_transport = stream;
+
+ if (!(sctp_transport = webrtc->priv->sctp_transport)) {
+ sctp_transport = webrtc_sctp_transport_new ();
+ sctp_transport->transport =
+ g_object_ref (webrtc->priv->data_channel_transport->transport);
+ sctp_transport->webrtcbin = webrtc;
+
+ /* Don't automatically start SCTP elements as part of webrtcbin. We
+ * need to delay this until the DTLS transport is fully connected! */
+ gst_element_set_locked_state (sctp_transport->sctpdec, TRUE);
+ gst_element_set_locked_state (sctp_transport->sctpenc, TRUE);
+
+ gst_bin_add (GST_BIN (webrtc), sctp_transport->sctpdec);
+ gst_bin_add (GST_BIN (webrtc), sctp_transport->sctpenc);
+ }
+
+ g_signal_connect (sctp_transport->sctpdec, "pad-added",
+ G_CALLBACK (_on_sctpdec_pad_added), webrtc);
+ g_signal_connect (sctp_transport, "notify::state",
+ G_CALLBACK (_on_sctp_state_notify), webrtc);
+
+ /* Block data flow into sctpdec until it is unlocked and started; the
+ * probe is removed in _sctp_check_dtls_state_task. */
+ if (sctp_transport->sctpdec_block_id == 0) {
+ GstPad *receive_srcpad;
+ receive_srcpad =
+ gst_element_get_static_pad (GST_ELEMENT (stream->receive_bin),
+ "data_src");
+ sctp_transport->sctpdec_block_id =
+ gst_pad_add_probe (receive_srcpad,
+ GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM,
+ (GstPadProbeCallback) sctp_pad_block, NULL, NULL);
+ gst_object_unref (receive_srcpad);
+ }
+
+ if (!gst_element_link_pads (GST_ELEMENT (stream->receive_bin), "data_src",
+ GST_ELEMENT (sctp_transport->sctpdec), "sink"))
+ g_warn_if_reached ();
+
+ if (!gst_element_link_pads (GST_ELEMENT (sctp_transport->sctpenc), "src",
+ GST_ELEMENT (stream->send_bin), "data_sink"))
+ g_warn_if_reached ();
+
+ gst_element_sync_state_with_parent (GST_ELEMENT (stream->send_bin));
+ gst_element_sync_state_with_parent (GST_ELEMENT (stream->receive_bin));
+
+ if (!webrtc->priv->sctp_transport) {
+ /* Connect to the notify::state signal to get notified when the DTLS
+ * connection is established. Only then can we start the SCTP elements */
+ g_signal_connect (stream->transport, "notify::state",
+ G_CALLBACK (_on_sctp_notify_dtls_state), webrtc);
+
+ /* As this would be racy otherwise, also schedule a task that checks the
+ * current state of the connection already without getting the signal
+ * called */
+ gst_webrtc_bin_enqueue_task (webrtc,
+ (GstWebRTCBinFunc) _sctp_check_dtls_state_task, NULL, NULL, NULL);
+ }
+
+ webrtc->priv->sctp_transport = sctp_transport;
+
+ gst_webrtc_bin_update_sctp_priority (webrtc);
+ }
+
+ return webrtc->priv->data_channel_transport;
+ }
+
+ /* Dispatch to the data-channel or RTP transport factory depending on
+  * @is_datachannel. */
+ static TransportStream *
+ _get_or_create_transport_stream (GstWebRTCBin * webrtc, guint session_id,
+     gboolean is_datachannel)
+ {
+   return is_datachannel
+       ? _get_or_create_data_channel_transports (webrtc, session_id)
+       : _get_or_create_rtp_transport_channel (webrtc, session_id);
+ }
+
+ /* Linear search: return the index of the first element of @array equal to
+  * @val, or G_MAXUINT when it is not present. */
+ static guint
+ g_array_find_uint (GArray * array, guint val)
+ {
+   guint idx = 0;
+
+   while (idx < array->len) {
+     if (g_array_index (array, guint, idx) == val)
+       return idx;
+     idx++;
+   }
+
+   return G_MAXUINT;
+ }
+
+ /* Pick the first dynamic RTP payload type in [96, 127] not yet present in
+  * @reserved_pts, append it to the reserved list and leave it in *i.
+  * Returns FALSE when the whole dynamic range is exhausted (in which case
+  * *i ends up one past the range, as before). */
+ static gboolean
+ _pick_available_pt (GArray * reserved_pts, guint * i)
+ {
+   for (*i = 96; *i <= 127; (*i)++) {
+     if (g_array_find_uint (reserved_pts, *i) != G_MAXUINT)
+       continue;
+
+     g_array_append_val (reserved_pts, *i);
+     return TRUE;
+   }
+
+   return FALSE;
+ }
+
+ /* Add RED/ULPFEC payload types for @trans to @media when ULP_RED FEC is
+  * enabled and a clock-rate is known.  Newly picked dynamic payload types
+  * are appended to @reserved_pts; *rtx_target_pt is redirected to the RED
+  * payload type so retransmission protects the RED stream.  Returns FALSE
+  * only when no dynamic payload type was available. */
+ static gboolean
+ _pick_fec_payload_types (GstWebRTCBin * webrtc, WebRTCTransceiver * trans,
+ GArray * reserved_pts, gint clockrate, gint * rtx_target_pt,
+ GstSDPMedia * media)
+ {
+ gboolean ret = TRUE;
+
+ if (trans->fec_type == GST_WEBRTC_FEC_TYPE_NONE)
+ goto done;
+
+ if (trans->fec_type == GST_WEBRTC_FEC_TYPE_ULP_RED && clockrate != -1) {
+ guint pt;
+ gchar *str;
+
+ if (!(ret = _pick_available_pt (reserved_pts, &pt)))
+ goto done;
+
+ /* https://tools.ietf.org/html/rfc5109#section-14.1 */
+
+ str = g_strdup_printf ("%u", pt);
+ gst_sdp_media_add_format (media, str);
+ g_free (str);
+ str = g_strdup_printf ("%u red/%d", pt, clockrate);
+ gst_sdp_media_add_attribute (media, "rtpmap", str);
+ g_free (str);
+
+ /* RTX should target the RED payload type, not the original one */
+ *rtx_target_pt = pt;
+
+ if (!(ret = _pick_available_pt (reserved_pts, &pt)))
+ goto done;
+
+ str = g_strdup_printf ("%u", pt);
+ gst_sdp_media_add_format (media, str);
+ g_free (str);
+ str = g_strdup_printf ("%u ulpfec/%d", pt, clockrate);
+ gst_sdp_media_add_attribute (media, "rtpmap", str);
+ g_free (str);
+ }
+
+ done:
+ return ret;
+ }
+
+ /* Add an RTX (RFC 4588) payload type targeting @target_pt to @media when
+  * NACKs are enabled on @trans.  (Re)builds trans->local_rtx_ssrc_map,
+  * mapping @target_ssrc to a freshly randomized retransmission ssrc.
+  * Returns FALSE only when no dynamic payload type was available. */
+ static gboolean
+ _pick_rtx_payload_types (GstWebRTCBin * webrtc, WebRTCTransceiver * trans,
+ GArray * reserved_pts, gint clockrate, gint target_pt, guint target_ssrc,
+ GstSDPMedia * media)
+ {
+ gboolean ret = TRUE;
+
+ /* always reset the map, even when do_nack is off */
+ if (trans->local_rtx_ssrc_map)
+ gst_structure_free (trans->local_rtx_ssrc_map);
+
+ trans->local_rtx_ssrc_map =
+ gst_structure_new_empty ("application/x-rtp-ssrc-map");
+
+ if (trans->do_nack) {
+ guint pt;
+ gchar *str;
+
+ if (!(ret = _pick_available_pt (reserved_pts, &pt)))
+ goto done;
+
+ /* https://tools.ietf.org/html/rfc4588#section-8.6 */
+
+ /* map the original ssrc to a new random rtx ssrc */
+ str = g_strdup_printf ("%u", target_ssrc);
+ gst_structure_set (trans->local_rtx_ssrc_map, str, G_TYPE_UINT,
+ g_random_int (), NULL);
+ g_free (str);
+
+ str = g_strdup_printf ("%u", pt);
+ gst_sdp_media_add_format (media, str);
+ g_free (str);
+
+ str = g_strdup_printf ("%u rtx/%d", pt, clockrate);
+ gst_sdp_media_add_attribute (media, "rtpmap", str);
+ g_free (str);
+
+ str = g_strdup_printf ("%u apt=%d", pt, target_pt);
+ gst_sdp_media_add_attribute (media, "fmtp", str);
+ g_free (str);
+ }
+
+ done:
+ return ret;
+ }
+
+ /* GstStructureForeachFunc: emit one "a=ssrc-group:FID <ssrc> <rtx-ssrc>"
+  * attribute per entry of the local RTX ssrc map.
+  * https://tools.ietf.org/html/rfc5576#section-4.2 */
+ static gboolean
+ _media_add_rtx_ssrc_group (GQuark field_id, const GValue * value,
+     GstSDPMedia * media)
+ {
+   gchar *attr_val = g_strdup_printf ("FID %s %u",
+       g_quark_to_string (field_id), g_value_get_uint (value));
+
+   gst_sdp_media_add_attribute (media, "ssrc-group", attr_val);
+   g_free (attr_val);
+
+   return TRUE;
+ }
+
+ /* Closure for _media_add_rtx_ssrc(): the media section being filled, the
+  * bin (source of the rtpbin SDES cname) and the transceiver whose name is
+  * advertised as msid. */
+ typedef struct
+ {
+ GstSDPMedia *media;
+ GstWebRTCBin *webrtc;
+ WebRTCTransceiver *trans;
+ } RtxSsrcData;
+
+ /* GstStructureForeachFunc: for one RTX ssrc (the map's value), add
+  * "a=ssrc:<ssrc> msid:..." and "a=ssrc:<ssrc> cname:..." attributes to
+  * the media.  NOTE(review): assumes rtpbin's "sdes" structure always
+  * carries a "cname" string — confirm, cname is used unchecked. */
+ static gboolean
+ _media_add_rtx_ssrc (GQuark field_id, const GValue * value, RtxSsrcData * data)
+ {
+ gchar *str;
+ GstStructure *sdes;
+ const gchar *cname;
+
+ g_object_get (data->webrtc->rtpbin, "sdes", &sdes, NULL);
+ /* http://www.freesoft.org/CIE/RFC/1889/24.htm */
+ cname = gst_structure_get_string (sdes, "cname");
+
+ /* https://tools.ietf.org/html/draft-ietf-mmusic-msid-16 */
+ str =
+ g_strdup_printf ("%u msid:%s %s", g_value_get_uint (value),
+ cname, GST_OBJECT_NAME (data->trans));
+ gst_sdp_media_add_attribute (data->media, "ssrc", str);
+ g_free (str);
+
+ str = g_strdup_printf ("%u cname:%s", g_value_get_uint (value), cname);
+ gst_sdp_media_add_attribute (data->media, "ssrc", str);
+ g_free (str);
+
+ gst_structure_free (sdes);
+
+ return TRUE;
+ }
+
+ /* Add all ssrc-related attributes for @trans to @media: ssrc-group FID
+  * lines for the RTX map, then per-ssrc msid/cname lines for every ssrc
+  * found in @caps, and finally msid/cname lines for the RTX ssrcs. */
+ static void
+ _media_add_ssrcs (GstSDPMedia * media, GstCaps * caps, GstWebRTCBin * webrtc,
+ WebRTCTransceiver * trans)
+ {
+ guint i;
+ RtxSsrcData data = { media, webrtc, trans };
+ const gchar *cname;
+ GstStructure *sdes;
+
+ g_object_get (webrtc->rtpbin, "sdes", &sdes, NULL);
+ /* http://www.freesoft.org/CIE/RFC/1889/24.htm */
+ cname = gst_structure_get_string (sdes, "cname");
+
+ /* a=ssrc-group:FID lines must come first */
+ if (trans->local_rtx_ssrc_map)
+ gst_structure_foreach (trans->local_rtx_ssrc_map,
+ (GstStructureForeachFunc) _media_add_rtx_ssrc_group, media);
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ const GstStructure *s = gst_caps_get_structure (caps, i);
+ guint ssrc;
+
+ if (gst_structure_get_uint (s, "ssrc", &ssrc)) {
+ gchar *str;
+
+ /* https://tools.ietf.org/html/draft-ietf-mmusic-msid-16 */
+ str =
+ g_strdup_printf ("%u msid:%s %s", ssrc, cname,
+ GST_OBJECT_NAME (trans));
+ gst_sdp_media_add_attribute (media, "ssrc", str);
+ g_free (str);
+
+ str = g_strdup_printf ("%u cname:%s", ssrc, cname);
+ gst_sdp_media_add_attribute (media, "ssrc", str);
+ g_free (str);
+ }
+ }
+
+ gst_structure_free (sdes);
+
+ if (trans->local_rtx_ssrc_map)
+ gst_structure_foreach (trans->local_rtx_ssrc_map,
+ (GstStructureForeachFunc) _media_add_rtx_ssrc, &data);
+ }
+
+ /* Append an "a=fingerprint" attribute to @media, computed as the SHA-256
+  * digest of @transport's DTLS certificate. */
+ static void
+ _add_fingerprint_to_media (GstWebRTCDTLSTransport * transport,
+ GstSDPMedia * media)
+ {
+ gchar *cert, *fingerprint, *val;
+
+ g_object_get (transport, "certificate", &cert, NULL);
+
+ fingerprint =
+ _generate_fingerprint_from_certificate (cert, G_CHECKSUM_SHA256);
+ g_free (cert);
+ /* "<hash-func> <fingerprint>" as per RFC 8122 syntax */
+ val =
+ g_strdup_printf ("%s %s",
+ _g_checksum_to_webrtc_string (G_CHECKSUM_SHA256), fingerprint);
+ g_free (fingerprint);
+
+ gst_sdp_media_add_attribute (media, "fingerprint", val);
+ g_free (val);
+ }
+
+ /* based off https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-18#section-5.2.1 */
+ /* Fill @media with a complete m-section describing @trans: ICE
+  * credentials, formats from the transceiver's caps, FEC/RTX payload
+  * types, ssrc attributes, a unique mid and the DTLS fingerprint.  Also
+  * creates the transport stream for the sender when missing and appends
+  * the chosen mid to @bundled_mids.  Returns FALSE (without setting
+  * @error) when the transceiver is inactive or has no usable caps. */
+ static gboolean
+ sdp_media_from_transceiver (GstWebRTCBin * webrtc, GstSDPMedia * media,
+ GstWebRTCRTPTransceiver * trans, guint media_idx,
+ GString * bundled_mids, guint bundle_idx, gchar * bundle_ufrag,
+ gchar * bundle_pwd, GArray * reserved_pts, GHashTable * all_mids,
+ GError ** error)
+ {
+ /* TODO:
+ * rtp header extensions
+ * ice attributes
+ * rtx
+ * fec
+ * msid-semantics
+ * msid
+ * dtls fingerprints
+ * multiple dtls fingerprints https://tools.ietf.org/html/draft-ietf-mmusic-4572-update-05
+ */
+ GstSDPMessage *last_offer = _get_latest_self_generated_sdp (webrtc);
+ gchar *direction, *sdp_mid, *ufrag, *pwd;
+ gboolean bundle_only;
+ GstCaps *caps;
+ int i;
+
+ if (trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE
+ || trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE)
+ return FALSE;
+
+ g_assert (trans->mline == -1 || trans->mline == media_idx);
+
+ /* a non-bundled m-section inside a max-bundle offer is marked
+ * bundle-only and gets port 0 below */
+ bundle_only = bundled_mids && bundle_idx != media_idx
+ && webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE;
+
+ /* mandated by JSEP */
+ gst_sdp_media_add_attribute (media, "setup", "actpass");
+
+ /* FIXME: deal with ICE restarts */
+ if (last_offer && trans->mline != -1 && trans->mid) {
+ ufrag = g_strdup (_media_get_ice_ufrag (last_offer, trans->mline));
+ pwd = g_strdup (_media_get_ice_pwd (last_offer, trans->mline));
+ GST_DEBUG_OBJECT (trans, "%u Using previous ice parameters", media_idx);
+ } else {
+ GST_DEBUG_OBJECT (trans,
+ "%u Generating new ice parameters mline %i, mid %s", media_idx,
+ trans->mline, trans->mid);
+ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ _generate_ice_credentials (&ufrag, &pwd);
+ } else {
+ g_assert (bundle_ufrag && bundle_pwd);
+ ufrag = g_strdup (bundle_ufrag);
+ pwd = g_strdup (bundle_pwd);
+ }
+ }
+
+ gst_sdp_media_add_attribute (media, "ice-ufrag", ufrag);
+ gst_sdp_media_add_attribute (media, "ice-pwd", pwd);
+ g_free (ufrag);
+ g_free (pwd);
+
+ gst_sdp_media_set_port_info (media, bundle_only || trans->stopped ? 0 : 9, 0);
+ gst_sdp_media_set_proto (media, "UDP/TLS/RTP/SAVPF");
+ gst_sdp_media_add_connection (media, "IN", "IP4", "0.0.0.0", 0, 0);
+
+ if (bundle_only) {
+ gst_sdp_media_add_attribute (media, "bundle-only", NULL);
+ }
+
+ /* FIXME: negotiate this */
+ /* FIXME: when bundle_only, these should not be added:
+ * https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-52#section-7.1.3
+ * However, this causes incompatibilities with current versions
+ * of the major browsers */
+ gst_sdp_media_add_attribute (media, "rtcp-mux", "");
+ gst_sdp_media_add_attribute (media, "rtcp-rsize", NULL);
+
+ direction =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ trans->direction);
+ gst_sdp_media_add_attribute (media, direction, "");
+ g_free (direction);
+
+ caps = _find_codec_preferences (webrtc, trans, media_idx, error);
+ caps = _add_supported_attributes_to_caps (webrtc, WEBRTC_TRANSCEIVER (trans),
+ caps);
+
+ if (!caps || gst_caps_is_empty (caps) || gst_caps_is_any (caps)) {
+ GST_WARNING_OBJECT (webrtc, "no caps available for transceiver, skipping");
+ if (caps)
+ gst_caps_unref (caps);
+ return FALSE;
+ }
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstCaps *format = gst_caps_new_empty ();
+ const GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_caps_append_structure (format, gst_structure_copy (s));
+
+ GST_DEBUG_OBJECT (webrtc, "Adding %u-th caps %" GST_PTR_FORMAT
+ " to %u-th media", i, format, media_idx);
+
+ /* this only looks at the first structure so we loop over the given caps
+ * and add each structure inside it piecemeal */
+ gst_sdp_media_set_media_from_caps (format, media);
+
+ gst_caps_unref (format);
+ }
+
+ {
+ const GstStructure *s = gst_caps_get_structure (caps, 0);
+ gint clockrate = -1;
+ gint rtx_target_pt;
+ gint original_rtx_target_pt; /* Workaround chrome bug: https://bugs.chromium.org/p/webrtc/issues/detail?id=6196 */
+ guint rtx_target_ssrc = -1;
+
+ if (gst_structure_get_int (s, "payload", &rtx_target_pt) &&
+ webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE)
+ g_array_append_val (reserved_pts, rtx_target_pt);
+
+ original_rtx_target_pt = rtx_target_pt;
+
+ if (!gst_structure_get_int (s, "clock-rate", &clockrate))
+ GST_WARNING_OBJECT (webrtc,
+ "Caps %" GST_PTR_FORMAT " are missing clock-rate", caps);
+ if (!gst_structure_get_uint (s, "ssrc", &rtx_target_ssrc))
+ GST_WARNING_OBJECT (webrtc, "Caps %" GST_PTR_FORMAT " are missing ssrc",
+ caps);
+
+ /* FEC may redirect rtx_target_pt to the RED payload type; the chrome
+ * workaround then adds a second RTX mapping for the original pt too */
+ _pick_fec_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), reserved_pts,
+ clockrate, &rtx_target_pt, media);
+ _pick_rtx_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), reserved_pts,
+ clockrate, rtx_target_pt, rtx_target_ssrc, media);
+ if (original_rtx_target_pt != rtx_target_pt)
+ _pick_rtx_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), reserved_pts,
+ clockrate, original_rtx_target_pt, rtx_target_ssrc, media);
+ }
+
+ _media_add_ssrcs (media, caps, webrtc, WEBRTC_TRANSCEIVER (trans));
+
+ /* Some identifier; we also add the media name to it so it's identifiable */
+ if (trans->mid) {
+ gst_sdp_media_add_attribute (media, "mid", trans->mid);
+ } else {
+ /* Make sure to avoid mid collisions */
+ while (TRUE) {
+ sdp_mid = g_strdup_printf ("%s%u", gst_sdp_media_get_media (media),
+ webrtc->priv->media_counter++);
+ if (g_hash_table_contains (all_mids, (gpointer) sdp_mid)) {
+ g_free (sdp_mid);
+ } else {
+ gst_sdp_media_add_attribute (media, "mid", sdp_mid);
+ g_hash_table_insert (all_mids, sdp_mid, NULL);
+ break;
+ }
+ }
+ }
+
+ /* TODO:
+ * - add a=candidate lines for gathered candidates
+ */
+
+ if (trans->sender) {
+ if (!trans->sender->transport) {
+ TransportStream *item;
+
+ item =
+ _get_or_create_transport_stream (webrtc,
+ bundled_mids ? bundle_idx : media_idx, FALSE);
+
+ webrtc_transceiver_set_transport (WEBRTC_TRANSCEIVER (trans), item);
+ }
+
+ _add_fingerprint_to_media (trans->sender->transport, media);
+ }
+
+ if (bundled_mids) {
+ const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid");
+
+ g_assert (mid);
+ g_string_append_printf (bundled_mids, " %s", mid);
+ }
+
+ gst_caps_unref (caps);
+
+ return TRUE;
+ }
+
+ /* GFunc callback: record the payload type from @pad's most recently
+  * received caps into @reserved_pts so it is not re-allocated for RTX/FEC.
+  * Pads without received caps or without a "payload" field are ignored. */
+ static void
+ gather_pad_pt (GstWebRTCBinPad * pad, GArray * reserved_pts)
+ {
+   if (pad->received_caps) {
+     GstStructure *s = gst_caps_get_structure (pad->received_caps, 0);
+     gint pt;
+
+     if (gst_structure_get_int (s, "payload", &pt)) {
+       /* pt is a gint: use %d, not %u, to match the vararg type */
+       GST_TRACE_OBJECT (pad, "have reserved pt %d from received caps", pt);
+       g_array_append_val (reserved_pts, pt);
+     }
+   }
+ }
+
+ /* Collect every payload type currently in use by the bin — from received
+  * caps on existing and pending sink pads, and from each transceiver's
+  * codec preferences — so that newly picked RTX/FEC payload types do not
+  * clash.  Returns a new GArray of guint owned by the caller. */
+ static GArray *
+ gather_reserved_pts (GstWebRTCBin * webrtc)
+ {
+   GstElement *element = GST_ELEMENT (webrtc);
+   GArray *reserved_pts = g_array_new (FALSE, FALSE, sizeof (guint));
+   guint i;
+
+   GST_OBJECT_LOCK (webrtc);
+   g_list_foreach (element->sinkpads, (GFunc) gather_pad_pt, reserved_pts);
+   g_list_foreach (webrtc->priv->pending_pads, (GFunc) gather_pad_pt,
+       reserved_pts);
+
+   for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+     GstWebRTCRTPTransceiver *trans;
+
+     trans = g_ptr_array_index (webrtc->priv->transceivers, i);
+     GST_OBJECT_LOCK (trans);
+     if (trans->codec_preferences) {
+       guint j, n;
+       gint pt;
+
+       n = gst_caps_get_size (trans->codec_preferences);
+       for (j = 0; j < n; j++) {
+         GstStructure *s = gst_caps_get_structure (trans->codec_preferences, j);
+         if (gst_structure_get_int (s, "payload", &pt)) {
+           /* pt is a gint: use %d, not %u, to match the vararg type */
+           GST_TRACE_OBJECT (trans, "have reserved pt %d from codec preferences",
+               pt);
+           g_array_append_val (reserved_pts, pt);
+         }
+       }
+     }
+     GST_OBJECT_UNLOCK (trans);
+   }
+   GST_OBJECT_UNLOCK (webrtc);
+
+   return reserved_pts;
+ }
+
+ /* Fill @media with the "application" m-section advertising the WebRTC
+  * data channel (webrtc-datachannel over UDP/DTLS/SCTP), re-using the mid
+  * and ICE credentials from the previous offer when present, and create
+  * the data-channel transports.  Returns FALSE when no data channels
+  * exist, in which case @media is left untouched. */
+ static gboolean
+ _add_data_channel_offer (GstWebRTCBin * webrtc, GstSDPMessage * msg,
+ GstSDPMedia * media, GString * bundled_mids, guint bundle_idx,
+ gchar * bundle_ufrag, gchar * bundle_pwd, GHashTable * all_mids)
+ {
+ GstSDPMessage *last_offer = _get_latest_self_generated_sdp (webrtc);
+ gchar *ufrag, *pwd, *sdp_mid;
+ gboolean bundle_only = bundled_mids
+ && webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE
+ && gst_sdp_message_medias_len (msg) != bundle_idx;
+ guint last_data_index = G_MAXUINT;
+
+ /* add data channel support */
+ if (webrtc->priv->data_channels->len == 0)
+ return FALSE;
+
+ if (last_offer) {
+ last_data_index = _message_get_datachannel_index (last_offer);
+ if (last_data_index < G_MAXUINT) {
+ g_assert (last_data_index < gst_sdp_message_medias_len (last_offer));
+ /* XXX: is this always true when recycling transceivers?
+ * i.e. do we always put the data channel in the same mline */
+ g_assert (last_data_index == gst_sdp_message_medias_len (msg));
+ }
+ }
+
+ /* mandated by JSEP */
+ gst_sdp_media_add_attribute (media, "setup", "actpass");
+
+ /* FIXME: only needed when restarting ICE */
+ if (last_offer && last_data_index < G_MAXUINT) {
+ ufrag = g_strdup (_media_get_ice_ufrag (last_offer, last_data_index));
+ pwd = g_strdup (_media_get_ice_pwd (last_offer, last_data_index));
+ } else {
+ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ _generate_ice_credentials (&ufrag, &pwd);
+ } else {
+ ufrag = g_strdup (bundle_ufrag);
+ pwd = g_strdup (bundle_pwd);
+ }
+ }
+ gst_sdp_media_add_attribute (media, "ice-ufrag", ufrag);
+ gst_sdp_media_add_attribute (media, "ice-pwd", pwd);
+ g_free (ufrag);
+ g_free (pwd);
+
+ gst_sdp_media_set_media (media, "application");
+ gst_sdp_media_set_port_info (media, bundle_only ? 0 : 9, 0);
+ gst_sdp_media_set_proto (media, "UDP/DTLS/SCTP");
+ gst_sdp_media_add_connection (media, "IN", "IP4", "0.0.0.0", 0, 0);
+ gst_sdp_media_add_format (media, "webrtc-datachannel");
+
+ if (bundle_idx != gst_sdp_message_medias_len (msg))
+ gst_sdp_media_add_attribute (media, "bundle-only", NULL);
+
+ /* re-use the previous mid when renegotiating, otherwise pick a fresh one */
+ if (last_offer && last_data_index < G_MAXUINT) {
+ const GstSDPMedia *last_data_media;
+ const gchar *mid;
+
+ last_data_media = gst_sdp_message_get_media (last_offer, last_data_index);
+ mid = gst_sdp_media_get_attribute_val (last_data_media, "mid");
+
+ gst_sdp_media_add_attribute (media, "mid", mid);
+ } else {
+ /* Make sure to avoid mid collisions */
+ while (TRUE) {
+ sdp_mid = g_strdup_printf ("%s%u", gst_sdp_media_get_media (media),
+ webrtc->priv->media_counter++);
+ if (g_hash_table_contains (all_mids, (gpointer) sdp_mid)) {
+ g_free (sdp_mid);
+ } else {
+ gst_sdp_media_add_attribute (media, "mid", sdp_mid);
+ g_hash_table_insert (all_mids, sdp_mid, NULL);
+ break;
+ }
+ }
+ }
+
+ if (bundled_mids) {
+ const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid");
+
+ g_assert (mid);
+ g_string_append_printf (bundled_mids, " %s", mid);
+ }
+
+ /* FIXME: negotiate this properly */
+ gst_sdp_media_add_attribute (media, "sctp-port", "5000");
+
+ _get_or_create_data_channel_transports (webrtc,
+ bundled_mids ? 0 : webrtc->priv->transceivers->len);
+ _add_fingerprint_to_media (webrtc->priv->sctp_transport->transport, media);
+
+ return TRUE;
+ }
+
+ /* TODO: use the options argument */
+ /* Build a complete SDP offer from the current transceivers and data
+  * channels.  Re-uses m-sections, mids and ICE credentials from the
+  * previously generated offer (when any) so renegotiation keeps stable
+  * identifiers, then appends m-sections for not-yet-seen transceivers and,
+  * when needed, a data-channel section.  Returns NULL and sets @error on
+  * mid collisions or impossible locked-mline constraints. */
+ static GstSDPMessage *
+ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options,
+ GError ** error)
+ {
+ GstSDPMessage *ret = NULL;
+ GString *bundled_mids = NULL;
+ gchar *bundle_ufrag = NULL;
+ gchar *bundle_pwd = NULL;
+ GArray *reserved_pts = NULL;
+ GHashTable *all_mids =
+ g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
+
+ GstSDPMessage *last_offer = _get_latest_self_generated_sdp (webrtc);
+ GList *seen_transceivers = NULL;
+ guint media_idx = 0;
+ int i;
+
+ gst_sdp_message_new (&ret);
+
+ gst_sdp_message_set_version (ret, "0");
+ {
+ gchar *v, *sess_id;
+ v = g_strdup_printf ("%u", webrtc->priv->offer_count++);
+ /* keep the session id stable across renegotiations */
+ if (last_offer) {
+ const GstSDPOrigin *origin = gst_sdp_message_get_origin (last_offer);
+ sess_id = g_strdup (origin->sess_id);
+ } else {
+ sess_id = g_strdup_printf ("%" G_GUINT64_FORMAT, RANDOM_SESSION_ID);
+ }
+ gst_sdp_message_set_origin (ret, "-", sess_id, v, "IN", "IP4", "0.0.0.0");
+ g_free (sess_id);
+ g_free (v);
+ }
+ gst_sdp_message_set_session_name (ret, "-");
+ gst_sdp_message_add_time (ret, "0", "0", NULL);
+ gst_sdp_message_add_attribute (ret, "ice-options", "trickle");
+
+ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE) {
+ bundled_mids = g_string_new ("BUNDLE");
+ } else if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_MAX_COMPAT) {
+ bundled_mids = g_string_new ("BUNDLE");
+ }
+
+ /* when bundling, a single ufrag/pwd pair is shared by all m-sections */
+ if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ GStrv last_bundle = NULL;
+ guint bundle_media_index;
+
+ reserved_pts = gather_reserved_pts (webrtc);
+ if (last_offer && _parse_bundle (last_offer, &last_bundle, NULL)
++#ifndef __TIZEN__
+ && last_bundle && last_bundle && last_bundle[0]
++#else
++ && last_bundle && last_bundle[0]
++#endif
+ && _get_bundle_index (last_offer, last_bundle, &bundle_media_index)) {
+ bundle_ufrag =
+ g_strdup (_media_get_ice_ufrag (last_offer, bundle_media_index));
+ bundle_pwd =
+ g_strdup (_media_get_ice_pwd (last_offer, bundle_media_index));
+ } else {
+ _generate_ice_credentials (&bundle_ufrag, &bundle_pwd);
+ }
+
+ g_strfreev (last_bundle);
+ }
+
+ /* FIXME: recycle transceivers */
+
+ /* Fill up the renegotiated streams first */
+ if (last_offer) {
+ for (i = 0; i < gst_sdp_message_medias_len (last_offer); i++) {
+ GstWebRTCRTPTransceiver *trans = NULL;
+ const GstSDPMedia *last_media;
+
+ last_media = gst_sdp_message_get_media (last_offer, i);
+
+ if (g_strcmp0 (gst_sdp_media_get_media (last_media), "audio") == 0
+ || g_strcmp0 (gst_sdp_media_get_media (last_media), "video") == 0) {
+ const gchar *last_mid;
+ int j;
+ last_mid = gst_sdp_media_get_attribute_val (last_media, "mid");
+
+ /* find the transceiver that owned this m-section last time */
+ for (j = 0; j < webrtc->priv->transceivers->len; j++) {
+ trans = g_ptr_array_index (webrtc->priv->transceivers, j);
+
+ if (trans->mid && g_strcmp0 (trans->mid, last_mid) == 0) {
+ GstSDPMedia *media;
+ const gchar *mid;
+ WebRTCTransceiver *wtrans = WEBRTC_TRANSCEIVER (trans);
+
+ g_assert (!g_list_find (seen_transceivers, trans));
+
+ if (wtrans->mline_locked && trans->mline != media_idx) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_IMPOSSIBLE_MLINE_RESTRICTION,
+ "Previous negotiatied transceiver %"
+ GST_PTR_FORMAT " with mid %s was in mline %d but transceiver"
+ " has locked mline %u", trans, trans->mid, media_idx,
+ trans->mline);
+ goto cancel_offer;
+ }
+
+ GST_LOG_OBJECT (webrtc, "using previous negotiatied transceiver %"
+ GST_PTR_FORMAT " with mid %s into media index %u", trans,
+ trans->mid, media_idx);
+
+ /* FIXME: deal with format changes */
+ gst_sdp_media_copy (last_media, &media);
+ _media_replace_direction (media, trans->direction);
+
+ mid = gst_sdp_media_get_attribute_val (media, "mid");
+ g_assert (mid);
+
+ if (g_hash_table_contains (all_mids, mid)) {
+ gst_sdp_media_free (media);
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_FAILED,
+ "Duplicate mid %s when creating offer", mid);
+ goto cancel_offer;
+ }
+
+ g_hash_table_insert (all_mids, g_strdup (mid), NULL);
+
+ if (bundled_mids)
+ g_string_append_printf (bundled_mids, " %s", mid);
+
+ gst_sdp_message_add_media (ret, media);
+ media_idx++;
+
+ gst_sdp_media_free (media);
+ seen_transceivers = g_list_prepend (seen_transceivers, trans);
+ break;
+ }
+ }
+ } else if (g_strcmp0 (gst_sdp_media_get_media (last_media),
+ "application") == 0) {
+ GstSDPMedia media = { 0, };
+ gst_sdp_media_init (&media);
+ if (_add_data_channel_offer (webrtc, ret, &media, bundled_mids, 0,
+ bundle_ufrag, bundle_pwd, all_mids)) {
+ gst_sdp_message_add_media (ret, &media);
+ media_idx++;
+ } else {
+ gst_sdp_media_uninit (&media);
+ }
+ }
+ }
+ }
+
+ /* First, go over all transceivers and gather existing mids */
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *trans;
+
+ trans = g_ptr_array_index (webrtc->priv->transceivers, i);
+
+ if (g_list_find (seen_transceivers, trans))
+ continue;
+
+ if (trans->mid) {
+ if (g_hash_table_contains (all_mids, trans->mid)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_FAILED,
+ "Duplicate mid %s when creating offer", trans->mid);
+ goto cancel_offer;
+ }
+
+ g_hash_table_insert (all_mids, g_strdup (trans->mid), NULL);
+ }
+ }
+
+
+ /* add any extra streams */
+ for (;;) {
+ GstWebRTCRTPTransceiver *trans = NULL;
+ GstSDPMedia media = { 0, };
+
+ /* First find a transceiver requesting this m-line */
+ trans = _find_transceiver_for_mline (webrtc, media_idx);
+
+ if (trans) {
+ /* We can't have seen it already, because it is locked to this line */
+ g_assert (!g_list_find (seen_transceivers, trans));
+ seen_transceivers = g_list_prepend (seen_transceivers, trans);
+ } else {
+ /* Otherwise find a free transceiver */
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ WebRTCTransceiver *wtrans;
+
+ trans = g_ptr_array_index (webrtc->priv->transceivers, i);
+ wtrans = WEBRTC_TRANSCEIVER (trans);
+
+ /* don't add transceivers twice */
+ if (g_list_find (seen_transceivers, trans))
+ continue;
+
+ /* Ignore transceivers with a locked mline, as they would have been
+ * found above or will be used later */
+ if (wtrans->mline_locked)
+ continue;
+
+ seen_transceivers = g_list_prepend (seen_transceivers, trans);
+ /* don't add stopped transceivers */
+ if (trans->stopped) {
+ continue;
+ }
+
+ /* Otherwise take it */
+ break;
+ }
+
+ /* Stop if we got all transceivers */
+ if (i == webrtc->priv->transceivers->len) {
+
+ /* But try to add a data channel first, we do it here, because
+ * it can allow a locked m-line to be put after, so we need to
+ * do another iteration after.
+ */
+ if (_message_get_datachannel_index (ret) == G_MAXUINT) {
+ GstSDPMedia media = { 0, };
+ gst_sdp_media_init (&media);
+ if (_add_data_channel_offer (webrtc, ret, &media, bundled_mids, 0,
+ bundle_ufrag, bundle_pwd, all_mids)) {
+ gst_sdp_message_add_media (ret, &media);
+ media_idx++;
+ continue;
+ } else {
+ gst_sdp_media_uninit (&media);
+ }
+ }
+
+ /* Verify that we didn't ignore any locked m-line transceivers */
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ WebRTCTransceiver *wtrans;
+
+ trans = g_ptr_array_index (webrtc->priv->transceivers, i);
+ wtrans = WEBRTC_TRANSCEIVER (trans);
+ /* don't add transceivers twice */
+ if (g_list_find (seen_transceivers, trans))
+ continue;
+ g_assert (wtrans->mline_locked);
+
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_IMPOSSIBLE_MLINE_RESTRICTION,
+ "Tranceiver %" GST_PTR_FORMAT " with mid %s has locked mline %d"
+ " but the whole offer only has %u sections", trans, trans->mid,
+ trans->mline, media_idx);
+ goto cancel_offer;
+ }
+ break;
+ }
+ }
+
+ gst_sdp_media_init (&media);
+
+ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ reserved_pts = g_array_new (FALSE, FALSE, sizeof (guint));
+ }
+
+ GST_LOG_OBJECT (webrtc, "adding transceiver %" GST_PTR_FORMAT " at media "
+ "index %u", trans, media_idx);
+
+ if (sdp_media_from_transceiver (webrtc, &media, trans, media_idx,
+ bundled_mids, 0, bundle_ufrag, bundle_pwd, reserved_pts, all_mids,
+ error)) {
+ /* as per JSEP, a=rtcp-mux-only is only added for new streams */
+ gst_sdp_media_add_attribute (&media, "rtcp-mux-only", "");
+ gst_sdp_message_add_media (ret, &media);
+ media_idx++;
+ } else {
+ gst_sdp_media_uninit (&media);
+ }
+
+ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ g_array_free (reserved_pts, TRUE);
+ reserved_pts = NULL;
+ }
+ if (*error)
+ goto cancel_offer;
+ }
+
+ if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ g_array_free (reserved_pts, TRUE);
+ reserved_pts = NULL;
+ }
+
+ webrtc->priv->max_sink_pad_serial = MAX (webrtc->priv->max_sink_pad_serial,
+ media_idx);
+
+ g_assert (media_idx == gst_sdp_message_medias_len (ret));
+
+ if (bundled_mids) {
+ gchar *mids = g_string_free (bundled_mids, FALSE);
+
+ gst_sdp_message_add_attribute (ret, "group", mids);
+ g_free (mids);
+ bundled_mids = NULL;
+ }
+
+ /* FIXME: pre-emptively setup receiving elements when needed */
+
+ if (webrtc->priv->last_generated_answer)
+ gst_webrtc_session_description_free (webrtc->priv->last_generated_answer);
+ webrtc->priv->last_generated_answer = NULL;
+ if (webrtc->priv->last_generated_offer)
+ gst_webrtc_session_description_free (webrtc->priv->last_generated_offer);
+ {
+ GstSDPMessage *copy;
+ /* fixed: "&copy" had been mangled into the HTML entity "(c)" glyph */
+ gst_sdp_message_copy (ret, &copy);
+ webrtc->priv->last_generated_offer =
+ gst_webrtc_session_description_new (GST_WEBRTC_SDP_TYPE_OFFER, copy);
+ }
+
+ out:
+ if (reserved_pts)
+ g_array_free (reserved_pts, TRUE);
+
+ g_hash_table_unref (all_mids);
+
+ g_list_free (seen_transceivers);
+
+ /* g_free() is NULL-safe; no need to guard */
+ g_free (bundle_ufrag);
+ g_free (bundle_pwd);
+
+ if (bundled_mids)
+ g_string_free (bundled_mids, TRUE);
+
+ return ret;
+
+ cancel_offer:
+ gst_sdp_message_free (ret);
+ ret = NULL;
+ goto out;
+ }
+
+ /* Answer-side FEC: mirror any RED/ULPFEC entries found in @caps into
+  * @media when FEC is enabled on @trans.  *rtx_target_pt is redirected to
+  * the RED payload type so RTX protects the RED stream. */
+ static void
+ _media_add_fec (GstSDPMedia * media, WebRTCTransceiver * trans, GstCaps * caps,
+ gint * rtx_target_pt)
+ {
+ guint i;
+
+ if (trans->fec_type == GST_WEBRTC_FEC_TYPE_NONE)
+ return;
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ const GstStructure *s = gst_caps_get_structure (caps, i);
+
+ if (gst_structure_has_name (s, "application/x-rtp")) {
+ const gchar *encoding_name =
+ gst_structure_get_string (s, "encoding-name");
+ gint clock_rate;
+ gint pt;
+
+ if (gst_structure_get_int (s, "clock-rate", &clock_rate) &&
+ gst_structure_get_int (s, "payload", &pt)) {
+ if (!g_strcmp0 (encoding_name, "RED")) {
+ gchar *str;
+
+ str = g_strdup_printf ("%u", pt);
+ gst_sdp_media_add_format (media, str);
+ g_free (str);
+ str = g_strdup_printf ("%u red/%d", pt, clock_rate);
+ *rtx_target_pt = pt;
+ gst_sdp_media_add_attribute (media, "rtpmap", str);
+ g_free (str);
+ } else if (!g_strcmp0 (encoding_name, "ULPFEC")) {
+ gchar *str;
+
+ str = g_strdup_printf ("%u", pt);
+ gst_sdp_media_add_format (media, str);
+ g_free (str);
+ str = g_strdup_printf ("%u ulpfec/%d", pt, clock_rate);
+ gst_sdp_media_add_attribute (media, "rtpmap", str);
+ g_free (str);
+ }
+ }
+ }
+ }
+ }
+
+ /* Answer-side RTX: mirror any RTX entry from @offer_caps whose "apt"
+  * matches @target_pt into @media, and (re)build @trans's local RTX ssrc
+  * map, pairing @target_ssrc with a fresh random retransmission ssrc. */
+ static void
+ _media_add_rtx (GstSDPMedia * media, WebRTCTransceiver * trans,
+ GstCaps * offer_caps, gint target_pt, guint target_ssrc)
+ {
+ guint i;
+ const GstStructure *s;
+
+ /* always reset the map, even when nothing matches below */
+ if (trans->local_rtx_ssrc_map)
+ gst_structure_free (trans->local_rtx_ssrc_map);
+
+ trans->local_rtx_ssrc_map =
+ gst_structure_new_empty ("application/x-rtp-ssrc-map");
+
+ for (i = 0; i < gst_caps_get_size (offer_caps); i++) {
+ s = gst_caps_get_structure (offer_caps, i);
+
+ if (gst_structure_has_name (s, "application/x-rtp")) {
+ const gchar *encoding_name =
+ gst_structure_get_string (s, "encoding-name");
+ const gchar *apt_str = gst_structure_get_string (s, "apt");
+ gint apt;
+ gint clock_rate;
+ gint pt;
+
+ /* only RTX entries carry an "apt" (associated payload type) */
+ if (!apt_str)
+ continue;
+
+ apt = atoi (apt_str);
+
+ if (gst_structure_get_int (s, "clock-rate", &clock_rate) &&
+ gst_structure_get_int (s, "payload", &pt) && apt == target_pt) {
+ if (!g_strcmp0 (encoding_name, "RTX")) {
+ gchar *str;
+
+ str = g_strdup_printf ("%u", pt);
+ gst_sdp_media_add_format (media, str);
+ g_free (str);
+ str = g_strdup_printf ("%u rtx/%d", pt, clock_rate);
+ gst_sdp_media_add_attribute (media, "rtpmap", str);
+ g_free (str);
+
+ str = g_strdup_printf ("%d apt=%d", pt, apt);
+ gst_sdp_media_add_attribute (media, "fmtp", str);
+ g_free (str);
+
+ str = g_strdup_printf ("%u", target_ssrc);
+ gst_structure_set (trans->local_rtx_ssrc_map, str, G_TYPE_UINT,
+ g_random_int (), NULL);
+ }
+ }
+ }
+ }
+ }
+
+ static gboolean
+ _update_transceiver_kind_from_caps (GstWebRTCRTPTransceiver * trans,
+ const GstCaps * caps)
+ {
+ GstWebRTCKind kind = webrtc_kind_from_caps (caps);
+
+ if (trans->kind == kind)
+ return TRUE;
+
+ if (trans->kind == GST_WEBRTC_KIND_UNKNOWN) {
+ trans->kind = kind;
+ return TRUE;
+ } else {
+ return FALSE;
+ }
+ }
+
+ static void
+ _get_rtx_target_pt_and_ssrc_from_caps (GstCaps * answer_caps, gint * target_pt,
+ guint * target_ssrc)
+ {
+ const GstStructure *s = gst_caps_get_structure (answer_caps, 0);
+
+ gst_structure_get_int (s, "payload", target_pt);
+ gst_structure_get_uint (s, "ssrc", target_ssrc);
+ }
+
+ /* TODO: use the options argument */
+ static GstSDPMessage *
+ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options,
+ GError ** error)
+ {
+ GstSDPMessage *ret = NULL;
+ const GstWebRTCSessionDescription *pending_remote =
+ webrtc->pending_remote_description;
+ guint i;
+ GStrv bundled = NULL;
+ guint bundle_idx = 0;
+ GString *bundled_mids = NULL;
+ gchar *bundle_ufrag = NULL;
+ gchar *bundle_pwd = NULL;
+ GList *seen_transceivers = NULL;
+ GstSDPMessage *last_answer = _get_latest_self_generated_sdp (webrtc);
+
+ if (!webrtc->pending_remote_description) {
+ g_set_error_literal (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_INVALID_STATE,
+ "Asked to create an answer without a remote description");
+ return NULL;
+ }
+
+ if (!_parse_bundle (pending_remote->sdp, &bundled, error))
+ goto out;
+
+ if (bundled) {
+ GStrv last_bundle = NULL;
+ guint bundle_media_index;
+
+ if (!_get_bundle_index (pending_remote->sdp, bundled, &bundle_idx)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Bundle tag is %s but no media found matching", bundled[0]);
+ goto out;
+ }
+
+ if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) {
+ bundled_mids = g_string_new ("BUNDLE");
+ }
+
+ if (last_answer && _parse_bundle (last_answer, &last_bundle, NULL)
+ && last_bundle && last_bundle[0]
+ && _get_bundle_index (last_answer, last_bundle, &bundle_media_index)) {
+ bundle_ufrag =
+ g_strdup (_media_get_ice_ufrag (last_answer, bundle_media_index));
+ bundle_pwd =
+ g_strdup (_media_get_ice_pwd (last_answer, bundle_media_index));
+ } else {
+ _generate_ice_credentials (&bundle_ufrag, &bundle_pwd);
+ }
+
+ g_strfreev (last_bundle);
+ }
+
+ gst_sdp_message_new (&ret);
+
+ gst_sdp_message_set_version (ret, "0");
+ {
+ const GstSDPOrigin *offer_origin =
+ gst_sdp_message_get_origin (pending_remote->sdp);
+ gst_sdp_message_set_origin (ret, "-", offer_origin->sess_id,
+ offer_origin->sess_version, "IN", "IP4", "0.0.0.0");
+ }
+ gst_sdp_message_set_session_name (ret, "-");
+
+ for (i = 0; i < gst_sdp_message_attributes_len (pending_remote->sdp); i++) {
+ const GstSDPAttribute *attr =
+ gst_sdp_message_get_attribute (pending_remote->sdp, i);
+
+ if (g_strcmp0 (attr->key, "ice-options") == 0) {
+ gst_sdp_message_add_attribute (ret, attr->key, attr->value);
+ }
+ }
+
+ for (i = 0; i < gst_sdp_message_medias_len (pending_remote->sdp); i++) {
+ GstSDPMedia *media = NULL;
+ GstSDPMedia *offer_media;
+ GstWebRTCDTLSSetup offer_setup, answer_setup;
+ guint j, k;
+ gboolean bundle_only;
+ const gchar *mid;
+
+ offer_media =
+ (GstSDPMedia *) gst_sdp_message_get_media (pending_remote->sdp, i);
+ bundle_only = _media_has_attribute_key (offer_media, "bundle-only");
+
+ gst_sdp_media_new (&media);
+ if (bundle_only && webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE)
+ gst_sdp_media_set_port_info (media, 0, 0);
+ else
+ gst_sdp_media_set_port_info (media, 9, 0);
+ gst_sdp_media_add_connection (media, "IN", "IP4", "0.0.0.0", 0, 0);
+
+ {
+ gchar *ufrag, *pwd;
+
+ /* FIXME: deal with ICE restarts */
+ if (last_answer && i < gst_sdp_message_medias_len (last_answer)) {
+ ufrag = g_strdup (_media_get_ice_ufrag (last_answer, i));
+ pwd = g_strdup (_media_get_ice_pwd (last_answer, i));
+ } else {
+ if (!bundled) {
+ _generate_ice_credentials (&ufrag, &pwd);
+ } else {
+ ufrag = g_strdup (bundle_ufrag);
+ pwd = g_strdup (bundle_pwd);
+ }
+ }
+ gst_sdp_media_add_attribute (media, "ice-ufrag", ufrag);
+ gst_sdp_media_add_attribute (media, "ice-pwd", pwd);
+ g_free (ufrag);
+ g_free (pwd);
+ }
+
+ for (j = 0; j < gst_sdp_media_attributes_len (offer_media); j++) {
+ const GstSDPAttribute *attr =
+ gst_sdp_media_get_attribute (offer_media, j);
+
+ if (g_strcmp0 (attr->key, "mid") == 0
+ || g_strcmp0 (attr->key, "rtcp-mux") == 0) {
+ gst_sdp_media_add_attribute (media, attr->key, attr->value);
+ /* FIXME: handle anything we want to keep */
+ }
+ }
+
+ mid = gst_sdp_media_get_attribute_val (media, "mid");
+ /* XXX: not strictly required but a lot of functionality requires a mid */
+ g_assert (mid);
+
+ /* set the a=setup: attribute */
+ offer_setup = _get_dtls_setup_from_media (offer_media);
+ answer_setup = _intersect_dtls_setup (offer_setup);
+ if (answer_setup == GST_WEBRTC_DTLS_SETUP_NONE) {
+ GST_WARNING_OBJECT (webrtc, "Could not intersect offer setup with "
+ "transceiver direction");
+ goto rejected;
+ }
+ _media_replace_setup (media, answer_setup);
+
+ if (g_strcmp0 (gst_sdp_media_get_media (offer_media), "application") == 0) {
+ int sctp_port;
+
+ if (gst_sdp_media_formats_len (offer_media) != 1) {
+ GST_WARNING_OBJECT (webrtc, "Could not find a format in the m= line "
+ "for webrtc-datachannel");
+ goto rejected;
+ }
+ sctp_port = _get_sctp_port_from_media (offer_media);
+ if (sctp_port == -1) {
+ GST_WARNING_OBJECT (webrtc, "media does not contain a sctp port");
+ goto rejected;
+ }
+
+ /* XXX: older browsers will produce a different SDP format for data
+ * channel that is currently not parsed correctly */
+ gst_sdp_media_set_proto (media, "UDP/DTLS/SCTP");
+
+ gst_sdp_media_set_media (media, "application");
+ gst_sdp_media_set_port_info (media, 9, 0);
+ gst_sdp_media_add_format (media, "webrtc-datachannel");
+
+ /* FIXME: negotiate this properly on renegotiation */
+ gst_sdp_media_add_attribute (media, "sctp-port", "5000");
+
+ _get_or_create_data_channel_transports (webrtc,
+ bundled_mids ? bundle_idx : i);
+
+ if (bundled_mids) {
+ g_assert (mid);
+ g_string_append_printf (bundled_mids, " %s", mid);
+ }
+
+ _add_fingerprint_to_media (webrtc->priv->sctp_transport->transport,
+ media);
+ } else if (g_strcmp0 (gst_sdp_media_get_media (offer_media), "audio") == 0
+ || g_strcmp0 (gst_sdp_media_get_media (offer_media), "video") == 0) {
+ GstCaps *offer_caps, *answer_caps = NULL;
+ GstWebRTCRTPTransceiver *rtp_trans = NULL;
+ WebRTCTransceiver *trans = NULL;
+ GstWebRTCRTPTransceiverDirection offer_dir, answer_dir;
+ gint target_pt = -1;
+ gint original_target_pt = -1;
+ guint target_ssrc = 0;
+
+ gst_sdp_media_set_proto (media, "UDP/TLS/RTP/SAVPF");
+ offer_caps = _rtp_caps_from_media (offer_media);
+
+ if (last_answer && i < gst_sdp_message_medias_len (last_answer)
+ && (rtp_trans =
+ _find_transceiver (webrtc, mid,
+ (FindTransceiverFunc) match_for_mid))) {
+ const GstSDPMedia *last_media =
+ gst_sdp_message_get_media (last_answer, i);
+ const gchar *last_mid =
+ gst_sdp_media_get_attribute_val (last_media, "mid");
+ GstCaps *current_caps;
+
+ /* FIXME: assumes no shenanigans with recycling transceivers */
+ g_assert (g_strcmp0 (mid, last_mid) == 0);
+
+ current_caps = _find_codec_preferences (webrtc, rtp_trans, i, error);
+ if (*error) {
+ gst_caps_unref (offer_caps);
+ goto rejected;
+ }
+ if (!current_caps)
+ current_caps = _rtp_caps_from_media (last_media);
+
+ if (current_caps) {
+ answer_caps = gst_caps_intersect (offer_caps, current_caps);
+ if (gst_caps_is_empty (answer_caps)) {
+ GST_WARNING_OBJECT (webrtc, "Caps from offer for m-line %d (%"
+ GST_PTR_FORMAT ") don't intersect with caps from codec"
+ " preferences and transceiver %" GST_PTR_FORMAT, i, offer_caps,
+ current_caps);
+ gst_caps_unref (current_caps);
+ gst_caps_unref (answer_caps);
+ gst_caps_unref (offer_caps);
+ goto rejected;
+ }
+ gst_caps_unref (current_caps);
+ }
+
+ /* XXX: In theory we're meant to use the sendrecv formats for the
+ * inactive direction however we don't know what that may be and would
+ * require asking outside what it expects to possibly send later */
+
+ GST_LOG_OBJECT (webrtc, "Found existing previously negotiated "
+ "transceiver %" GST_PTR_FORMAT " from mid %s for mline %u "
+ "using caps %" GST_PTR_FORMAT, rtp_trans, mid, i, answer_caps);
+ } else {
+ for (j = 0; j < webrtc->priv->transceivers->len; j++) {
+ GstCaps *trans_caps;
+
+ rtp_trans = g_ptr_array_index (webrtc->priv->transceivers, j);
+
+ if (g_list_find (seen_transceivers, rtp_trans)) {
+ /* Don't double allocate a transceiver to multiple mlines */
+ rtp_trans = NULL;
+ continue;
+ }
+
+ trans_caps = _find_codec_preferences (webrtc, rtp_trans, j, error);
+ if (*error) {
+ gst_caps_unref (offer_caps);
+ goto rejected;
+ }
+
+ GST_TRACE_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT
+ " and %" GST_PTR_FORMAT, offer_caps, trans_caps);
+
+ /* FIXME: technically this is a little overreaching as some fields we
+ * we can deal with not having and/or we may have unrecognized fields
+ * that we cannot actually support */
+ if (trans_caps) {
+ answer_caps = gst_caps_intersect (offer_caps, trans_caps);
+ gst_caps_unref (trans_caps);
+ if (answer_caps) {
+ if (!gst_caps_is_empty (answer_caps)) {
+ GST_LOG_OBJECT (webrtc,
+ "found compatible transceiver %" GST_PTR_FORMAT
+ " for offer media %u", rtp_trans, i);
+ break;
+ }
+ gst_caps_unref (answer_caps);
+ answer_caps = NULL;
+ }
+ }
+ rtp_trans = NULL;
+ }
+ }
+
+ if (rtp_trans) {
+ answer_dir = rtp_trans->direction;
+ g_assert (answer_caps != NULL);
+ } else {
+ /* if no transceiver, then we only receive that stream and respond with
+ * the intersection with the transceivers codec preferences caps */
+ answer_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY;
+ }
+
+ if (!rtp_trans) {
+ GstCaps *trans_caps;
+ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN;
+
+ if (g_strcmp0 (gst_sdp_media_get_media (offer_media), "audio") == 0)
+ kind = GST_WEBRTC_KIND_AUDIO;
+ else if (g_strcmp0 (gst_sdp_media_get_media (offer_media),
+ "video") == 0)
+ kind = GST_WEBRTC_KIND_VIDEO;
+ else
+ GST_LOG_OBJECT (webrtc, "Unknown media kind %s",
+ GST_STR_NULL (gst_sdp_media_get_media (offer_media)));
+
+ trans = _create_webrtc_transceiver (webrtc, answer_dir, i, kind, NULL);
+ rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans);
+
+ GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT
+ " for mline %u with media kind %d", trans, i, kind);
+
+ trans_caps = _find_codec_preferences (webrtc, rtp_trans, i, error);
+ if (*error) {
+ gst_caps_unref (offer_caps);
+ goto rejected;
+ }
+
+ GST_TRACE_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT
+ " and %" GST_PTR_FORMAT, offer_caps, trans_caps);
+
+ /* FIXME: technically this is a little overreaching as some fields we
+ * we can deal with not having and/or we may have unrecognized fields
+ * that we cannot actually support */
+ if (trans_caps) {
+ answer_caps = gst_caps_intersect (offer_caps, trans_caps);
+ gst_caps_unref (trans_caps);
+ } else {
+ answer_caps = gst_caps_ref (offer_caps);
+ }
+ } else {
+ trans = WEBRTC_TRANSCEIVER (rtp_trans);
+ }
+
+ seen_transceivers = g_list_prepend (seen_transceivers, rtp_trans);
+
+ if (gst_caps_is_empty (answer_caps)) {
+ GST_WARNING_OBJECT (webrtc, "Could not create caps for media");
+ gst_caps_unref (answer_caps);
+ gst_caps_unref (offer_caps);
+ goto rejected;
+ }
+
+ if (!_update_transceiver_kind_from_caps (rtp_trans, answer_caps))
+ GST_WARNING_OBJECT (webrtc,
+ "Trying to change transceiver %d kind from %d to %d",
+ rtp_trans->mline, rtp_trans->kind,
+ webrtc_kind_from_caps (answer_caps));
+
+ if (!trans->do_nack) {
+ answer_caps = gst_caps_make_writable (answer_caps);
+ for (k = 0; k < gst_caps_get_size (answer_caps); k++) {
+ GstStructure *s = gst_caps_get_structure (answer_caps, k);
+ gst_structure_remove_fields (s, "rtcp-fb-nack", NULL);
+ }
+ }
+
+ gst_sdp_media_set_media_from_caps (answer_caps, media);
+
+ _get_rtx_target_pt_and_ssrc_from_caps (answer_caps, &target_pt,
+ &target_ssrc);
+
+ original_target_pt = target_pt;
+
+ _media_add_fec (media, trans, offer_caps, &target_pt);
+ if (trans->do_nack) {
+ _media_add_rtx (media, trans, offer_caps, target_pt, target_ssrc);
+ if (target_pt != original_target_pt)
+ _media_add_rtx (media, trans, offer_caps, original_target_pt,
+ target_ssrc);
+ }
+
+ if (answer_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY)
+ _media_add_ssrcs (media, answer_caps, webrtc,
+ WEBRTC_TRANSCEIVER (rtp_trans));
+
+ gst_caps_unref (answer_caps);
+ answer_caps = NULL;
+
+ /* set the new media direction */
+ offer_dir = _get_direction_from_media (offer_media);
+ answer_dir = _intersect_answer_directions (offer_dir, answer_dir);
+ if (answer_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) {
+ GST_WARNING_OBJECT (webrtc, "Could not intersect offer direction with "
+ "transceiver direction");
+ gst_caps_unref (offer_caps);
+ goto rejected;
+ }
+ _media_replace_direction (media, answer_dir);
+
+ if (!trans->stream) {
+ TransportStream *item;
+
+ item =
+ _get_or_create_transport_stream (webrtc,
+ bundled_mids ? bundle_idx : i, FALSE);
+ webrtc_transceiver_set_transport (trans, item);
+ }
+
+ if (bundled_mids) {
+ const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid");
+
+ g_assert (mid);
+ g_string_append_printf (bundled_mids, " %s", mid);
+ }
+
+ /* set the a=fingerprint: for this transport */
+ _add_fingerprint_to_media (trans->stream->transport, media);
+
+ gst_caps_unref (offer_caps);
+ } else {
+ GST_WARNING_OBJECT (webrtc, "unknown m= line media name");
+ goto rejected;
+ }
+
+ if (0) {
+ rejected:
+ GST_INFO_OBJECT (webrtc, "media %u rejected", i);
+ gst_sdp_media_free (media);
+ gst_sdp_media_copy (offer_media, &media);
+ gst_sdp_media_set_port_info (media, 0, 0);
+ }
+ gst_sdp_message_add_media (ret, media);
+ gst_sdp_media_free (media);
+ }
+
+ if (bundled_mids) {
+ gchar *mids = g_string_free (bundled_mids, FALSE);
+
+ gst_sdp_message_add_attribute (ret, "group", mids);
+ g_free (mids);
+ }
+
+ if (bundle_ufrag)
+ g_free (bundle_ufrag);
+
+ if (bundle_pwd)
+ g_free (bundle_pwd);
+
+ /* FIXME: can we add not matched transceivers? */
+
+ /* XXX: only true for the initial offerer */
+ gst_webrtc_ice_set_is_controller (webrtc->priv->ice, FALSE);
+
+ out:
+ g_strfreev (bundled);
+
+ g_list_free (seen_transceivers);
+
+ if (webrtc->priv->last_generated_offer)
+ gst_webrtc_session_description_free (webrtc->priv->last_generated_offer);
+ webrtc->priv->last_generated_offer = NULL;
+ if (webrtc->priv->last_generated_answer)
+ gst_webrtc_session_description_free (webrtc->priv->last_generated_answer);
+ {
+ GstSDPMessage *copy;
+ gst_sdp_message_copy (ret, &copy);
+ webrtc->priv->last_generated_answer =
+ gst_webrtc_session_description_new (GST_WEBRTC_SDP_TYPE_ANSWER, copy);
+ }
+
+ return ret;
+ }
+
+ struct create_sdp
+ {
+ GstStructure *options;
+ GstWebRTCSDPType type;
+ };
+
+ static GstStructure *
+ _create_sdp_task (GstWebRTCBin * webrtc, struct create_sdp *data)
+ {
+ GstWebRTCSessionDescription *desc = NULL;
+ GstSDPMessage *sdp = NULL;
+ GstStructure *s = NULL;
+ GError *error = NULL;
+
+ GST_INFO_OBJECT (webrtc, "creating %s sdp with options %" GST_PTR_FORMAT,
+ gst_webrtc_sdp_type_to_string (data->type), data->options);
+
+ if (data->type == GST_WEBRTC_SDP_TYPE_OFFER)
+ sdp = _create_offer_task (webrtc, data->options, &error);
+ else if (data->type == GST_WEBRTC_SDP_TYPE_ANSWER)
+ sdp = _create_answer_task (webrtc, data->options, &error);
+ else {
+ g_assert_not_reached ();
+ goto out;
+ }
+
+ if (sdp) {
+ desc = gst_webrtc_session_description_new (data->type, sdp);
+ s = gst_structure_new ("application/x-gst-promise",
+ gst_webrtc_sdp_type_to_string (data->type),
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION, desc, NULL);
+ } else {
+ g_warn_if_fail (error != NULL);
+ GST_WARNING_OBJECT (webrtc, "returning error: %s",
+ error ? error->message : "Unknown");
+ s = gst_structure_new ("application/x-gstwebrtcbin-error",
+ "error", G_TYPE_ERROR, error, NULL);
+ g_clear_error (&error);
+ }
+
+ out:
+
+ if (desc)
+ gst_webrtc_session_description_free (desc);
+
+ return s;
+ }
+
+ static void
+ _free_create_sdp_data (struct create_sdp *data)
+ {
+ if (data->options)
+ gst_structure_free (data->options);
+ g_free (data);
+ }
+
+ static void
+ gst_webrtc_bin_create_offer (GstWebRTCBin * webrtc,
+ const GstStructure * options, GstPromise * promise)
+ {
+ struct create_sdp *data = g_new0 (struct create_sdp, 1);
+
+ if (options)
+ data->options = gst_structure_copy (options);
+ data->type = GST_WEBRTC_SDP_TYPE_OFFER;
+
+ if (!gst_webrtc_bin_enqueue_task (webrtc, (GstWebRTCBinFunc) _create_sdp_task,
+ data, (GDestroyNotify) _free_create_sdp_data, promise)) {
+ GError *error =
+ g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED,
+ "Could not create offer. webrtcbin is closed");
+ GstStructure *s =
+ gst_structure_new ("application/x-gstwebrtcbin-promise-error",
+ "error", G_TYPE_ERROR, error, NULL);
+
+ gst_promise_reply (promise, s);
+
+ g_clear_error (&error);
+ }
+ }
+
+ static void
+ gst_webrtc_bin_create_answer (GstWebRTCBin * webrtc,
+ const GstStructure * options, GstPromise * promise)
+ {
+ struct create_sdp *data = g_new0 (struct create_sdp, 1);
+
+ if (options)
+ data->options = gst_structure_copy (options);
+ data->type = GST_WEBRTC_SDP_TYPE_ANSWER;
+
+ if (!gst_webrtc_bin_enqueue_task (webrtc, (GstWebRTCBinFunc) _create_sdp_task,
+ data, (GDestroyNotify) _free_create_sdp_data, promise)) {
+ GError *error =
+ g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED,
+ "Could not create answer. webrtcbin is closed.");
+ GstStructure *s =
+ gst_structure_new ("application/x-gstwebrtcbin-promise-error",
+ "error", G_TYPE_ERROR, error, NULL);
+
+ gst_promise_reply (promise, s);
+
+ g_clear_error (&error);
+ }
+ }
+
+ static GstWebRTCBinPad *
+ _create_pad_for_sdp_media (GstWebRTCBin * webrtc, GstPadDirection direction,
+ GstWebRTCRTPTransceiver * trans, guint serial)
+ {
+ GstWebRTCBinPad *pad;
+ gchar *pad_name;
+
+ if (direction == GST_PAD_SINK) {
+ if (serial == G_MAXUINT)
+ serial = webrtc->priv->max_sink_pad_serial++;
+ } else {
+ serial = trans->mline;
+ }
+
+ pad_name =
+ g_strdup_printf ("%s_%u", direction == GST_PAD_SRC ? "src" : "sink",
+ serial);
+ pad = gst_webrtc_bin_pad_new (pad_name, direction);
+ g_free (pad_name);
+
+ pad->trans = gst_object_ref (trans);
+
+ return pad;
+ }
+
+ static GstWebRTCRTPTransceiver *
+ _find_transceiver_for_sdp_media (GstWebRTCBin * webrtc,
+ const GstSDPMessage * sdp, guint media_idx)
+ {
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp, media_idx);
+ GstWebRTCRTPTransceiver *ret = NULL;
+ int i;
+
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+ if (g_strcmp0 (attr->key, "mid") == 0) {
+ if ((ret =
+ _find_transceiver (webrtc, attr->value,
+ (FindTransceiverFunc) match_for_mid)))
+ goto out;
+ }
+ }
+
+ ret = _find_transceiver (webrtc, &media_idx,
+ (FindTransceiverFunc) transceiver_match_for_mline);
+
+ out:
+ GST_TRACE_OBJECT (webrtc, "Found transceiver %" GST_PTR_FORMAT, ret);
+ return ret;
+ }
+
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++static void
++_insert_netsim_element_between (GstWebRTCBin * webrtc, GstElement * srcbin,
++ const gchar * srcpadname, GstElement * sinkbin, const gchar * sinkpadname,
++ guint idx)
++{
++ gboolean send = !g_strcmp0 (sinkpadname, "rtp_sink");
++ gchar *netsim_name = g_strdup_printf ("netsim_%s_%u",
++ send ? "send" : "recv", idx);
++ GstElement *netsim = gst_element_factory_make ("netsim", netsim_name);
++ g_free (netsim_name);
++
++ gst_bin_add (GST_BIN (webrtc), netsim);
++ g_object_set (netsim, "drop-probability",
++ send ? webrtc->priv->drop_probability_sender :
++ webrtc->priv->drop_probability_receiver, NULL);
++ gst_element_sync_state_with_parent (netsim);
++
++ if (!gst_element_link_pads (srcbin, srcpadname, netsim, "sink"))
++ g_warn_if_reached ();
++
++ if (!gst_element_link_pads (netsim, "src", sinkbin, sinkpadname))
++ g_warn_if_reached ();
++}
++
++#endif
+ static GstPad *
+ _connect_input_stream (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad)
+ {
+ /*
+ * Not-bundle case:
+ *
+ * ,--------------------------------------------webrtcbin-------------------------,
+ * ; ;
+ * ; ,-------rtpbin-------, ,--transport_send_%u--, ;
+ * ; ; send_rtp_src_%u o---o rtp_sink ; ;
+ * ; ,---clocksync---, ; ; ; ; ;
+ * ; ; ; ; send_rtcp_src_%u o---o rtcp_sink ; ;
+ * ; sink_%u ; ; ; ; '---------------------' ;
+ * o---------o sink src o---o send_rtp_sink_%u ; ;
+ * ; '---------------' '--------------------' ;
+ * '------------------------------------------------------------------------------'
+ */
+
+ /*
+ * Bundle case:
+ * ,-----------------------------------------------------webrtcbin--------------------------------,
+ * ; ;
+ * ; ,-------rtpbin-------, ,--transport_send_%u--, ;
+ * ; ; send_rtp_src_%u o---o rtp_sink ; ;
+ * ; ; ; ; ; ;
+ * ; sink_%u ,---clocksync---, ,---funnel---, ; send_rtcp_src_%u o---o rtcp_sink ; ;
+ * o----------o sink src o---o sink_%u ; ; ; '---------------------' ;
+ * ; '---------------' ; ; ; ; ;
+ * ; ; src o-o send_rtp_sink_%u ; ;
+ * ; sink_%u ,---clocksync---, ; ; ; ; ;
+ * o----------o sink src o---o sink%u ; '--------------------' ;
+ * ; '---------------' '------------' ;
+ * '----------------------------------------------------------------------------------------------'
+ */
+ GstPadTemplate *rtp_templ;
+ GstPad *rtp_sink, *sinkpad, *srcpad;
+ gchar *pad_name;
+ WebRTCTransceiver *trans;
+ GstElement *clocksync;
+
+ g_return_val_if_fail (pad->trans != NULL, NULL);
+
+ trans = WEBRTC_TRANSCEIVER (pad->trans);
+
+ GST_INFO_OBJECT (pad, "linking input stream %u", pad->trans->mline);
+
+ g_assert (trans->stream);
+
+ clocksync = gst_element_factory_make ("clocksync", NULL);
+ g_object_set (clocksync, "sync", TRUE, NULL);
+ gst_bin_add (GST_BIN (webrtc), clocksync);
+ gst_element_sync_state_with_parent (clocksync);
+
+ srcpad = gst_element_get_static_pad (clocksync, "src");
+ sinkpad = gst_element_get_static_pad (clocksync, "sink");
+
+ if (!webrtc->rtpfunnel) {
+ rtp_templ =
+ _find_pad_template (webrtc->rtpbin, GST_PAD_SINK, GST_PAD_REQUEST,
+ "send_rtp_sink_%u");
+ g_assert (rtp_templ);
+
+ pad_name = g_strdup_printf ("send_rtp_sink_%u", pad->trans->mline);
+ rtp_sink =
+ gst_element_request_pad (webrtc->rtpbin, rtp_templ, pad_name, NULL);
+ g_free (pad_name);
+ gst_pad_link (srcpad, rtp_sink);
+ gst_object_unref (rtp_sink);
+
+ gst_ghost_pad_set_target (GST_GHOST_PAD (pad), sinkpad);
+
+ pad_name = g_strdup_printf ("send_rtp_src_%u", pad->trans->mline);
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ if (webrtc->priv->netsim) {
++ _insert_netsim_element_between (webrtc, GST_ELEMENT (webrtc->rtpbin), pad_name,
++ GST_ELEMENT (trans->stream->send_bin), "rtp_sink", pad->trans->mline);
++ } else {
++#endif
+ if (!gst_element_link_pads (GST_ELEMENT (webrtc->rtpbin), pad_name,
+ GST_ELEMENT (trans->stream->send_bin), "rtp_sink"))
+ g_warn_if_reached ();
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ }
++#endif
+ g_free (pad_name);
+ } else {
+ gchar *pad_name = g_strdup_printf ("sink_%u", pad->trans->mline);
+ GstPad *funnel_sinkpad =
+ gst_element_request_pad_simple (webrtc->rtpfunnel, pad_name);
+
+ gst_pad_link (srcpad, funnel_sinkpad);
+ gst_ghost_pad_set_target (GST_GHOST_PAD (pad), sinkpad);
+
+ g_free (pad_name);
+ gst_object_unref (funnel_sinkpad);
+ }
+
+ gst_object_unref (srcpad);
+ gst_object_unref (sinkpad);
+
+ gst_element_sync_state_with_parent (GST_ELEMENT (trans->stream->send_bin));
+
+ return GST_PAD (pad);
+ }
+
+ /* output pads are receiving elements */
+ static void
+ _connect_output_stream (GstWebRTCBin * webrtc,
+ TransportStream * stream, guint session_id)
+ {
+ /*
+ * ,------------------------webrtcbin------------------------,
+ * ; ,---------rtpbin---------, ;
+ * ; ,-transport_receive_%u--, ; ; ;
+ * ; ; rtp_src o---o recv_rtp_sink_%u ; ;
+ * ; ; ; ; ; ;
+ * ; ; rtcp_src o---o recv_rtcp_sink_%u ; ;
+ * ; '-----------------------' ; ; ; src_%u
+ * ; ; recv_rtp_src_%u_%u_%u o--o
+ * ; '------------------------' ;
+ * '---------------------------------------------------------'
+ */
+ gchar *pad_name;
+
+ if (stream->output_connected) {
+ GST_DEBUG_OBJECT (webrtc, "stream %" GST_PTR_FORMAT " is already "
+ "connected to rtpbin. Not connecting", stream);
+ return;
+ }
+
+ GST_INFO_OBJECT (webrtc, "linking output stream %u %" GST_PTR_FORMAT,
+ session_id, stream);
+
+ pad_name = g_strdup_printf ("recv_rtp_sink_%u", session_id);
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ if (webrtc->priv->netsim) {
++ _insert_netsim_element_between (webrtc, GST_ELEMENT (stream->receive_bin),
++ "rtp_src", GST_ELEMENT (webrtc->rtpbin), pad_name, session_id);
++ } else {
++#endif
+ if (!gst_element_link_pads (GST_ELEMENT (stream->receive_bin),
+ "rtp_src", GST_ELEMENT (webrtc->rtpbin), pad_name))
+ g_warn_if_reached ();
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ }
++#endif
+ g_free (pad_name);
+
+ gst_element_sync_state_with_parent (GST_ELEMENT (stream->receive_bin));
+
+ /* The webrtcbin src_%u output pads will be created when rtpbin receives
+ * data on that stream in on_rtpbin_pad_added() */
+
+ stream->output_connected = TRUE;
+ }
+
+ typedef struct
+ {
+ guint mlineindex;
+ gchar *candidate;
+ } IceCandidateItem;
+
+ static void
+ _clear_ice_candidate_item (IceCandidateItem * item)
+ {
+ g_free (item->candidate);
+ }
+
+ static void
+ _add_ice_candidate (GstWebRTCBin * webrtc, IceCandidateItem * item,
+ gboolean drop_invalid)
+ {
+ GstWebRTCICEStream *stream;
+
+ stream = _find_ice_stream_for_session (webrtc, item->mlineindex);
+ if (stream == NULL) {
+ if (drop_invalid) {
+ GST_WARNING_OBJECT (webrtc, "Unknown mline %u, dropping",
+ item->mlineindex);
+ } else {
+ IceCandidateItem new;
+ new.mlineindex = item->mlineindex;
+ new.candidate = g_strdup (item->candidate);
+ GST_INFO_OBJECT (webrtc, "Unknown mline %u, deferring", item->mlineindex);
+
+ ICE_LOCK (webrtc);
+ g_array_append_val (webrtc->priv->pending_remote_ice_candidates, new);
+ ICE_UNLOCK (webrtc);
+ }
+ return;
+ }
+
+ GST_LOG_OBJECT (webrtc, "adding ICE candidate with mline:%u, %s",
+ item->mlineindex, item->candidate);
+
+ gst_webrtc_ice_add_candidate (webrtc->priv->ice, stream, item->candidate);
+ }
+
+ static void
+ _add_ice_candidates_from_sdp (GstWebRTCBin * webrtc, gint mlineindex,
+ const GstSDPMedia * media)
+ {
+ gint a;
+ GstWebRTCICEStream *stream = NULL;
+
+ for (a = 0; a < gst_sdp_media_attributes_len (media); a++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, a);
+ if (g_strcmp0 (attr->key, "candidate") == 0) {
+ gchar *candidate;
+
+ if (stream == NULL)
+ stream = _find_ice_stream_for_session (webrtc, mlineindex);
+ if (stream == NULL) {
+ GST_DEBUG_OBJECT (webrtc,
+ "Unknown mline %u, dropping ICE candidates from SDP", mlineindex);
+ return;
+ }
+
+ candidate = g_strdup_printf ("a=candidate:%s", attr->value);
+ GST_LOG_OBJECT (webrtc, "adding ICE candidate with mline:%u, %s",
+ mlineindex, candidate);
+ gst_webrtc_ice_add_candidate (webrtc->priv->ice, stream, candidate);
+ g_free (candidate);
+ }
+ }
+ }
+
+ static void
+ _add_ice_candidate_to_sdp (GstWebRTCBin * webrtc,
+ GstSDPMessage * sdp, gint mline_index, const gchar * candidate)
+ {
+ GstSDPMedia *media = NULL;
+
+ if (mline_index < sdp->medias->len) {
+ media = &g_array_index (sdp->medias, GstSDPMedia, mline_index);
+ }
+
+ if (media == NULL) {
+ GST_WARNING_OBJECT (webrtc, "Couldn't find mline %d to merge ICE candidate",
+ mline_index);
+ return;
+ }
+ // Add the candidate as an attribute, first stripping off the existing
+ // candidate: key from the string description
+ if (strlen (candidate) < 10) {
+ GST_WARNING_OBJECT (webrtc,
+ "Dropping invalid ICE candidate for mline %d: %s", mline_index,
+ candidate);
+ return;
+ }
+ gst_sdp_media_add_attribute (media, "candidate", candidate + 10);
+ }
+
+ static gboolean
+ _filter_sdp_fields (GQuark field_id, const GValue * value,
+ GstStructure * new_structure)
+ {
+ if (!g_str_has_prefix (g_quark_to_string (field_id), "a-")) {
+ gst_structure_id_set_value (new_structure, field_id, value);
+ }
+ return TRUE;
+ }
+
+ static void
+ _set_rtx_ptmap_from_stream (GstWebRTCBin * webrtc, TransportStream * stream)
+ {
+ gint *rtx_pt;
+ gsize rtx_count;
+
+ rtx_pt = transport_stream_get_all_pt (stream, "RTX", &rtx_count);
+ GST_LOG_OBJECT (stream, "have %" G_GSIZE_FORMAT " rtx payloads", rtx_count);
+ if (rtx_pt) {
+ GstStructure *pt_map = gst_structure_new_empty ("application/x-rtp-pt-map");
+ gsize i;
+
+ for (i = 0; i < rtx_count; i++) {
+ GstCaps *rtx_caps = transport_stream_get_caps_for_pt (stream, rtx_pt[i]);
+ const GstStructure *s = gst_caps_get_structure (rtx_caps, 0);
+ const gchar *apt = gst_structure_get_string (s, "apt");
+
+ GST_LOG_OBJECT (stream, "setting rtx mapping: %s -> %u", apt, rtx_pt[i]);
+ gst_structure_set (pt_map, apt, G_TYPE_UINT, rtx_pt[i], NULL);
+ }
+
+ GST_DEBUG_OBJECT (stream, "setting payload map on %" GST_PTR_FORMAT " : %"
+ GST_PTR_FORMAT " and %" GST_PTR_FORMAT, stream->rtxreceive,
+ stream->rtxsend, pt_map);
+
+ if (stream->rtxreceive)
+ g_object_set (stream->rtxreceive, "payload-type-map", pt_map, NULL);
+ if (stream->rtxsend)
+ g_object_set (stream->rtxsend, "payload-type-map", pt_map, NULL);
+
+ gst_structure_free (pt_map);
++#ifdef __TIZEN__
++ g_free (rtx_pt);
++#endif
+ }
+ }
+
+ /* Rebuild @stream's payload-type map (stream->ptmap) from one m= section of
+  * @sdp.  For every format listed in the media, the media-level caps are
+  * intersected with the session-level attributes, renamed to
+  * application/x-rtp, tagged with is-fec=TRUE for ULPFEC, reduced to the
+  * fields accepted by _filter_sdp_fields and stored as a PtMapItem.
+  * Does nothing when the media carries no proto field. */
+ static void
+ _update_transport_ptmap_from_media (GstWebRTCBin * webrtc,
+ TransportStream * stream, const GstSDPMessage * sdp, guint media_idx)
+ {
+ guint i, len;
+ const gchar *proto;
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp, media_idx);
+ 
+ /* get proto */
+ proto = gst_sdp_media_get_proto (media);
+ if (proto != NULL) {
+ /* Parse global SDP attributes once */
+ GstCaps *global_caps = gst_caps_new_empty_simple ("application/x-unknown");
+ GST_DEBUG_OBJECT (webrtc, "mapping sdp session level attributes to caps");
+ gst_sdp_message_attributes_to_caps (sdp, global_caps);
+ GST_DEBUG_OBJECT (webrtc, "mapping sdp media level attributes to caps");
+ gst_sdp_media_attributes_to_caps (media, global_caps);
+ 
+ len = gst_sdp_media_formats_len (media);
+ for (i = 0; i < len; i++) {
+ GstCaps *caps, *outcaps;
+ GstStructure *s;
+ PtMapItem item;
+ gint pt;
+ guint j;
+ 
+ pt = atoi (gst_sdp_media_get_format (media, i));
+ 
+ GST_DEBUG_OBJECT (webrtc, " looking at %d pt: %d", i, pt);
+ 
+ /* convert caps */
+ caps = gst_sdp_media_get_caps_from_media (media, pt);
+ if (caps == NULL) {
+ GST_WARNING_OBJECT (webrtc, " skipping pt %d without caps", pt);
+ continue;
+ }
+ 
+ /* Merge in global caps */
+ /* Intersect will merge in missing fields to the current caps */
+ outcaps = gst_caps_intersect (caps, global_caps);
+ gst_caps_unref (caps);
+ 
+ s = gst_caps_get_structure (outcaps, 0);
+ gst_structure_set_name (s, "application/x-rtp");
+ if (!g_strcmp0 (gst_structure_get_string (s, "encoding-name"), "ULPFEC"))
+ gst_structure_set (s, "is-fec", G_TYPE_BOOLEAN, TRUE, NULL);
+ 
+ /* copy only the whitelisted fields of every structure into item.caps */
+ item.caps = gst_caps_new_empty ();
+ 
+ for (j = 0; j < gst_caps_get_size (outcaps); j++) {
+ GstStructure *s = gst_caps_get_structure (outcaps, j);
+ GstStructure *filtered =
+ gst_structure_new_empty (gst_structure_get_name (s));
+ 
+ gst_structure_foreach (s,
+ (GstStructureForeachFunc) _filter_sdp_fields, filtered);
+ gst_caps_append_structure (item.caps, filtered);
+ }
+ 
+ item.pt = pt;
+ gst_caps_unref (outcaps);
+ 
+ g_array_append_val (stream->ptmap, item);
+ }
+ 
+ gst_caps_unref (global_caps);
+ }
+ }
+
+ /* Apply the negotiation result of m= section @media_idx to @rtp_trans:
+  * intersect the DTLS setup and direction attributes of the current local
+  * and remote descriptions (both must be set when this runs), update the
+  * transceiver's kind/mid/mline, create or find the webrtcbin sink/src pads
+  * implied by the new direction, configure rtcp-rsize and the RTX
+  * payload-type map on @stream, and finally set the stream's receive state.
+  * Sets @error on SDP that cannot be intersected or on an (unimplemented)
+  * direction change. */
+ static void
+ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc,
+ const GstSDPMessage * sdp, guint media_idx,
+ TransportStream * stream, GstWebRTCRTPTransceiver * rtp_trans,
+ GStrv bundled, guint bundle_idx, GError ** error)
+ {
+ WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans);
+ GstWebRTCRTPTransceiverDirection prev_dir = rtp_trans->current_direction;
+ GstWebRTCRTPTransceiverDirection new_dir;
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp, media_idx);
+ GstWebRTCDTLSSetup new_setup;
+ gboolean new_rtcp_rsize;
+ ReceiveState receive_state = RECEIVE_STATE_UNSET;
+ int i;
+ 
+ rtp_trans->mline = media_idx;
+ 
+ /* sync the transceiver kind with the media type of the m= section */
+ if (!g_strcmp0 (gst_sdp_media_get_media (media), "audio")) {
+ if (rtp_trans->kind == GST_WEBRTC_KIND_VIDEO)
+ GST_FIXME_OBJECT (webrtc,
+ "Updating video transceiver to audio, which isn't fully supported.");
+ rtp_trans->kind = GST_WEBRTC_KIND_AUDIO;
+ }
+ 
+ if (!g_strcmp0 (gst_sdp_media_get_media (media), "video")) {
+ if (rtp_trans->kind == GST_WEBRTC_KIND_AUDIO)
+ GST_FIXME_OBJECT (webrtc,
+ "Updating audio transceiver to video, which isn't fully supported.");
+ rtp_trans->kind = GST_WEBRTC_KIND_VIDEO;
+ }
+ 
+ /* take the mid from the a=mid attribute, replacing any previous value */
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+ 
+ if (g_strcmp0 (attr->key, "mid") == 0) {
+ g_free (rtp_trans->mid);
+ rtp_trans->mid = g_strdup (attr->value);
+ }
+ }
+ 
+ {
+ const GstSDPMedia *local_media, *remote_media;
+ GstWebRTCRTPTransceiverDirection local_dir, remote_dir;
+ GstWebRTCDTLSSetup local_setup, remote_setup;
+ 
+ local_media =
+ gst_sdp_message_get_media (webrtc->current_local_description->sdp,
+ media_idx);
+ remote_media =
+ gst_sdp_message_get_media (webrtc->current_remote_description->sdp,
+ media_idx);
+ 
+ local_setup = _get_dtls_setup_from_media (local_media);
+ remote_setup = _get_dtls_setup_from_media (remote_media);
+ new_setup = _get_final_setup (local_setup, remote_setup);
+ if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) {
+ /* fixed: this message and the direction one below were swapped */
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Cannot intersect dtls setup attributes for media %u", media_idx);
+ return;
+ }
+ 
+ local_dir = _get_direction_from_media (local_media);
+ remote_dir = _get_direction_from_media (remote_media);
+ new_dir = _get_final_direction (local_dir, remote_dir);
+ if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Cannot intersect direction attributes for media %u", media_idx);
+ return;
+ }
+ 
+ if (prev_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE
+ && new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE
+ && prev_dir != new_dir) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_NOT_IMPLEMENTED,
+ "transceiver direction changes are not implemented. Media %u",
+ media_idx);
+ return;
+ }
+ 
+ /* rtcp-rsize is only negotiated when both sides advertise it; apply it
+ * on the rtpbin session for this (or the bundled) transport */
+ if (!bundled || bundle_idx == media_idx) {
+ new_rtcp_rsize = _media_has_attribute_key (local_media, "rtcp-rsize")
+ && _media_has_attribute_key (remote_media, "rtcp-rsize");
+ 
+ {
+ GObject *session;
+ g_signal_emit_by_name (webrtc->rtpbin, "get-internal-session",
+ media_idx, &session);
+ if (session) {
+ g_object_set (session, "rtcp-reduced-size", new_rtcp_rsize, NULL);
+ g_object_unref (session);
+ }
+ }
+ }
+ }
+ 
+ if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) {
+ if (!bundled) {
+ /* Not a bundled stream means this entire transport is inactive,
+ * so set the receive state to BLOCK below */
+ stream->active = FALSE;
+ receive_state = RECEIVE_STATE_BLOCK;
+ }
+ } else {
+ /* If this transceiver is active for sending or receiving,
+ * we still need receive at least RTCP, so need to unblock
+ * the receive bin below. */
+ GST_LOG_OBJECT (webrtc, "marking stream %p as active", stream);
+ receive_state = RECEIVE_STATE_PASS;
+ stream->active = TRUE;
+ }
+ 
+ if (new_dir != prev_dir) {
+ gchar *prev_dir_s, *new_dir_s;
+ 
+ prev_dir_s =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ prev_dir);
+ new_dir_s =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ new_dir);
+ 
+ GST_DEBUG_OBJECT (webrtc, "transceiver %" GST_PTR_FORMAT
+ " direction change from %s to %s", rtp_trans, prev_dir_s, new_dir_s);
+ 
+ g_free (prev_dir_s);
+ prev_dir_s = NULL;
+ g_free (new_dir_s);
+ new_dir_s = NULL;
+ 
+ /* direction became inactive: push EOS downstream of the matching src
+ * pad so depayloaders/sinks can finalize */
+ if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) {
+ GstWebRTCBinPad *pad;
+ 
+ pad = _find_pad_for_mline (webrtc, GST_PAD_SRC, media_idx);
+ if (pad) {
+ GstPad *target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad));
+ if (target) {
+ GstPad *peer = gst_pad_get_peer (target);
+ if (peer) {
+ gst_pad_send_event (peer, gst_event_new_eos ());
+ gst_object_unref (peer);
+ }
+ gst_object_unref (target);
+ }
+ gst_object_unref (pad);
+ }
+ 
+ /* XXX: send eos event up the sink pad as well? */
+ }
+ 
+ if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY ||
+ new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) {
+ GstWebRTCBinPad *pad =
+ _find_pad_for_transceiver (webrtc, GST_PAD_SINK, rtp_trans);
+ if (pad) {
+ GST_DEBUG_OBJECT (webrtc, "found existing send pad %" GST_PTR_FORMAT
+ " for transceiver %" GST_PTR_FORMAT, pad, trans);
+ gst_object_unref (pad);
+ } else {
+ GST_DEBUG_OBJECT (webrtc,
+ "creating new send pad for transceiver %" GST_PTR_FORMAT, trans);
+ pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SINK, rtp_trans,
+ G_MAXUINT);
+ _connect_input_stream (webrtc, pad);
+ _add_pad (webrtc, pad);
+ }
+ }
+ if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY ||
+ new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) {
+ GstWebRTCBinPad *pad =
+ _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans);
+ if (pad) {
+ GST_DEBUG_OBJECT (webrtc, "found existing receive pad %" GST_PTR_FORMAT
+ " for transceiver %" GST_PTR_FORMAT, pad, trans);
+ gst_object_unref (pad);
+ } else {
+ GST_DEBUG_OBJECT (webrtc,
+ "creating new receive pad for transceiver %" GST_PTR_FORMAT, trans);
+ pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SRC, rtp_trans,
+ G_MAXUINT);
+ 
+ if (!trans->stream) {
+ TransportStream *item;
+ 
+ item =
+ _get_or_create_transport_stream (webrtc,
+ bundled ? bundle_idx : media_idx, FALSE);
+ webrtc_transceiver_set_transport (trans, item);
+ }
+ 
+ _connect_output_stream (webrtc, trans->stream,
+ bundled ? bundle_idx : media_idx);
+ /* delay adding the pad until rtpbin creates the recv output pad
+ * to ghost to so queries/events travel through the pipeline correctly
+ * as soon as the pad is added */
+ _add_pad_to_list (webrtc, pad);
+ }
+ 
+ }
+ 
+ rtp_trans->mline = media_idx;
+ rtp_trans->current_direction = new_dir;
+ }
+ 
+ if (!bundled || bundle_idx == media_idx) {
+ if (stream->rtxsend || stream->rtxreceive) {
+ _set_rtx_ptmap_from_stream (webrtc, stream);
+ }
+ 
+ g_object_set (stream, "dtls-client",
+ new_setup == GST_WEBRTC_DTLS_SETUP_ACTIVE, NULL);
+ }
+ 
+ /* Must be after setting the "dtls-client" so that data is not pushed into
+ * the dtlssrtp elements before the ssl direction has been set which will
+ * throw SSL errors */
+ if (receive_state != RECEIVE_STATE_UNSET)
+ transport_receive_bin_set_receive_state (stream->receive_bin,
+ receive_state);
+ }
+
+ /* must be called with the pc lock held */
+ /* Allocate the lowest unused data-channel (SCTP stream) id.  Per the data
+  * channel establishment protocol, the DTLS client owns even ids and the
+  * server odd ids; the search is bounded by the SCTP transport's
+  * max-channels property (default 65534).  Returns -1 when the id space is
+  * exhausted or no SCTP transport exists yet. */
+ static gint
+ _generate_data_channel_id (GstWebRTCBin * webrtc)
+ {
+ gboolean is_client;
+ gint new_id = -1, max_channels = 0;
+ 
+ /* The id parity depends on the DTLS role read below; bail out instead of
+ * dereferencing a NULL sctp_transport (the max-channels lookup already
+ * treated it as optional). */
+ if (!webrtc->priv->sctp_transport) {
+ GST_WARNING_OBJECT (webrtc,
+ "Cannot generate a data channel id without an SCTP transport");
+ return -1;
+ }
+ 
+ g_object_get (webrtc->priv->sctp_transport, "max-channels", &max_channels,
+ NULL);
+ if (max_channels <= 0) {
+ max_channels = 65534;
+ }
+ 
+ g_object_get (webrtc->priv->sctp_transport->transport, "client", &is_client,
+ NULL);
+ 
+ /* TODO: a better search algorithm */
+ do {
+ WebRTCDataChannel *channel;
+ 
+ new_id++;
+ 
+ if (new_id < 0 || new_id >= max_channels) {
+ /* exhausted id space */
+ GST_WARNING_OBJECT (webrtc, "Could not find a suitable "
+ "data channel id (max %i)", max_channels);
+ return -1;
+ }
+ 
+ /* client must generate even ids, server must generate odd ids */
+ if (new_id % 2 == ! !is_client)
+ continue;
+ 
+ channel = _find_data_channel_for_id (webrtc, new_id);
+ if (!channel)
+ break;
+ } while (TRUE);
+ 
+ return new_id;
+ }
+
+ /* Apply a negotiated data-channel (m=application) section: configure the
+  * stream's DTLS role from the intersected setup attributes, program the
+  * local/remote SCTP ports into sctpdec/sctpenc, record the negotiated
+  * max-message-size, assign ids to and start negotiation of any pending
+  * data channels, then mark the stream active and unblock its receive bin.
+  * Sets @error when the setup attributes cannot be intersected or an SCTP
+  * port is missing. */
+ static void
+ _update_data_channel_from_sdp_media (GstWebRTCBin * webrtc,
+ const GstSDPMessage * sdp, guint media_idx, TransportStream * stream,
+ GError ** error)
+ {
+ const GstSDPMedia *local_media, *remote_media;
+ GstWebRTCDTLSSetup local_setup, remote_setup, new_setup;
+ TransportReceiveBin *receive;
+ int local_port, remote_port;
+ guint64 local_max_size, remote_max_size, max_size;
+ int i;
+ 
+ local_media =
+ gst_sdp_message_get_media (webrtc->current_local_description->sdp,
+ media_idx);
+ remote_media =
+ gst_sdp_message_get_media (webrtc->current_remote_description->sdp,
+ media_idx);
+ 
+ local_setup = _get_dtls_setup_from_media (local_media);
+ remote_setup = _get_dtls_setup_from_media (remote_media);
+ new_setup = _get_final_setup (local_setup, remote_setup);
+ if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Cannot intersect dtls setup for media %u", media_idx);
+ return;
+ }
+ 
+ /* data channel is always rtcp-muxed to avoid generating ICE candidates
+ * for RTCP */
+ g_object_set (stream, "dtls-client",
+ new_setup == GST_WEBRTC_DTLS_SETUP_ACTIVE, NULL);
+ 
+ local_port = _get_sctp_port_from_media (local_media);
+ /* fixed: the remote port was erroneously read from local_media, so a
+ * missing remote sctp-port could never be detected below */
+ remote_port = _get_sctp_port_from_media (remote_media);
+ if (local_port == -1 || remote_port == -1) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Could not find sctp port for media %u (local %i, remote %i)",
+ media_idx, local_port, remote_port);
+ return;
+ }
+ 
+ /* a max-message-size of 0 means "unlimited" on either side */
+ if (0 == (local_max_size =
+ _get_sctp_max_message_size_from_media (local_media)))
+ local_max_size = G_MAXUINT64;
+ if (0 == (remote_max_size =
+ _get_sctp_max_message_size_from_media (remote_media)))
+ remote_max_size = G_MAXUINT64;
+ max_size = MIN (local_max_size, remote_max_size);
+ 
+ webrtc->priv->sctp_transport->max_message_size = max_size;
+ 
+ {
+ guint orig_local_port, orig_remote_port;
+ 
+ /* XXX: sctpassociation warns if we are in the wrong state */
+ g_object_get (webrtc->priv->sctp_transport->sctpdec, "local-sctp-port",
+ &orig_local_port, NULL);
+ 
+ if (orig_local_port != local_port)
+ g_object_set (webrtc->priv->sctp_transport->sctpdec, "local-sctp-port",
+ local_port, NULL);
+ 
+ g_object_get (webrtc->priv->sctp_transport->sctpenc, "remote-sctp-port",
+ &orig_remote_port, NULL);
+ if (orig_remote_port != remote_port)
+ g_object_set (webrtc->priv->sctp_transport->sctpenc, "remote-sctp-port",
+ remote_port, NULL);
+ }
+ 
+ /* give every pending channel an id and, if the association is already up,
+ * link it to the transport and start the open handshake */
+ DC_LOCK (webrtc);
+ for (i = 0; i < webrtc->priv->data_channels->len; i++) {
+ WebRTCDataChannel *channel;
+ 
+ channel = g_ptr_array_index (webrtc->priv->data_channels, i);
+ 
+ if (channel->parent.id == -1)
+ channel->parent.id = _generate_data_channel_id (webrtc);
+ if (channel->parent.id == -1)
+ GST_ELEMENT_WARNING (webrtc, RESOURCE, NOT_FOUND,
+ ("%s", "Failed to generate an identifier for a data channel"), NULL);
+ 
+ if (webrtc->priv->sctp_transport->association_established
+ && !channel->parent.negotiated && !channel->opened) {
+ webrtc_data_channel_link_to_sctp (channel, webrtc->priv->sctp_transport);
+ webrtc_data_channel_start_negotiation (channel);
+ }
+ }
+ DC_UNLOCK (webrtc);
+ 
+ stream->active = TRUE;
+ 
+ receive = TRANSPORT_RECEIVE_BIN (stream->receive_bin);
+ transport_receive_bin_set_receive_state (receive, RECEIVE_STATE_PASS);
+ }
+
+ /* FindTransceiverFunc: TRUE for a transceiver that is still unassociated
+  * (no mid, no mline, not stopped) and whose kind is either unknown or
+  * equal to the kind packed into @data. */
+ static gboolean
+ _find_compatible_unassociated_transceiver (GstWebRTCRTPTransceiver * p1,
+ gconstpointer data)
+ {
+ GstWebRTCKind wanted = GPOINTER_TO_INT (data);
+ gboolean unassociated;
+ 
+ unassociated = p1->mid == NULL && p1->mline == -1 && !p1->stopped;
+ 
+ return unassociated
+ && (p1->kind == GST_WEBRTC_KIND_UNKNOWN || p1->kind == wanted);
+ }
+
+ /* Lazily create the single bundle send path: rtpfunnel -> queue ->
+  * rtpbin.send_rtp_sink_%u, and link rtpbin.send_rtp_src_%u to the
+  * session's transport send bin.  A no-op once webrtc->rtpfunnel exists.
+  * On Tizen, an optional network-simulator element can be inserted on the
+  * rtpbin -> send-bin link instead of a direct pad link. */
+ static void
+ _connect_rtpfunnel (GstWebRTCBin * webrtc, guint session_id)
+ {
+ gchar *pad_name;
+ GstPad *queue_srcpad;
+ GstPad *rtp_sink;
+ TransportStream *stream = _find_transport_for_session (webrtc, session_id);
+ GstElement *queue;
+ 
+ g_assert (stream);
+ 
+ if (webrtc->rtpfunnel)
+ goto done;
+ 
+ webrtc->rtpfunnel = gst_element_factory_make ("rtpfunnel", NULL);
+ gst_bin_add (GST_BIN (webrtc), webrtc->rtpfunnel);
+ gst_element_sync_state_with_parent (webrtc->rtpfunnel);
+ 
+ queue = gst_element_factory_make ("queue", NULL);
+ gst_bin_add (GST_BIN (webrtc), queue);
+ gst_element_sync_state_with_parent (queue);
+ 
+ gst_element_link (webrtc->rtpfunnel, queue);
+ 
+ queue_srcpad = gst_element_get_static_pad (queue, "src");
+ 
+ pad_name = g_strdup_printf ("send_rtp_sink_%d", session_id);
+ rtp_sink = gst_element_request_pad_simple (webrtc->rtpbin, pad_name);
+ g_free (pad_name);
+ gst_pad_link (queue_srcpad, rtp_sink);
+ gst_object_unref (queue_srcpad);
+ gst_object_unref (rtp_sink);
+ 
+ pad_name = g_strdup_printf ("send_rtp_src_%d", session_id);
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ if (webrtc->priv->netsim) {
++ _insert_netsim_element_between (webrtc, GST_ELEMENT (webrtc->rtpbin), pad_name,
++ GST_ELEMENT (stream->send_bin), "rtp_sink", session_id);
++ } else {
++#endif
+ if (!gst_element_link_pads (GST_ELEMENT (webrtc->rtpbin), pad_name,
+ GST_ELEMENT (stream->send_bin), "rtp_sink"))
+ g_warn_if_reached ();
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ }
++#endif
+ g_free (pad_name);
+ 
+ done:
+ return;
+ }
+
+ /* Walk every m= section of @sdp and synchronise webrtcbin's transport
+  * streams, transceivers and data channels with it.  BUNDLE groups map all
+  * bundled m-lines onto a single transport stream (whose ptmap is rebuilt
+  * up front) and connect the rtpfunnel.  Media sections without a matching
+  * transceiver get an unassociated one reused or a new one created.
+  * Returns FALSE and sets @error on malformed SDP or state mismatches. */
+ static gboolean
+ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source,
+ GstWebRTCSessionDescription * sdp, GError ** error)
+ {
+ int i;
+ gboolean ret = FALSE;
+ GStrv bundled = NULL;
+ guint bundle_idx = 0;
+ TransportStream *bundle_stream = NULL;
+ 
+ /* FIXME: With some peers, it's possible we could have
+ * multiple bundles to deal with, although I've never seen one yet */
+ if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE)
+ if (!_parse_bundle (sdp->sdp, &bundled, error))
+ goto done;
+ 
+ if (bundled) {
+ 
+ if (!_get_bundle_index (sdp->sdp, bundled, &bundle_idx)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Bundle tag is %s but no media found matching", bundled[0]);
+ goto done;
+ }
+ 
+ bundle_stream = _get_or_create_transport_stream (webrtc, bundle_idx,
+ _message_media_is_datachannel (sdp->sdp, bundle_idx));
+ /* Mark the bundle stream as inactive to start. It will be set to TRUE
+ * by any bundled mline that is active, and at the end we set the
+ * receivebin to BLOCK if all mlines were inactive. */
+ bundle_stream->active = FALSE;
+ 
+ g_array_set_size (bundle_stream->ptmap, 0);
+ for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) {
+ /* When bundling, we need to do this up front, or else RTX
+ * parameters aren't set up properly for the bundled streams */
+ _update_transport_ptmap_from_media (webrtc, bundle_stream, sdp->sdp, i);
+ }
+ 
+ _connect_rtpfunnel (webrtc, bundle_idx);
+ }
+ 
+ for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i);
+ TransportStream *stream;
+ GstWebRTCRTPTransceiver *trans;
+ guint transport_idx;
+ 
+ /* skip rejected media */
+ if (gst_sdp_media_get_port (media) == 0)
+ continue;
+ 
+ if (bundled)
+ transport_idx = bundle_idx;
+ else
+ transport_idx = i;
+ 
+ trans = _find_transceiver_for_sdp_media (webrtc, sdp->sdp, i);
+ 
+ stream = _get_or_create_transport_stream (webrtc, transport_idx,
+ _message_media_is_datachannel (sdp->sdp, transport_idx));
+ if (!bundled) {
+ /* When bundling, these were all set up above, but when not
+ * bundling we need to do it now */
+ g_array_set_size (stream->ptmap, 0);
+ _update_transport_ptmap_from_media (webrtc, stream, sdp->sdp, i);
+ }
+ 
+ if (trans)
+ webrtc_transceiver_set_transport ((WebRTCTransceiver *) trans, stream);
+ 
+ /* a local offer must reference transceivers we created ourselves */
+ if (source == SDP_LOCAL && sdp->type == GST_WEBRTC_SDP_TYPE_OFFER && !trans) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "State mismatch. Could not find local transceiver by mline %u", i);
+ goto done;
+ } else {
+ if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0 ||
+ g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0) {
+ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN;
+ 
+ /* No existing transceiver, find an unused one */
+ if (!trans) {
+ if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0)
+ kind = GST_WEBRTC_KIND_AUDIO;
+ else if (g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0)
+ kind = GST_WEBRTC_KIND_VIDEO;
+ else
+ GST_LOG_OBJECT (webrtc, "Unknown media kind %s",
+ GST_STR_NULL (gst_sdp_media_get_media (media)));
+ 
+ trans = _find_transceiver (webrtc, GINT_TO_POINTER (kind),
+ (FindTransceiverFunc) _find_compatible_unassociated_transceiver);
+ }
+ 
+ /* Still no transceiver? Create one */
+ /* XXX: default to the advertised direction in the sdp for new
+ * transceivers. The spec doesn't actually say what happens here, only
+ * that calls to setDirection will change the value. Nothing about
+ * a default value when the transceiver is created internally */
+ if (!trans) {
+ WebRTCTransceiver *t = _create_webrtc_transceiver (webrtc,
+ _get_direction_from_media (media), i, kind, NULL);
+ webrtc_transceiver_set_transport (t, stream);
+ trans = GST_WEBRTC_RTP_TRANSCEIVER (t);
+ }
+ 
+ _update_transceiver_from_sdp_media (webrtc, sdp->sdp, i, stream,
+ trans, bundled, bundle_idx, error);
+ if (error && *error)
+ goto done;
+ } else if (_message_media_is_datachannel (sdp->sdp, i)) {
+ _update_data_channel_from_sdp_media (webrtc, sdp->sdp, i, stream,
+ error);
+ if (error && *error)
+ goto done;
+ } else {
+ GST_ERROR_OBJECT (webrtc, "Unknown media type in SDP at index %u", i);
+ }
+ }
+ }
+ 
+ if (bundle_stream && bundle_stream->active == FALSE) {
+ /* No bundled mline marked the bundle as active, so block the receive bin, as
+ * this bundle is completely inactive */
+ GST_LOG_OBJECT (webrtc,
+ "All mlines in bundle %u are inactive. Blocking receiver", bundle_idx);
+ transport_receive_bin_set_receive_state (bundle_stream->receive_bin,
+ RECEIVE_STATE_BLOCK);
+ }
+ 
+ ret = TRUE;
+ 
+ done:
+ g_strfreev (bundled);
+ 
+ return ret;
+ }
+
+ /* Returns FALSE when @new has fewer m= sections than @previous, i.e.
+  * transceivers would have been removed -- renegotiating a smaller session
+  * is not supported.  TRUE when there is no previous description. */
+ static gboolean
+ check_transceivers_not_removed (GstWebRTCBin * webrtc,
+ GstWebRTCSessionDescription * previous, GstWebRTCSessionDescription * new)
+ {
+ guint prev_len, new_len;
+ 
+ if (previous == NULL)
+ return TRUE;
+ 
+ prev_len = gst_sdp_message_medias_len (previous->sdp);
+ new_len = gst_sdp_message_medias_len (new->sdp);
+ 
+ return prev_len <= new_len;
+ }
+
+ /* Validate transceivers with a locked m-line against @sdp: the transceiver
+  * must sit at its locked m-line index and, when its kind is known, the m=
+  * section's media type must match that kind.  Sets @error and returns
+  * FALSE on the first violation. */
+ static gboolean
+ check_locked_mlines (GstWebRTCBin * webrtc, GstWebRTCSessionDescription * sdp,
+ GError ** error)
+ {
+ guint i;
+ 
+ for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i);
+ GstWebRTCRTPTransceiver *rtp_trans;
+ WebRTCTransceiver *trans;
+ 
+ rtp_trans = _find_transceiver_for_sdp_media (webrtc, sdp->sdp, i);
+ /* only look for matching mid */
+ if (rtp_trans == NULL)
+ continue;
+ 
+ trans = WEBRTC_TRANSCEIVER (rtp_trans);
+ 
+ /* We only validate the locked mlines for now */
+ if (!trans->mline_locked)
+ continue;
+ 
+ if (rtp_trans->mline != i) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_IMPOSSIBLE_MLINE_RESTRICTION,
+ "m-line with mid %s is at position %d, but was locked to %d, "
+ "rejecting", rtp_trans->mid, i, rtp_trans->mline);
+ return FALSE;
+ }
+ 
+ if (rtp_trans->kind != GST_WEBRTC_KIND_UNKNOWN) {
+ /* SDP has audio here but the transceiver was locked to the other
+ * (video) kind; the previous message text had audio/video inverted
+ * relative to the condition */
+ if (!g_strcmp0 (gst_sdp_media_get_media (media), "audio") &&
+ rtp_trans->kind != GST_WEBRTC_KIND_AUDIO) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_IMPOSSIBLE_MLINE_RESTRICTION,
+ "m-line %d was locked to video, but SDP has %s media", i,
+ gst_sdp_media_get_media (media));
+ return FALSE;
+ }
+ 
+ if (!g_strcmp0 (gst_sdp_media_get_media (media), "video") &&
+ rtp_trans->kind != GST_WEBRTC_KIND_VIDEO) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_IMPOSSIBLE_MLINE_RESTRICTION,
+ "m-line %d was locked to audio, but SDP has %s media", i,
+ gst_sdp_media_get_media (media));
+ return FALSE;
+ }
+ }
+ }
+ 
+ return TRUE;
+ }
+
+
+ /* Argument bundle for the set-(local/remote)-description task: which side
+  * supplied the description, and the description itself. */
+ struct set_description
+ {
+ SDPSource source;
+ GstWebRTCSessionDescription *sdp;
+ };
+
+ /* Return the description that a new description of @type from @source
+  * would replace: the current local or remote description for
+  * offer/pranswer/answer, NULL for a rollback.  Any other @type value
+  * indicates memory corruption and asserts. */
+ static GstWebRTCSessionDescription *
+ get_previous_description (GstWebRTCBin * webrtc, SDPSource source,
+ GstWebRTCSDPType type)
+ {
+ switch (type) {
+ case GST_WEBRTC_SDP_TYPE_OFFER:
+ case GST_WEBRTC_SDP_TYPE_PRANSWER:
+ case GST_WEBRTC_SDP_TYPE_ANSWER:
+ return source == SDP_LOCAL ? webrtc->current_local_description
+ : webrtc->current_remote_description;
+ case GST_WEBRTC_SDP_TYPE_ROLLBACK:
+ return NULL;
+ default:
+ /* other values mean memory corruption/uninitialized! */
+ g_assert_not_reached ();
+ break;
+ }
+ 
+ return NULL;
+ }
+
+ /* http://w3c.github.io/webrtc-pc/#set-description */
+ static GstStructure *
+ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd)
+ {
+ GstWebRTCSignalingState new_signaling_state = webrtc->signaling_state;
+ gboolean signalling_state_changed = FALSE;
+ GError *error = NULL;
+ GStrv bundled = NULL;
+ guint bundle_idx = 0;
+ guint i;
+
+ {
+ gchar *state = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE,
+ webrtc->signaling_state);
+ gchar *type_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_SDP_TYPE, sd->sdp->type);
+ gchar *sdp_text = gst_sdp_message_as_text (sd->sdp->sdp);
+ GST_INFO_OBJECT (webrtc, "Attempting to set %s %s in the %s state",
+ _sdp_source_to_string (sd->source), type_str, state);
+ GST_TRACE_OBJECT (webrtc, "SDP contents\n%s", sdp_text);
+ g_free (sdp_text);
+ g_free (state);
+ g_free (type_str);
+ }
+
+ if (!validate_sdp (webrtc->signaling_state, sd->source, sd->sdp, &error))
+ goto out;
+
+ if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE)
+ if (!_parse_bundle (sd->sdp->sdp, &bundled, &error))
+ goto out;
+
+ if (bundled) {
+ if (!_get_bundle_index (sd->sdp->sdp, bundled, &bundle_idx)) {
+ g_set_error (&error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "Bundle tag is %s but no matching media found", bundled[0]);
+ goto out;
+ }
+ }
+
+ if (!check_transceivers_not_removed (webrtc,
+ get_previous_description (webrtc, sd->source, sd->sdp->type),
+ sd->sdp)) {
+ g_set_error_literal (&error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "m=lines removed from the SDP. Processing a completely new connection "
+ "is not currently supported.");
+ goto out;
+ }
+
+ if (!check_locked_mlines (webrtc, sd->sdp, &error))
+ goto out;
+
+ switch (sd->sdp->type) {
+ case GST_WEBRTC_SDP_TYPE_OFFER:{
+ if (sd->source == SDP_LOCAL) {
+ if (webrtc->pending_local_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_local_description);
+ webrtc->pending_local_description =
+ gst_webrtc_session_description_copy (sd->sdp);
+ new_signaling_state = GST_WEBRTC_SIGNALING_STATE_HAVE_LOCAL_OFFER;
+ } else {
+ if (webrtc->pending_remote_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_remote_description);
+ webrtc->pending_remote_description =
+ gst_webrtc_session_description_copy (sd->sdp);
+ new_signaling_state = GST_WEBRTC_SIGNALING_STATE_HAVE_REMOTE_OFFER;
+ }
+ break;
+ }
+ case GST_WEBRTC_SDP_TYPE_ANSWER:{
+ if (sd->source == SDP_LOCAL) {
+ if (webrtc->current_local_description)
+ gst_webrtc_session_description_free
+ (webrtc->current_local_description);
+ webrtc->current_local_description =
+ gst_webrtc_session_description_copy (sd->sdp);
+
+ if (webrtc->current_remote_description)
+ gst_webrtc_session_description_free
+ (webrtc->current_remote_description);
+ webrtc->current_remote_description = webrtc->pending_remote_description;
+ webrtc->pending_remote_description = NULL;
+ } else {
+ if (webrtc->current_remote_description)
+ gst_webrtc_session_description_free
+ (webrtc->current_remote_description);
+ webrtc->current_remote_description =
+ gst_webrtc_session_description_copy (sd->sdp);
+
+ if (webrtc->current_local_description)
+ gst_webrtc_session_description_free
+ (webrtc->current_local_description);
+ webrtc->current_local_description = webrtc->pending_local_description;
+ webrtc->pending_local_description = NULL;
+ }
+
+ if (webrtc->pending_local_description)
+ gst_webrtc_session_description_free (webrtc->pending_local_description);
+ webrtc->pending_local_description = NULL;
+
+ if (webrtc->pending_remote_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_remote_description);
+ webrtc->pending_remote_description = NULL;
+
+ new_signaling_state = GST_WEBRTC_SIGNALING_STATE_STABLE;
+ break;
+ }
+ case GST_WEBRTC_SDP_TYPE_ROLLBACK:{
+ GST_FIXME_OBJECT (webrtc, "rollbacks are completely untested");
+ if (sd->source == SDP_LOCAL) {
+ if (webrtc->pending_local_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_local_description);
+ webrtc->pending_local_description = NULL;
+ } else {
+ if (webrtc->pending_remote_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_remote_description);
+ webrtc->pending_remote_description = NULL;
+ }
+
+ new_signaling_state = GST_WEBRTC_SIGNALING_STATE_STABLE;
+ break;
+ }
+ case GST_WEBRTC_SDP_TYPE_PRANSWER:{
+ GST_FIXME_OBJECT (webrtc, "pranswers are completely untested");
+ if (sd->source == SDP_LOCAL) {
+ if (webrtc->pending_local_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_local_description);
+ webrtc->pending_local_description =
+ gst_webrtc_session_description_copy (sd->sdp);
+
+ new_signaling_state = GST_WEBRTC_SIGNALING_STATE_HAVE_LOCAL_PRANSWER;
+ } else {
+ if (webrtc->pending_remote_description)
+ gst_webrtc_session_description_free
+ (webrtc->pending_remote_description);
+ webrtc->pending_remote_description =
+ gst_webrtc_session_description_copy (sd->sdp);
+
+ new_signaling_state = GST_WEBRTC_SIGNALING_STATE_HAVE_REMOTE_PRANSWER;
+ }
+ break;
+ }
+ }
+
+ if (sd->sdp->type == GST_WEBRTC_SDP_TYPE_ROLLBACK) {
+ /* FIXME:
+ * If the mid value of an RTCRtpTransceiver was set to a non-null value
+ * by the RTCSessionDescription that is being rolled back, set the mid
+ * value of that transceiver to null, as described by [JSEP]
+ * (section 4.1.7.2.).
+ * If an RTCRtpTransceiver was created by applying the
+ * RTCSessionDescription that is being rolled back, and a track has not
+ * been attached to it via addTrack, remove that transceiver from
+ * connection's set of transceivers, as described by [JSEP]
+ * (section 4.1.7.2.).
+ * Restore the value of connection's [[ sctpTransport]] internal slot
+ * to its value at the last stable signaling state.
+ */
+ }
+
+ if (webrtc->signaling_state != new_signaling_state) {
+ webrtc->signaling_state = new_signaling_state;
+ signalling_state_changed = TRUE;
+ }
+
+ {
+ gboolean ice_controller = FALSE;
+
+ /* get the current value so we don't change ice controller from TRUE to
+ * FALSE on renegotiation or once set to TRUE for the initial local offer */
+ ice_controller = gst_webrtc_ice_get_is_controller (webrtc->priv->ice);
+
+ /* we control ice negotiation if we send the initial offer */
+ ice_controller |=
+ new_signaling_state == GST_WEBRTC_SIGNALING_STATE_HAVE_LOCAL_OFFER
+ && webrtc->current_remote_description == NULL;
+ /* or, if the remote is an ice-lite peer */
+ ice_controller |= new_signaling_state == GST_WEBRTC_SIGNALING_STATE_STABLE
+ && webrtc->current_remote_description
+ && _message_has_attribute_key (webrtc->current_remote_description->sdp,
+ "ice-lite");
+
+ GST_DEBUG_OBJECT (webrtc, "we are in ice controlling mode: %s",
+ ice_controller ? "true" : "false");
+ gst_webrtc_ice_set_is_controller (webrtc->priv->ice, ice_controller);
+ }
+
+ if (new_signaling_state == GST_WEBRTC_SIGNALING_STATE_STABLE) {
+ GList *tmp;
+
+ /* media modifications */
+ if (!_update_transceivers_from_sdp (webrtc, sd->source, sd->sdp, &error))
+ goto out;
+
+ for (tmp = webrtc->priv->pending_sink_transceivers; tmp;) {
+ GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (tmp->data);
+ GstWebRTCRTPTransceiverDirection new_dir;
+ GList *old = tmp;
+ const GstSDPMedia *media;
+
+ if (!pad->received_caps) {
+ GST_LOG_OBJECT (pad, "has not received any caps yet. Skipping.");
+ tmp = tmp->next;
+ continue;
+ }
+
+ if (pad->trans->mline >= gst_sdp_message_medias_len (sd->sdp->sdp)) {
+ GST_DEBUG_OBJECT (pad, "not mentioned in this description. Skipping");
+ tmp = tmp->next;
+ continue;
+ }
+
+ media = gst_sdp_message_get_media (sd->sdp->sdp, pad->trans->mline);
+ /* skip rejected media */
+ if (gst_sdp_media_get_port (media) == 0) {
+ /* FIXME: arrange for an appropriate flow return */
+ GST_FIXME_OBJECT (pad, "Media has been rejected. Need to arrange for "
+ "a more correct flow return.");
+ tmp = tmp->next;
+ continue;
+ }
+
+ if (!pad->trans) {
+ GST_LOG_OBJECT (pad, "doesn't have a transceiver");
+ tmp = tmp->next;
+ continue;
+ }
+
+ new_dir = pad->trans->direction;
+ if (new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY &&
+ new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) {
+ GST_LOG_OBJECT (pad, "transceiver %" GST_PTR_FORMAT " is not sending "
+ "data at the moment. Not connecting input stream yet", pad->trans);
+ tmp = tmp->next;
+ continue;
+ }
+
+ GST_LOG_OBJECT (pad, "Connecting input stream to rtpbin with "
+ "transceiver %" GST_PTR_FORMAT " and caps %" GST_PTR_FORMAT,
+ pad->trans, pad->received_caps);
+ _connect_input_stream (webrtc, pad);
+ gst_pad_remove_probe (GST_PAD (pad), pad->block_id);
+ pad->block_id = 0;
+
+ tmp = tmp->next;
+ gst_object_unref (old->data);
+ webrtc->priv->pending_sink_transceivers =
+ g_list_delete_link (webrtc->priv->pending_sink_transceivers, old);
+ }
+ }
+
+ for (i = 0; i < gst_sdp_message_medias_len (sd->sdp->sdp); i++) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (sd->sdp->sdp, i);
+ gchar *ufrag, *pwd;
+ TransportStream *item;
+
+ item =
+ _get_or_create_transport_stream (webrtc, bundled ? bundle_idx : i,
+ _message_media_is_datachannel (sd->sdp->sdp, bundled ? bundle_idx : i));
+
+ if (sd->source == SDP_REMOTE) {
+ guint j;
+
+ for (j = 0; j < gst_sdp_media_attributes_len (media); j++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, j);
+
+ if (g_strcmp0 (attr->key, "ssrc") == 0) {
+ GStrv split = g_strsplit (attr->value, " ", 0);
+ guint32 ssrc;
+
+ if (split[0] && sscanf (split[0], "%u", &ssrc) && split[1]
+ && g_str_has_prefix (split[1], "cname:")) {
+ g_ptr_array_add (item->remote_ssrcmap, ssrcmap_item_new (ssrc, i));
+ }
+ g_strfreev (split);
+ }
+ }
+ }
+
+ if (sd->source == SDP_LOCAL && (!bundled || bundle_idx == i)) {
+ _get_ice_credentials_from_sdp_media (sd->sdp->sdp, i, &ufrag, &pwd);
+
+ gst_webrtc_ice_set_local_credentials (webrtc->priv->ice,
+ item->stream, ufrag, pwd);
+ g_free (ufrag);
+ g_free (pwd);
+ } else if (sd->source == SDP_REMOTE && !_media_is_bundle_only (media)) {
+ _get_ice_credentials_from_sdp_media (sd->sdp->sdp, i, &ufrag, &pwd);
+
+ gst_webrtc_ice_set_remote_credentials (webrtc->priv->ice,
+ item->stream, ufrag, pwd);
+ g_free (ufrag);
+ g_free (pwd);
+ }
+ }
+
+ if (sd->source == SDP_LOCAL) {
+ for (i = 0; i < webrtc->priv->ice_stream_map->len; i++) {
+ IceStreamItem *item =
+ &g_array_index (webrtc->priv->ice_stream_map, IceStreamItem, i);
+
+ gst_webrtc_ice_gather_candidates (webrtc->priv->ice, item->stream);
+ }
+ }
+
+ /* Add any pending trickle ICE candidates if we have both offer and answer */
+ if (webrtc->current_local_description && webrtc->current_remote_description) {
+ int i;
+
+ GstWebRTCSessionDescription *remote_sdp =
+ webrtc->current_remote_description;
+
+ /* Add any remote ICE candidates from the remote description to
+ * support non-trickle peers first */
+ for (i = 0; i < gst_sdp_message_medias_len (remote_sdp->sdp); i++) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (remote_sdp->sdp, i);
+ _add_ice_candidates_from_sdp (webrtc, i, media);
+ }
+
+ ICE_LOCK (webrtc);
+ for (i = 0; i < webrtc->priv->pending_remote_ice_candidates->len; i++) {
+ IceCandidateItem *item =
+ &g_array_index (webrtc->priv->pending_remote_ice_candidates,
+ IceCandidateItem, i);
+
+ _add_ice_candidate (webrtc, item, TRUE);
+ }
+ g_array_set_size (webrtc->priv->pending_remote_ice_candidates, 0);
+ ICE_UNLOCK (webrtc);
+ }
+
+ /*
+ * If connection's signaling state changed above, fire an event named
+ * signalingstatechange at connection.
+ */
+ if (signalling_state_changed) {
+ gchar *from = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE,
+ webrtc->signaling_state);
+ gchar *to = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE,
+ new_signaling_state);
+ GST_TRACE_OBJECT (webrtc, "notify signaling-state from %s "
+ "to %s", from, to);
+ PC_UNLOCK (webrtc);
+ g_object_notify (G_OBJECT (webrtc), "signaling-state");
+ PC_LOCK (webrtc);
+
+ g_free (from);
+ g_free (to);
+ }
+
+ if (webrtc->signaling_state == GST_WEBRTC_SIGNALING_STATE_STABLE) {
+ gboolean prev_need_negotiation = webrtc->priv->need_negotiation;
+
+ /* If connection's signaling state is now stable, update the
+ * negotiation-needed flag. If connection's [[ needNegotiation]] slot
+ * was true both before and after this update, queue a task to check
+ * connection's [[needNegotiation]] slot and, if still true, fire a
+ * simple event named negotiationneeded at connection.*/
+ _update_need_negotiation (webrtc);
+ if (prev_need_negotiation && webrtc->priv->need_negotiation) {
+ _check_need_negotiation_task (webrtc, NULL);
+ }
+ }
+
+ out:
+ g_strfreev (bundled);
+
+ if (error) {
+ GstStructure *s = gst_structure_new ("application/x-gstwebrtcbin-error",
+ "error", G_TYPE_ERROR, error, NULL);
+ GST_WARNING_OBJECT (webrtc, "returning error: %s", error->message);
+ g_clear_error (&error);
+ return s;
+ } else {
+ return NULL;
+ }
+ }
+
+ /* GDestroyNotify for a queued set-description task: releases the copied
+  * session description (if any) and the task struct itself. */
+ static void
+ _free_set_description_data (struct set_description *sd)
+ {
+ if (sd->sdp)
+ gst_webrtc_session_description_free (sd->sdp);
+ g_free (sd);
+ }
+
+ /* setRemoteDescription(): copies @remote_sdp and queues
+  * _set_description_task on the PC thread; @promise is answered from the
+  * task, or immediately with an error structure if the bin is closed and
+  * the task cannot be enqueued. NULL input replies NULL and warns. */
+ static void
+ gst_webrtc_bin_set_remote_description (GstWebRTCBin * webrtc,
+ GstWebRTCSessionDescription * remote_sdp, GstPromise * promise)
+ {
+ struct set_description *sd;
+
+ if (remote_sdp == NULL)
+ goto bad_input;
+ if (remote_sdp->sdp == NULL)
+ goto bad_input;
+
+ sd = g_new0 (struct set_description, 1);
+ sd->source = SDP_REMOTE;
+ /* copy: the caller keeps ownership of @remote_sdp */
+ sd->sdp = gst_webrtc_session_description_copy (remote_sdp);
+
+ if (!gst_webrtc_bin_enqueue_task (webrtc,
+ (GstWebRTCBinFunc) _set_description_task, sd,
+ (GDestroyNotify) _free_set_description_data, promise)) {
+ /* enqueue failed => webrtcbin is closed; reject the promise directly */
+ GError *error =
+ g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED,
+ "Could not set remote description. webrtcbin is closed.");
+ GstStructure *s =
+ gst_structure_new ("application/x-gstwebrtcbin-promise-error",
+ "error", G_TYPE_ERROR, error, NULL);
+
+ gst_promise_reply (promise, s);
+
+ g_clear_error (&error);
+ }
+
+ return;
+
+ bad_input:
+ {
+ gst_promise_reply (promise, NULL);
+ g_return_if_reached ();
+ }
+ }
+
+ /* setLocalDescription(): copies @local_sdp and queues
+  * _set_description_task on the PC thread; @promise is answered from the
+  * task, or immediately with an error structure if the bin is closed and
+  * the task cannot be enqueued. NULL input replies NULL and warns.
+  *
+  * Fix: the closed-bin error message said "remote description" (copy-paste
+  * from the remote setter); it now correctly says "local description". */
+ static void
+ gst_webrtc_bin_set_local_description (GstWebRTCBin * webrtc,
+ GstWebRTCSessionDescription * local_sdp, GstPromise * promise)
+ {
+ struct set_description *sd;
+
+ if (local_sdp == NULL)
+ goto bad_input;
+ if (local_sdp->sdp == NULL)
+ goto bad_input;
+
+ sd = g_new0 (struct set_description, 1);
+ sd->source = SDP_LOCAL;
+ /* copy: the caller keeps ownership of @local_sdp */
+ sd->sdp = gst_webrtc_session_description_copy (local_sdp);
+
+ if (!gst_webrtc_bin_enqueue_task (webrtc,
+ (GstWebRTCBinFunc) _set_description_task, sd,
+ (GDestroyNotify) _free_set_description_data, promise)) {
+ /* enqueue failed => webrtcbin is closed; reject the promise directly */
+ GError *error =
+ g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED,
+ "Could not set local description. webrtcbin is closed.");
+ GstStructure *s =
+ gst_structure_new ("application/x-gstwebrtcbin-promise-error",
+ "error", G_TYPE_ERROR, error, NULL);
+
+ gst_promise_reply (promise, s);
+
+ g_clear_error (&error);
+ }
+
+ return;
+
+ bad_input:
+ {
+ gst_promise_reply (promise, NULL);
+ g_return_if_reached ();
+ }
+ }
+
+ /* PC-thread task for add-ice-candidate: if both descriptions are not yet
+  * set, stash the candidate (stealing its string) for later processing in
+  * _set_description_task; otherwise hand it to the ICE agent right away. */
+ static GstStructure *
+ _add_ice_candidate_task (GstWebRTCBin * webrtc, IceCandidateItem * item)
+ {
+ if (!webrtc->current_local_description || !webrtc->current_remote_description) {
+ IceCandidateItem new;
+ new.mlineindex = item->mlineindex;
+ /* steal the string so the item's destroy-notify won't free it */
+ new.candidate = g_steal_pointer (&item->candidate);
+
+ ICE_LOCK (webrtc);
+ g_array_append_val (webrtc->priv->pending_remote_ice_candidates, new);
+ ICE_UNLOCK (webrtc);
+ } else {
+ _add_ice_candidate (webrtc, item, FALSE);
+ }
+
+ return NULL;
+ }
+
+ /* GDestroyNotify for a heap-allocated IceCandidateItem. */
+ static void
+ _free_ice_candidate_item (IceCandidateItem * item)
+ {
+ _clear_ice_candidate_item (item);
+ g_free (item);
+ }
+
+ /* addIceCandidate(): normalises the candidate line to "a=candidate:..."
+  * form (an empty/NULL @attr leaves candidate NULL, signalling
+  * end-of-candidates) and queues processing on the PC thread. */
+ static void
+ gst_webrtc_bin_add_ice_candidate (GstWebRTCBin * webrtc, guint mline,
+ const gchar * attr)
+ {
+ IceCandidateItem *item;
+
+ item = g_new0 (IceCandidateItem, 1);
+ item->mlineindex = mline;
+ if (attr && attr[0] != 0) {
+ if (!g_ascii_strncasecmp (attr, "a=candidate:", 12))
+ item->candidate = g_strdup (attr);
+ else if (!g_ascii_strncasecmp (attr, "candidate:", 10))
+ item->candidate = g_strdup_printf ("a=%s", attr);
+ }
+ gst_webrtc_bin_enqueue_task (webrtc,
+ (GstWebRTCBinFunc) _add_ice_candidate_task, item,
+ (GDestroyNotify) _free_ice_candidate_item, NULL);
+ }
+
+ /* PC-thread task that drains pending locally-gathered ICE candidates:
+  * merges each into the current/pending local SDP and emits
+  * "on-ice-candidate" per candidate (dropping PC_LOCK around the signal
+  * to avoid re-entrancy deadlocks). The pending array is swapped out
+  * under ICE_LOCK so it can be processed without holding the lock. */
+ static GstStructure *
+ _on_local_ice_candidate_task (GstWebRTCBin * webrtc)
+ {
+ gsize i;
+ GArray *items;
+
+ ICE_LOCK (webrtc);
+ if (webrtc->priv->pending_local_ice_candidates->len == 0) {
+ ICE_UNLOCK (webrtc);
+ GST_LOG_OBJECT (webrtc, "No ICE candidates to process right now");
+ return NULL; /* Nothing to process */
+ }
+ /* Take the array so we can process it all and free it later
+ * without holding the lock
+ * FIXME: When we depend on GLib 2.64, we can use g_array_steal()
+ * here */
+ items = webrtc->priv->pending_local_ice_candidates;
+ /* Replace with a new array */
+ webrtc->priv->pending_local_ice_candidates =
+ g_array_new (FALSE, TRUE, sizeof (IceCandidateItem));
+ g_array_set_clear_func (webrtc->priv->pending_local_ice_candidates,
+ (GDestroyNotify) _clear_ice_candidate_item);
+ ICE_UNLOCK (webrtc);
+
+ for (i = 0; i < items->len; i++) {
+ IceCandidateItem *item = &g_array_index (items, IceCandidateItem, i);
+ const gchar *cand = item->candidate;
+
+ if (!g_ascii_strncasecmp (cand, "a=candidate:", 12)) {
+ /* stripping away "a=" */
+ cand += 2;
+ }
+
+ GST_TRACE_OBJECT (webrtc, "produced ICE candidate for mline:%u and %s",
+ item->mlineindex, cand);
+
+ /* First, merge this ice candidate into the appropriate mline
+ * in the local-description SDP.
+ * Second, emit the on-ice-candidate signal for the app.
+ *
+ * FIXME: This ICE candidate should be stored somewhere with
+ * the associated mid and also merged back into any subsequent
+ * local descriptions on renegotiation */
+ if (webrtc->current_local_description)
+ _add_ice_candidate_to_sdp (webrtc, webrtc->current_local_description->sdp,
+ item->mlineindex, cand);
+ if (webrtc->pending_local_description)
+ _add_ice_candidate_to_sdp (webrtc, webrtc->pending_local_description->sdp,
+ item->mlineindex, cand);
+
+ PC_UNLOCK (webrtc);
+ g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_ICE_CANDIDATE_SIGNAL],
+ 0, item->mlineindex, cand);
+ PC_LOCK (webrtc);
+
+ }
+ g_array_free (items, TRUE);
+
++#ifdef __TIZEN__
++ /* Tizen: gathering completed while candidates were still queued; notify
++  * the deferred COMPLETE state only after all candidates were emitted. */
++ if (webrtc->pending_ice_gathering_state == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
++ _update_and_notify_ice_gathering_state (webrtc, GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE);
++ webrtc->pending_ice_gathering_state = GST_WEBRTC_ICE_GATHERING_STATE_NEW;
++ }
++#endif
++
+ return NULL;
+ }
+
+ /* GstWebRTCICE "on-candidate" callback (ICE agent thread): queue the
+  * candidate and schedule _on_local_ice_candidate_task. Only the first
+  * pending candidate queues a task; that task drains everything that
+  * arrives before it actually runs.
+  * NOTE(review): session_id is used directly as the mline index here —
+  * assumes a 1:1 session/mline mapping; confirm for bundled cases. */
+ static void
+ _on_local_ice_candidate_cb (GstWebRTCICE * ice, guint session_id,
+ gchar * candidate, GstWebRTCBin * webrtc)
+ {
+ IceCandidateItem item;
+ gboolean queue_task = FALSE;
+
+ item.mlineindex = session_id;
+ item.candidate = g_strdup (candidate);
+
+ ICE_LOCK (webrtc);
+ g_array_append_val (webrtc->priv->pending_local_ice_candidates, item);
+
+ /* Let the first pending candidate queue a task each time, which will
+ * handle any that arrive between now and when the task runs */
+ if (webrtc->priv->pending_local_ice_candidates->len == 1)
+ queue_task = TRUE;
+ ICE_UNLOCK (webrtc);
+
+ if (queue_task) {
+ GST_TRACE_OBJECT (webrtc, "Queueing on_ice_candidate_task");
+ gst_webrtc_bin_enqueue_task (webrtc,
+ (GstWebRTCBinFunc) _on_local_ice_candidate_task, NULL, NULL, NULL);
+ }
+ }
+
+ /* Task data for getStats(): optional selector pad plus the promise to
+  * answer (both owned by the struct). */
+ struct get_stats
+ {
+ GstPad *pad;
+ GstPromise *promise;
+ };
+
+ /* GDestroyNotify for struct get_stats: drops the held refs and frees. */
+ static void
+ _free_get_stats (struct get_stats *stats)
+ {
+ if (stats->pad)
+ gst_object_unref (stats->pad);
+ if (stats->promise)
+ gst_promise_unref (stats->promise);
+ g_free (stats);
+ }
+
+ /* https://www.w3.org/TR/webrtc/#dom-rtcpeerconnection-getstats() */
+ /* PC-thread task: build the stats structure; the returned structure is
+  * used to answer the promise by the task machinery. */
+ static GstStructure *
+ _get_stats_task (GstWebRTCBin * webrtc, struct get_stats *stats)
+ {
+ /* Our selector is the pad,
+ * https://www.w3.org/TR/webrtc/#dfn-stats-selection-algorithm
+ */
+
+ return gst_webrtc_bin_create_stats (webrtc, stats->pad);
+ }
+
+ /* getStats(): queues _get_stats_task with an optional selector @pad;
+  * @promise is answered from the task, or rejected immediately with an
+  * error structure if the bin is closed. */
+ static void
+ gst_webrtc_bin_get_stats (GstWebRTCBin * webrtc, GstPad * pad,
+ GstPromise * promise)
+ {
+ struct get_stats *stats;
+
+ g_return_if_fail (promise != NULL);
+ g_return_if_fail (pad == NULL || GST_IS_WEBRTC_BIN_PAD (pad));
+
+ stats = g_new0 (struct get_stats, 1);
+ stats->promise = gst_promise_ref (promise);
+ /* FIXME: check that pad exists in element */
+ if (pad)
+ stats->pad = gst_object_ref (pad);
+
+ if (!gst_webrtc_bin_enqueue_task (webrtc, (GstWebRTCBinFunc) _get_stats_task,
+ stats, (GDestroyNotify) _free_get_stats, promise)) {
+ GError *error =
+ g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED,
+ "Could not retrieve statistics. webrtcbin is closed.");
+ GstStructure *s = gst_structure_new ("application/x-gst-promise-error",
+ "error", G_TYPE_ERROR, error, NULL);
+
+ gst_promise_reply (promise, s);
+
+ g_clear_error (&error);
+ }
+ }
+
+ /* addTransceiver(): creates a new, unassociated transceiver (mline -1)
+  * with the given direction and codec-preference caps.
+  * Returns: (transfer full) a new ref on the transceiver. */
+ static GstWebRTCRTPTransceiver *
+ gst_webrtc_bin_add_transceiver (GstWebRTCBin * webrtc,
+ GstWebRTCRTPTransceiverDirection direction, GstCaps * caps)
+ {
+ WebRTCTransceiver *trans;
+
+ g_return_val_if_fail (direction != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE,
+ NULL);
+
+ PC_LOCK (webrtc);
+
+ trans =
+ _create_webrtc_transceiver (webrtc, direction, -1,
+ webrtc_kind_from_caps (caps), caps);
+ GST_LOG_OBJECT (webrtc,
+ "Created new unassociated transceiver %" GST_PTR_FORMAT, trans);
+
+ PC_UNLOCK (webrtc);
+
+ return gst_object_ref (trans);
+ }
+
+ /* GArray clear-func: unref the GstObject stored at this slot (if any). */
+ static void
+ _deref_and_unref (GstObject ** object)
+ {
+ gst_clear_object (object);
+ }
+
+ /* get-transceivers: snapshot of all transceivers, each with an added ref
+  * that is dropped by the array's clear-func when the array is freed. */
+ static GArray *
+ gst_webrtc_bin_get_transceivers (GstWebRTCBin * webrtc)
+ {
+ GArray *arr = g_array_new (FALSE, TRUE, sizeof (GstWebRTCRTPTransceiver *));
+ int i;
+
+ PC_LOCK (webrtc);
+
+ g_array_set_clear_func (arr, (GDestroyNotify) _deref_and_unref);
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *trans =
+ g_ptr_array_index (webrtc->priv->transceivers, i);
+ gst_object_ref (trans);
+ g_array_append_val (arr, trans);
+ }
+ PC_UNLOCK (webrtc);
+
+ return arr;
+ }
+
+ /* get-transceiver: look up the transceiver at @idx.
+  * Returns: (transfer full) (nullable) a new ref, or NULL if out of range.
+  *
+  * Fix: @idx is a guint, so log it with %u instead of %d. */
+ static GstWebRTCRTPTransceiver *
+ gst_webrtc_bin_get_transceiver (GstWebRTCBin * webrtc, guint idx)
+ {
+ GstWebRTCRTPTransceiver *trans = NULL;
+
+ PC_LOCK (webrtc);
+
+ if (idx >= webrtc->priv->transceivers->len) {
+ GST_ERROR_OBJECT (webrtc, "No transceiver for idx %u", idx);
+ goto done;
+ }
+
+ trans = g_ptr_array_index (webrtc->priv->transceivers, idx);
+ gst_object_ref (trans);
+
+ done:
+ PC_UNLOCK (webrtc);
+ return trans;
+ }
+
+ /* add-turn-server: forward a turn:// URI to the ICE agent.
+  * Returns: TRUE if the agent accepted the server. */
+ static gboolean
+ gst_webrtc_bin_add_turn_server (GstWebRTCBin * webrtc, const gchar * uri)
+ {
+ gboolean ret;
+
+ g_return_val_if_fail (GST_IS_WEBRTC_BIN (webrtc), FALSE);
+ g_return_val_if_fail (uri != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (webrtc, "Adding turn server: %s", uri);
+
+ PC_LOCK (webrtc);
+ ret = gst_webrtc_ice_add_turn_server (webrtc->priv->ice, uri);
+ PC_UNLOCK (webrtc);
+
+ return ret;
+ }
+
+ /* gst_pad_sticky_events_foreach callback: replay each sticky event onto
+  * the ghost pad @user_data. Always returns TRUE to keep iterating. */
+ static gboolean
+ copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ GstPad *gpad = GST_PAD_CAST (user_data);
+
+ GST_DEBUG_OBJECT (gpad, "store sticky event %" GST_PTR_FORMAT, *event);
+ gst_pad_store_sticky_event (gpad, *event);
+
+ return TRUE;
+ }
+
+ /* createDataChannel(): validate @init_params per the WebRTC spec
+  * (ordered, max-packet-lifetime / max-retransmits, protocol, negotiated,
+  * id, priority), allocate or validate the channel id, create the channel
+  * object, add its appsrc/appsink to the bin and link it to the SCTP
+  * transport, starting negotiation or flagging renegotiation as needed.
+  * Returns: (transfer full) (nullable) the new channel, or NULL on error.
+  * Lock order: PC_LOCK then DC_LOCK. */
+ static WebRTCDataChannel *
+ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label,
+ GstStructure * init_params)
+ {
+ gboolean ordered;
+ gint max_packet_lifetime;
+ gint max_retransmits;
+ const gchar *protocol;
+ gboolean negotiated;
+ gint id;
+ GstWebRTCPriorityType priority;
+ WebRTCDataChannel *ret;
+ gint max_channels = 65534;
+
+ g_return_val_if_fail (GST_IS_WEBRTC_BIN (webrtc), NULL);
+ g_return_val_if_fail (label != NULL, NULL);
+ g_return_val_if_fail (strlen (label) <= 65535, NULL);
++#ifndef __TIZEN__
+ g_return_val_if_fail (webrtc->priv->is_closed != TRUE, NULL);
++#endif
+
+ /* each parameter falls back to its spec default when absent */
+ if (!init_params
+ || !gst_structure_get_boolean (init_params, "ordered", &ordered))
+ ordered = TRUE;
+ if (!init_params
+ || !gst_structure_get_int (init_params, "max-packet-lifetime",
+ &max_packet_lifetime))
+ max_packet_lifetime = -1;
+ if (!init_params
+ || !gst_structure_get_int (init_params, "max-retransmits",
+ &max_retransmits))
+ max_retransmits = -1;
+ /* both retransmits and lifetime cannot be set */
+ g_return_val_if_fail ((max_packet_lifetime == -1)
+ || (max_retransmits == -1), NULL);
+
+ if (!init_params
+ || !(protocol = gst_structure_get_string (init_params, "protocol")))
+ protocol = "";
+ g_return_val_if_fail (strlen (protocol) <= 65535, NULL);
+
+ if (!init_params
+ || !gst_structure_get_boolean (init_params, "negotiated", &negotiated))
+ negotiated = FALSE;
+ if (!negotiated || !init_params
+ || !gst_structure_get_int (init_params, "id", &id))
+ id = -1;
+ if (negotiated)
+ g_return_val_if_fail (id != -1, NULL);
+ g_return_val_if_fail (id < 65535, NULL);
+
+ if (!init_params
+ || !gst_structure_get_enum (init_params, "priority",
+ GST_TYPE_WEBRTC_PRIORITY_TYPE, (gint *) & priority))
+ priority = GST_WEBRTC_PRIORITY_TYPE_LOW;
+
+ /* FIXME: clamp max-retransmits and max-packet-lifetime */
+
+ if (webrtc->priv->sctp_transport) {
+ /* Let transport be the connection's [[SctpTransport]] slot.
+ *
+ * If the [[DataChannelId]] slot is not null, transport is in
+ * connected state and [[DataChannelId]] is greater or equal to the
+ * transport's [[MaxChannels]] slot, throw an OperationError.
+ */
+ g_object_get (webrtc->priv->sctp_transport, "max-channels", &max_channels,
+ NULL);
+
+ g_return_val_if_fail (id <= max_channels, NULL);
+ }
+
+ if (!_have_nice_elements (webrtc) || !_have_dtls_elements (webrtc) ||
+ !_have_sctp_elements (webrtc))
+ return NULL;
+
+ PC_LOCK (webrtc);
+ DC_LOCK (webrtc);
+ /* check if the id has been used already */
+ if (id != -1) {
+ WebRTCDataChannel *channel = _find_data_channel_for_id (webrtc, id);
+ if (channel) {
+ GST_ELEMENT_WARNING (webrtc, LIBRARY, SETTINGS,
+ ("Attempting to add a data channel with a duplicate ID: %i", id),
+ NULL);
+ DC_UNLOCK (webrtc);
+ PC_UNLOCK (webrtc);
+ return NULL;
+ }
+ } else if (webrtc->current_local_description
+ && webrtc->current_remote_description && webrtc->priv->sctp_transport
+ && webrtc->priv->sctp_transport->transport) {
+ /* else we can only generate an id if we're configured already. The other
+ * case for generating an id is on sdp setting */
+ id = _generate_data_channel_id (webrtc);
+ if (id == -1) {
+ GST_ELEMENT_WARNING (webrtc, RESOURCE, NOT_FOUND,
+ ("%s", "Failed to generate an identifier for a data channel"), NULL);
+ DC_UNLOCK (webrtc);
+ PC_UNLOCK (webrtc);
+ return NULL;
+ }
+ }
+
+ ret = g_object_new (WEBRTC_TYPE_DATA_CHANNEL, "label", label,
+ "ordered", ordered, "max-packet-lifetime", max_packet_lifetime,
+ "max-retransmits", max_retransmits, "protocol", protocol,
+ "negotiated", negotiated, "id", id, "priority", priority, NULL);
+
+ if (!ret) {
+ DC_UNLOCK (webrtc);
+ PC_UNLOCK (webrtc);
+ return ret;
+ }
+
+ gst_bin_add (GST_BIN (webrtc), ret->appsrc);
+ gst_bin_add (GST_BIN (webrtc), ret->appsink);
+
+ gst_element_sync_state_with_parent (ret->appsrc);
+ gst_element_sync_state_with_parent (ret->appsink);
+
+ /* extra ref for the caller; the data_channels array keeps the original */
+ ret = gst_object_ref (ret);
+ ret->webrtcbin = webrtc;
+ g_ptr_array_add (webrtc->priv->data_channels, ret);
+ DC_UNLOCK (webrtc);
+
+ gst_webrtc_bin_update_sctp_priority (webrtc);
+ webrtc_data_channel_link_to_sctp (ret, webrtc->priv->sctp_transport);
+ if (webrtc->priv->sctp_transport &&
+ webrtc->priv->sctp_transport->association_established
+ && !ret->parent.negotiated) {
+ webrtc_data_channel_start_negotiation (ret);
+ } else {
+ _update_need_negotiation (webrtc);
+ }
+
+ PC_UNLOCK (webrtc);
+ return ret;
+ }
+
+ /* === rtpbin signal implementations === */
+
+ /* rtpbin "pad-added": when a recv_rtp_src_<session>_<ssrc>_<pt> pad
+  * appears, map its ssrc to a media index, find the matching transceiver's
+  * ghost pad, retarget it to the new pad and expose it on webrtcbin.
+  *
+  * Fixes: the sscanf-failure path leaked @new_pad_name, and the NULL
+  * checks for stream / rtp_trans / pad fell through after
+  * g_warn_if_reached() (which does not abort) and dereferenced the NULL
+  * pointer; all four paths now clean up and return early. */
+ static void
+ on_rtpbin_pad_added (GstElement * rtpbin, GstPad * new_pad,
+ GstWebRTCBin * webrtc)
+ {
+ gchar *new_pad_name = NULL;
+
+ new_pad_name = gst_pad_get_name (new_pad);
+ GST_TRACE_OBJECT (webrtc, "new rtpbin pad %s", new_pad_name);
+ if (g_str_has_prefix (new_pad_name, "recv_rtp_src_")) {
+ guint32 session_id = 0, ssrc = 0, pt = 0;
+ GstWebRTCRTPTransceiver *rtp_trans;
+ WebRTCTransceiver *trans;
+ TransportStream *stream;
+ GstWebRTCBinPad *pad;
+ guint media_idx = 0;
+ gboolean found_ssrc = FALSE;
+ guint i;
+
+ if (sscanf (new_pad_name, "recv_rtp_src_%u_%u_%u", &session_id, &ssrc,
+ &pt) != 3) {
+ g_critical ("Invalid rtpbin pad name \'%s\'", new_pad_name);
+ g_free (new_pad_name);
+ return;
+ }
+
+ stream = _find_transport_for_session (webrtc, session_id);
+ if (!stream) {
+ g_warn_if_reached ();
+ g_free (new_pad_name);
+ return;
+ }
+
+ media_idx = session_id;
+
+ for (i = 0; i < stream->remote_ssrcmap->len; i++) {
+ SsrcMapItem *item = g_ptr_array_index (stream->remote_ssrcmap, i);
+ if (item->ssrc == ssrc) {
+ media_idx = item->media_idx;
+ found_ssrc = TRUE;
+ break;
+ }
+ }
+
+ if (!found_ssrc) {
+ GST_WARNING_OBJECT (webrtc, "Could not find ssrc %u", ssrc);
+ }
+
+ rtp_trans = _find_transceiver_for_mline (webrtc, media_idx);
+ if (!rtp_trans) {
+ g_warn_if_reached ();
+ g_free (new_pad_name);
+ return;
+ }
+ trans = WEBRTC_TRANSCEIVER (rtp_trans);
+ g_assert (trans->stream == stream);
+
+ pad = _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans);
+
+ GST_TRACE_OBJECT (webrtc, "found pad %" GST_PTR_FORMAT
+ " for rtpbin pad name %s", pad, new_pad_name);
+ if (!pad) {
+ g_warn_if_reached ();
+ g_free (new_pad_name);
+ return;
+ }
+ gst_ghost_pad_set_target (GST_GHOST_PAD (pad), GST_PAD (new_pad));
+
+ if (webrtc->priv->running)
+ gst_pad_set_active (GST_PAD (pad), TRUE);
+ gst_pad_sticky_events_foreach (new_pad, copy_sticky_events, pad);
+ gst_element_add_pad (GST_ELEMENT (webrtc), GST_PAD (pad));
+ _remove_pending_pad (webrtc, pad);
+
+ gst_object_unref (pad);
+ }
+ g_free (new_pad_name);
+ }
+
+ /* only used for the receiving streams */
+ /* rtpbin "request-pt-map": return (transfer full) the caps negotiated
+  * for @pt on @session_id's transport stream, or NULL if unknown. */
+ static GstCaps *
+ on_rtpbin_request_pt_map (GstElement * rtpbin, guint session_id, guint pt,
+ GstWebRTCBin * webrtc)
+ {
+ TransportStream *stream;
+ GstCaps *ret;
+
+ GST_DEBUG_OBJECT (webrtc, "getting pt map for pt %d in session %d", pt,
+ session_id);
+
+ stream = _find_transport_for_session (webrtc, session_id);
+ if (!stream)
+ goto unknown_session;
+
+ /* ref: the stream keeps its own reference, rtpbin takes ours */
+ if ((ret = transport_stream_get_caps_for_pt (stream, pt)))
+ gst_caps_ref (ret);
+
+ GST_TRACE_OBJECT (webrtc, "Found caps %" GST_PTR_FORMAT " for pt %d in "
+ "session %d", ret, pt, session_id);
+
+ return ret;
+
+ unknown_session:
+ {
+ GST_DEBUG_OBJECT (webrtc, "unknown session %d", session_id);
+ return NULL;
+ }
+ }
+
+ /* gst_structure_foreach callback: copy each field into the GstStructure
+  * @user_data, overwriting duplicates. Always returns TRUE to continue. */
+ static gboolean
+ _merge_structure (GQuark field_id, const GValue * value, gpointer user_data)
+ {
+ GstStructure *s = user_data;
+
+ gst_structure_id_set_value (s, field_id, value);
+
+ return TRUE;
+ }
+
+ /* rtpbin "request-aux-sender": if the session's stream negotiated RTX,
+  * build a bin wrapping rtprtxsend configured with the merged local RTX
+  * ssrc map of all transceivers on this stream.
+  * Returns: (transfer full) (nullable) the aux bin, or NULL. */
+ static GstElement *
+ on_rtpbin_request_aux_sender (GstElement * rtpbin, guint session_id,
+ GstWebRTCBin * webrtc)
+ {
+ TransportStream *stream;
+ gboolean have_rtx = FALSE;
++#ifndef __TIZEN__
+ GstStructure *pt_map = NULL;
++#endif
+ GstElement *ret = NULL;
+
+ stream = _find_transport_for_session (webrtc, session_id);
+
+ if (stream)
+ have_rtx = transport_stream_get_pt (stream, "RTX") != 0;
+
++#ifndef __TIZEN__
+ GST_LOG_OBJECT (webrtc, "requesting aux sender for stream %" GST_PTR_FORMAT
+ " with pt map %" GST_PTR_FORMAT, stream, pt_map);
++#else
++ GST_LOG_OBJECT (webrtc, "requesting aux sender for stream %" GST_PTR_FORMAT, stream);
++#endif
+
+ if (have_rtx) {
+ GstElement *rtx;
+ GstPad *pad;
+ gchar *name;
+ GstStructure *merged_local_rtx_ssrc_map =
+ gst_structure_new_empty ("application/x-rtp-ssrc-map");
+ guint i;
+
+ if (stream->rtxsend) {
+ GST_WARNING_OBJECT (webrtc, "rtprtxsend already created! rtpbin bug?!");
+ goto out;
+ }
+
+ GST_INFO ("creating AUX sender");
+ ret = gst_bin_new (NULL);
+ rtx = gst_element_factory_make ("rtprtxsend", NULL);
+ g_object_set (rtx, "max-size-packets", 500, NULL);
+ _set_rtx_ptmap_from_stream (webrtc, stream);
+
+ /* merge the per-transceiver RTX ssrc maps for this stream */
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ WebRTCTransceiver *trans =
+ WEBRTC_TRANSCEIVER (g_ptr_array_index (webrtc->priv->transceivers,
+ i));
+
+ if (trans->stream == stream && trans->local_rtx_ssrc_map)
+ gst_structure_foreach (trans->local_rtx_ssrc_map,
+ _merge_structure, merged_local_rtx_ssrc_map);
+ }
+
+ g_object_set (rtx, "ssrc-map", merged_local_rtx_ssrc_map, NULL);
+ gst_structure_free (merged_local_rtx_ssrc_map);
+
+ gst_bin_add (GST_BIN (ret), rtx);
+
+ /* ghost rtprtxsend's pads with the session-numbered names rtpbin expects */
+ pad = gst_element_get_static_pad (rtx, "src");
+ name = g_strdup_printf ("src_%u", session_id);
+ gst_element_add_pad (ret, gst_ghost_pad_new (name, pad));
+ g_free (name);
+ gst_object_unref (pad);
+
+ pad = gst_element_get_static_pad (rtx, "sink");
+ name = g_strdup_printf ("sink_%u", session_id);
+ gst_element_add_pad (ret, gst_ghost_pad_new (name, pad));
+ g_free (name);
+ gst_object_unref (pad);
+
+ stream->rtxsend = gst_object_ref (rtx);
+ }
+
+ out:
++#ifndef __TIZEN__
+ if (pt_map)
+ gst_structure_free (pt_map);
+
++#endif
+ return ret;
+ }
+
+ /* rtpbin "request-aux-receiver": build a bin chaining rtprtxreceive
+  * and/or rtpreddec for the session, depending on which payload types
+  * (RTX, RED) the stream negotiated.
+  * Returns: (transfer full) (nullable) the aux bin, or NULL.
+  *
+  * Fix: the error path unreffed @ret but still returned it through the
+  * shared 'out:' label, handing rtpbin a freed object; @ret is now
+  * cleared after the unref so NULL is returned instead. */
+ static GstElement *
+ on_rtpbin_request_aux_receiver (GstElement * rtpbin, guint session_id,
+ GstWebRTCBin * webrtc)
+ {
+ GstElement *ret = NULL;
+ GstElement *prev = NULL;
+ GstPad *sinkpad = NULL;
+ TransportStream *stream;
+ gint red_pt = 0;
+ gint rtx_pt = 0;
+
+ stream = _find_transport_for_session (webrtc, session_id);
+
+ if (stream) {
+ red_pt = transport_stream_get_pt (stream, "RED");
+ rtx_pt = transport_stream_get_pt (stream, "RTX");
+ }
+
+ GST_LOG_OBJECT (webrtc, "requesting aux receiver for stream %" GST_PTR_FORMAT,
+ stream);
+
+ if (red_pt || rtx_pt)
+ ret = gst_bin_new (NULL);
+
+ if (rtx_pt) {
+ if (stream->rtxreceive) {
+ GST_WARNING_OBJECT (webrtc,
+ "rtprtxreceive already created! rtpbin bug?!");
+ goto error;
+ }
+
+ stream->rtxreceive = gst_element_factory_make ("rtprtxreceive", NULL);
+ _set_rtx_ptmap_from_stream (webrtc, stream);
+
+ gst_bin_add (GST_BIN (ret), stream->rtxreceive);
+
+ sinkpad = gst_element_get_static_pad (stream->rtxreceive, "sink");
+
+ prev = gst_object_ref (stream->rtxreceive);
+ }
+
+ if (red_pt) {
+ GstElement *rtpreddec = gst_element_factory_make ("rtpreddec", NULL);
+
+ GST_DEBUG_OBJECT (webrtc, "Creating RED decoder for pt %d in session %u",
+ red_pt, session_id);
+
+ gst_bin_add (GST_BIN (ret), rtpreddec);
+
+ g_object_set (rtpreddec, "pt", red_pt, NULL);
+
+ if (prev)
+ gst_element_link (prev, rtpreddec);
+ else
+ sinkpad = gst_element_get_static_pad (rtpreddec, "sink");
+
+ prev = rtpreddec;
+ }
+
+ if (sinkpad) {
+ gchar *name = g_strdup_printf ("sink_%u", session_id);
+ GstPad *ghost = gst_ghost_pad_new (name, sinkpad);
+ g_free (name);
+ gst_object_unref (sinkpad);
+ gst_element_add_pad (ret, ghost);
+ }
+
+ if (prev) {
+ gchar *name = g_strdup_printf ("src_%u", session_id);
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPad *ghost = gst_ghost_pad_new (name, srcpad);
+ g_free (name);
+ gst_object_unref (srcpad);
+ gst_element_add_pad (ret, ghost);
+ }
+
+ out:
+ return ret;
+
+ error:
+ if (ret) {
+ gst_object_unref (ret);
+ /* don't return the freed bin through 'out:' */
+ ret = NULL;
+ }
+ goto out;
+ }
+
+ /* rtpbin "request-fec-decoder": create an rtpulpfecdec wired to the
+  * session's internal storage when the stream negotiated ULPFEC.
+  * Returns: (transfer full) (nullable) the decoder, or NULL. */
+ static GstElement *
+ on_rtpbin_request_fec_decoder (GstElement * rtpbin, guint session_id,
+ GstWebRTCBin * webrtc)
+ {
+ TransportStream *stream;
+ GstElement *ret = NULL;
+ gint pt = 0;
+ GObject *internal_storage;
+
+ stream = _find_transport_for_session (webrtc, session_id);
+
+ /* TODO: for now, we only support ulpfec, but once we support
+ * more algorithms, if the remote may use more than one algorithm,
+ * we will want to do the following:
+ *
+ * + Return a bin here, with the relevant FEC decoders plugged in
+ * and their payload type set to 0
+ * + Enable the decoders by setting the payload type only when
+ * we detect it (by connecting to ptdemux:new-payload-type for
+ * example)
+ */
+ if (stream)
+ pt = transport_stream_get_pt (stream, "ULPFEC");
+
+ if (pt) {
+ GST_DEBUG_OBJECT (webrtc, "Creating ULPFEC decoder for pt %d in session %u",
+ pt, session_id);
+ ret = gst_element_factory_make ("rtpulpfecdec", NULL);
+ g_signal_emit_by_name (webrtc->rtpbin, "get-internal-storage", session_id,
+ &internal_storage);
+
+ g_object_set (ret, "pt", pt, "storage", internal_storage, NULL);
+ g_object_unref (internal_storage);
+ }
+
+ return ret;
+ }
+
+ /* rtpbin "request-fec-encoder": build a bin chaining rtpulpfecenc
+  * and/or rtpredenc, depending on which payload types (ULPFEC, RED) the
+  * stream negotiated.
+  * Returns: (transfer full) (nullable) the encoder bin, or NULL.
+  * NOTE(review): if ULPFEC is negotiated but no transceiver matches this
+  * mline, @trans would be NULL when dereferenced for fec_percentage —
+  * presumably guaranteed elsewhere; confirm. */
+ static GstElement *
+ on_rtpbin_request_fec_encoder (GstElement * rtpbin, guint session_id,
+ GstWebRTCBin * webrtc)
+ {
+ GstElement *ret = NULL;
+ GstElement *prev = NULL;
+ TransportStream *stream;
+ guint ulpfec_pt = 0;
+ guint red_pt = 0;
+ GstPad *sinkpad = NULL;
+ GstWebRTCRTPTransceiver *trans;
+
+ stream = _find_transport_for_session (webrtc, session_id);
+ trans = _find_transceiver (webrtc, &session_id,
+ (FindTransceiverFunc) transceiver_match_for_mline);
+
+ if (stream) {
+ ulpfec_pt = transport_stream_get_pt (stream, "ULPFEC");
+ red_pt = transport_stream_get_pt (stream, "RED");
+ }
+
+ if (ulpfec_pt || red_pt)
+ ret = gst_bin_new (NULL);
+
+ if (ulpfec_pt) {
+ GstElement *fecenc = gst_element_factory_make ("rtpulpfecenc", NULL);
+ GstCaps *caps = transport_stream_get_caps_for_pt (stream, ulpfec_pt);
+
+ GST_DEBUG_OBJECT (webrtc,
+ "Creating ULPFEC encoder for session %d with pt %d", session_id,
+ ulpfec_pt);
+
+ gst_bin_add (GST_BIN (ret), fecenc);
+ sinkpad = gst_element_get_static_pad (fecenc, "sink");
+ g_object_set (fecenc, "pt", ulpfec_pt, "percentage",
+ WEBRTC_TRANSCEIVER (trans)->fec_percentage, NULL);
+
+
+ /* multi-packet FEC only makes sense for video payloads */
+ if (caps && !gst_caps_is_empty (caps)) {
+ const GstStructure *s = gst_caps_get_structure (caps, 0);
+ const gchar *media = gst_structure_get_string (s, "media");
+
+ if (!g_strcmp0 (media, "video"))
+ g_object_set (fecenc, "multipacket", TRUE, NULL);
+ }
+
+ prev = fecenc;
+ }
+
+ if (red_pt) {
+ GstElement *redenc = gst_element_factory_make ("rtpredenc", NULL);
+
+ GST_DEBUG_OBJECT (webrtc, "Creating RED encoder for session %d with pt %d",
+ session_id, red_pt);
+
+ gst_bin_add (GST_BIN (ret), redenc);
+ if (prev)
+ gst_element_link (prev, redenc);
+ else
+ sinkpad = gst_element_get_static_pad (redenc, "sink");
+
+ g_object_set (redenc, "pt", red_pt, "allow-no-red-blocks", TRUE, NULL);
+
+ prev = redenc;
+ }
+
+ if (sinkpad) {
+ GstPad *ghost = gst_ghost_pad_new ("sink", sinkpad);
+ gst_object_unref (sinkpad);
+ gst_element_add_pad (ret, ghost);
+ }
+
+ if (prev) {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPad *ghost = gst_ghost_pad_new ("src", srcpad);
+ gst_object_unref (srcpad);
+ gst_element_add_pad (ret, ghost);
+ }
+
+ return ret;
+ }
+
+ /* The following rtpbin per-ssrc signal handlers only log the event. */
+ static void
+ on_rtpbin_bye_ssrc (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u received bye", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_bye_timeout (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u bye timeout", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_sender_timeout (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u sender timeout", session_id,
+ ssrc);
+ }
+
+ static void
+ on_rtpbin_new_ssrc (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u new ssrc", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_ssrc_active (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u active", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_ssrc_collision (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u collision", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_ssrc_sdes (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u sdes", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_ssrc_validated (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u validated", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_timeout (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u timeout", session_id, ssrc);
+ }
+
+ static void
+ on_rtpbin_new_sender_ssrc (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u new sender ssrc", session_id,
+ ssrc);
+ }
+
+ static void
+ on_rtpbin_sender_ssrc_active (GstElement * rtpbin, guint session_id, guint ssrc,
+ GstWebRTCBin * webrtc)
+ {
+ GST_INFO_OBJECT (webrtc, "session %u ssrc %u sender ssrc active", session_id,
+ ssrc);
+ }
+
+ /* rtpbin "new-jitterbuffer": look up the transceiver owning @ssrc and
+  * configure per-session retransmission (do-retransmission == do_nack),
+  * keeping a weak ref on the jitterbuffer in the ssrc map entry. */
+ static void
+ on_rtpbin_new_jitterbuffer (GstElement * rtpbin, GstElement * jitterbuffer,
+ guint session_id, guint ssrc, GstWebRTCBin * webrtc)
+ {
+ TransportStream *stream;
+ guint i;
+
+ PC_LOCK (webrtc);
+ GST_INFO_OBJECT (webrtc, "new jitterbuffer %" GST_PTR_FORMAT " for "
+ "session %u ssrc %u", jitterbuffer, session_id, ssrc);
+
+ if (!(stream = _find_transport_for_session (webrtc, session_id))) {
+ g_warn_if_reached ();
+ goto out;
+ }
+
+ /* XXX: this will fail with no ssrc in the remote sdp as used with e.g. simulcast
+ * newer SDP versions from chrome/firefox */
+ for (i = 0; i < stream->remote_ssrcmap->len; i++) {
+ SsrcMapItem *item = g_ptr_array_index (stream->remote_ssrcmap, i);
+
+ if (item->ssrc == ssrc) {
+ GstWebRTCRTPTransceiver *trans;
+ gboolean do_nack;
+
+ trans = _find_transceiver_for_mline (webrtc, item->media_idx);
+ if (!trans) {
+ g_warn_if_reached ();
+ break;
+ }
+
+ do_nack = WEBRTC_TRANSCEIVER (trans)->do_nack;
+ /* We don't set do-retransmission on rtpbin as we want per-session control */
+ GST_LOG_OBJECT (webrtc, "setting do-nack=%s for transceiver %"
+ GST_PTR_FORMAT " with transport %" GST_PTR_FORMAT
+ " rtp session %u ssrc %u", do_nack ? "true" : "false", trans, stream,
+ session_id, ssrc);
+ g_object_set (jitterbuffer, "do-retransmission", do_nack, NULL);
+
+ g_weak_ref_set (&item->rtpjitterbuffer, jitterbuffer);
+ break;
+ }
+ }
+ out:
+ PC_UNLOCK (webrtc);
+ }
+
+ /* rtpbin "new-storage": size the FEC/RTX storage to the configured
+  * jitterbuffer latency plus a safety margin, converted to nanoseconds. */
+ static void
+ on_rtpbin_new_storage (GstElement * rtpbin, GstElement * storage,
+ guint session_id, GstWebRTCBin * webrtc)
+ {
+ guint64 latency = webrtc->priv->jb_latency;
+
+ /* Add an extra 50 ms for safey */
+ latency += RTPSTORAGE_EXTRA_TIME;
+ latency *= GST_MSECOND;
+
+ g_object_set (storage, "size-time", latency, NULL);
+ }
+
+ /* Create and configure the internal rtpbin: SAVPF profile (mandated by
+  * WebRTC), lost-packet events enabled, and all signal handlers above
+  * connected. Returns: (transfer floating) (nullable) the rtpbin, or NULL
+  * if the element factory is missing. */
+ static GstElement *
+ _create_rtpbin (GstWebRTCBin * webrtc)
+ {
+ GstElement *rtpbin;
+
+ if (!(rtpbin = gst_element_factory_make ("rtpbin", "rtpbin")))
+ return NULL;
+
+ /* mandated by WebRTC */
+ gst_util_set_object_arg (G_OBJECT (rtpbin), "rtp-profile", "savpf");
+
+ g_object_set (rtpbin, "do-lost", TRUE, NULL);
+
+ g_signal_connect (rtpbin, "pad-added", G_CALLBACK (on_rtpbin_pad_added),
+ webrtc);
+ g_signal_connect (rtpbin, "request-pt-map",
+ G_CALLBACK (on_rtpbin_request_pt_map), webrtc);
+ g_signal_connect (rtpbin, "request-aux-sender",
+ G_CALLBACK (on_rtpbin_request_aux_sender), webrtc);
+ g_signal_connect (rtpbin, "request-aux-receiver",
+ G_CALLBACK (on_rtpbin_request_aux_receiver), webrtc);
+ g_signal_connect (rtpbin, "new-storage",
+ G_CALLBACK (on_rtpbin_new_storage), webrtc);
+ g_signal_connect (rtpbin, "request-fec-decoder",
+ G_CALLBACK (on_rtpbin_request_fec_decoder), webrtc);
+ g_signal_connect (rtpbin, "request-fec-encoder",
+ G_CALLBACK (on_rtpbin_request_fec_encoder), webrtc);
+ g_signal_connect (rtpbin, "on-bye-ssrc",
+ G_CALLBACK (on_rtpbin_bye_ssrc), webrtc);
+ g_signal_connect (rtpbin, "on-bye-timeout",
+ G_CALLBACK (on_rtpbin_bye_timeout), webrtc);
+ g_signal_connect (rtpbin, "on-new-ssrc",
+ G_CALLBACK (on_rtpbin_new_ssrc), webrtc);
+ g_signal_connect (rtpbin, "on-new-sender-ssrc",
+ G_CALLBACK (on_rtpbin_new_sender_ssrc), webrtc);
+ g_signal_connect (rtpbin, "on-sender-ssrc-active",
+ G_CALLBACK (on_rtpbin_sender_ssrc_active), webrtc);
+ g_signal_connect (rtpbin, "on-sender-timeout",
+ G_CALLBACK (on_rtpbin_sender_timeout), webrtc);
+ g_signal_connect (rtpbin, "on-ssrc-active",
+ G_CALLBACK (on_rtpbin_ssrc_active), webrtc);
+ g_signal_connect (rtpbin, "on-ssrc-collision",
+ G_CALLBACK (on_rtpbin_ssrc_collision), webrtc);
+ g_signal_connect (rtpbin, "on-ssrc-sdes",
+ G_CALLBACK (on_rtpbin_ssrc_sdes), webrtc);
+ g_signal_connect (rtpbin, "on-ssrc-validated",
+ G_CALLBACK (on_rtpbin_ssrc_validated), webrtc);
+ g_signal_connect (rtpbin, "on-timeout",
+ G_CALLBACK (on_rtpbin_timeout), webrtc);
+ g_signal_connect (rtpbin, "new-jitterbuffer",
+ G_CALLBACK (on_rtpbin_new_jitterbuffer), webrtc);
+
+ return rtpbin;
+ }
+
+ static GstStateChangeReturn
+ gst_webrtc_bin_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ GST_DEBUG ("changing state: %s => %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:{
+ if (!_have_nice_elements (webrtc) || !_have_dtls_elements (webrtc))
+ return GST_STATE_CHANGE_FAILURE;
+ _start_thread (webrtc);
+ PC_LOCK (webrtc);
+ _update_need_negotiation (webrtc);
+ PC_UNLOCK (webrtc);
+ break;
+ }
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ webrtc->priv->running = TRUE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* Mangle the return value to NO_PREROLL as that's what really is
+ * occurring here however cannot be propagated correctly due to nicesrc
+ * requiring that it be in PLAYING already in order to send/receive
+ * correctly :/ */
+ ret = GST_STATE_CHANGE_NO_PREROLL;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ webrtc->priv->running = FALSE;
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ _stop_thread (webrtc);
++#ifdef __TIZEN__
++ webrtc->priv->need_negotiation = FALSE;
++#endif
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstPadProbeReturn
+ sink_pad_block (GstPad * pad, GstPadProbeInfo * info, gpointer unused)
+ {
+ GST_LOG_OBJECT (pad, "blocking pad with data %" GST_PTR_FORMAT, info->data);
+
+ return GST_PAD_PROBE_OK;
+ }
+
+
+ static GstPad *
+ gst_webrtc_bin_request_new_pad (GstElement * element, GstPadTemplate * templ,
+ const gchar * name, const GstCaps * caps)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (element);
+ GstWebRTCRTPTransceiver *trans = NULL;
+ GstWebRTCBinPad *pad = NULL;
+ guint serial;
+ gboolean lock_mline = FALSE;
+
+ if (!_have_nice_elements (webrtc) || !_have_dtls_elements (webrtc))
+ return NULL;
+
+ if (templ->direction != GST_PAD_SINK ||
+ g_strcmp0 (templ->name_template, "sink_%u") != 0) {
+ GST_ERROR_OBJECT (element, "Requested pad that shouldn't be requestable");
+ return NULL;
+ }
+
+ PC_LOCK (webrtc);
+
+ if (name == NULL || strlen (name) < 6 || !g_str_has_prefix (name, "sink_")) {
+ /* no name given when requesting the pad, use next available int */
+ serial = webrtc->priv->max_sink_pad_serial++;
+ } else {
+ /* parse serial number from requested padname */
+ serial = g_ascii_strtoull (&name[5], NULL, 10);
+ lock_mline = TRUE;
+ }
+
+ if (lock_mline) {
+ GstWebRTCBinPad *pad2;
+
+ trans = _find_transceiver_for_mline (webrtc, serial);
+
+ if (trans) {
+ /* Reject transceivers that are only for receiving ... */
+ if (trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY ||
+ trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) {
+ gchar *direction =
+ g_enum_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ trans->direction);
+ GST_ERROR_OBJECT (element, "Tried to request a new sink pad %s for"
+ " existing m-line %d, but the transceiver's direction is %s",
+ name, serial, direction);
+ g_free (direction);
+ goto error_out;
+ }
+
+ /* Reject transceivers that already have a pad allocated */
+ pad2 = _find_pad_for_transceiver (webrtc, GST_PAD_SINK, trans);
+ if (pad2) {
+ GST_ERROR_OBJECT (element, "Trying to request pad %s for m-line %d, "
+ " but the transceiver associated with this m-line already has pad"
+ " %s", name, serial, GST_PAD_NAME (pad2));
+ gst_object_unref (pad2);
+ goto error_out;
+ }
+
+ if (caps) {
+ GST_OBJECT_LOCK (trans);
+ if (trans->codec_preferences &&
+ !gst_caps_can_intersect (caps, trans->codec_preferences)) {
+ GST_ERROR_OBJECT (element, "Tried to request a new sink pad %s for"
+ " existing m-line %d, but requested caps %" GST_PTR_FORMAT
+ " don't match existing codec preferences %" GST_PTR_FORMAT,
+ name, serial, caps, trans->codec_preferences);
+ GST_OBJECT_UNLOCK (trans);
+ goto error_out;
+ }
+ GST_OBJECT_UNLOCK (trans);
+
+ if (trans->kind != GST_WEBRTC_KIND_UNKNOWN) {
+ GstWebRTCKind kind = webrtc_kind_from_caps (caps);
+
+ if (trans->kind != kind) {
+ GST_ERROR_OBJECT (element, "Tried to request a new sink pad %s for"
+ " existing m-line %d, but requested caps %" GST_PTR_FORMAT
+ " don't match transceiver kind %d",
+ name, serial, caps, trans->kind);
+ goto error_out;
+ }
+ }
+ }
+ }
+ }
+
+ /* Let's try to find a free transceiver that matches */
+ if (!trans) {
+ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN;
+ guint i;
+
+ kind = webrtc_kind_from_caps (caps);
+
+ for (i = 0; i < webrtc->priv->transceivers->len; i++) {
+ GstWebRTCRTPTransceiver *tmptrans =
+ g_ptr_array_index (webrtc->priv->transceivers, i);
+ GstWebRTCBinPad *pad2;
+ gboolean has_matching_caps;
+
+ /* Ignore transceivers with a non-matching kind */
+ if (tmptrans->kind != GST_WEBRTC_KIND_UNKNOWN &&
+ kind != GST_WEBRTC_KIND_UNKNOWN && tmptrans->kind != kind)
+ continue;
+
+ /* Ignore stopped transmitters */
+ if (tmptrans->stopped)
+ continue;
+
+ /* Ignore transceivers that are only for receiving ... */
+ if (tmptrans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY
+ || tmptrans->direction ==
+ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE)
+ continue;
+
+ /* Ignore transceivers that already have a pad allocated */
+ pad2 = _find_pad_for_transceiver (webrtc, GST_PAD_SINK, tmptrans);
+ if (pad2) {
+ gst_object_unref (pad2);
+ continue;
+ }
+
+ GST_OBJECT_LOCK (tmptrans);
+ has_matching_caps = (caps && tmptrans->codec_preferences &&
+ !gst_caps_can_intersect (caps, tmptrans->codec_preferences));
+ GST_OBJECT_UNLOCK (tmptrans);
+ /* Ignore transceivers with non-matching caps */
+ if (!has_matching_caps)
+ continue;
+
+ trans = tmptrans;
+ break;
+ }
+ }
+
+ if (!trans) {
+ trans = GST_WEBRTC_RTP_TRANSCEIVER (_create_webrtc_transceiver (webrtc,
+ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV, -1,
+ webrtc_kind_from_caps (caps), NULL));
+ GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT, trans);
+ } else {
+ GST_LOG_OBJECT (webrtc, "Using existing transceiver %" GST_PTR_FORMAT
+ " for mline %u", trans, serial);
+ if (caps) {
+ if (!_update_transceiver_kind_from_caps (trans, caps))
+ GST_WARNING_OBJECT (webrtc,
+ "Trying to change transceiver %d kind from %d to %d",
+ serial, trans->kind, webrtc_kind_from_caps (caps));
+ }
+ }
+ pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SINK, trans, serial);
+
+ pad->block_id = gst_pad_add_probe (GST_PAD (pad), GST_PAD_PROBE_TYPE_BLOCK |
+ GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST,
+ (GstPadProbeCallback) sink_pad_block, NULL, NULL);
+ webrtc->priv->pending_sink_transceivers =
+ g_list_append (webrtc->priv->pending_sink_transceivers,
+ gst_object_ref (pad));
+
+ if (lock_mline) {
+ WebRTCTransceiver *wtrans = WEBRTC_TRANSCEIVER (trans);
+ wtrans->mline_locked = TRUE;
+ trans->mline = serial;
+ }
+
+ PC_UNLOCK (webrtc);
+
+ _add_pad (webrtc, pad);
+
+ return GST_PAD (pad);
+
+ error_out:
+ PC_UNLOCK (webrtc);
+ return NULL;
+ }
+
+ static void
+ gst_webrtc_bin_release_pad (GstElement * element, GstPad * pad)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (element);
+ GstWebRTCBinPad *webrtc_pad = GST_WEBRTC_BIN_PAD (pad);
+
+ GST_DEBUG_OBJECT (webrtc, "Releasing %" GST_PTR_FORMAT, webrtc_pad);
+
+ /* remove the transceiver from the pad so that subsequent code doesn't use
+ * a possibly dead transceiver */
+ PC_LOCK (webrtc);
+ if (webrtc_pad->trans)
++#ifdef __TIZEN__
++ _remove_webrtc_transceiver (webrtc, webrtc_pad->trans);
++#else
+ gst_object_unref (webrtc_pad->trans);
++#endif
+ webrtc_pad->trans = NULL;
+ gst_caps_replace (&webrtc_pad->received_caps, NULL);
+ PC_UNLOCK (webrtc);
+
+ _remove_pad (webrtc, webrtc_pad);
+
+ PC_LOCK (webrtc);
+ _update_need_negotiation (webrtc);
+ PC_UNLOCK (webrtc);
+ }
+
+ static void
+ _update_rtpstorage_latency (GstWebRTCBin * webrtc)
+ {
+ guint i;
+ guint64 latency_ns;
+
+ /* Add an extra 50 ms for safety */
+ latency_ns = webrtc->priv->jb_latency + RTPSTORAGE_EXTRA_TIME;
+ latency_ns *= GST_MSECOND;
+
+ for (i = 0; i < webrtc->priv->transports->len; i++) {
+ TransportStream *stream = g_ptr_array_index (webrtc->priv->transports, i);
+ GObject *storage = NULL;
+
+ g_signal_emit_by_name (webrtc->rtpbin, "get-storage", stream->session_id,
+ &storage);
+
+ g_object_set (storage, "size-time", latency_ns, NULL);
+
+ g_object_unref (storage);
+ }
+ }
+
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++static void
++_update_drop_probability (GstWebRTCBin * webrtc, gfloat probability, gboolean sender)
++{
++ GValue value = G_VALUE_INIT;
++ GstElement *element;
++ GstIterator *bin_iterator = gst_bin_iterate_sorted (GST_BIN (webrtc));
++ g_assert (bin_iterator);
++
++ while (gst_iterator_next (bin_iterator, &value) == GST_ITERATOR_OK) {
++ element = GST_ELEMENT (g_value_get_object (&value));
++ if (g_strrstr (GST_ELEMENT_NAME (element), sender ? "netsim_send" : "netsim_recv"))
++ g_object_set (element, "drop-probability", probability, NULL);
++ g_value_reset (&value);
++ }
++
++ g_value_unset (&value);
++ gst_iterator_free (bin_iterator);
++}
++#endif
++
+ static void
+ gst_webrtc_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (object);
+
+ switch (prop_id) {
+ case PROP_STUN_SERVER:
+ gst_webrtc_ice_set_stun_server (webrtc->priv->ice,
+ g_value_get_string (value));
+ break;
+ case PROP_TURN_SERVER:
+ gst_webrtc_ice_set_turn_server (webrtc->priv->ice,
+ g_value_get_string (value));
+ break;
+ case PROP_BUNDLE_POLICY:
+ if (g_value_get_enum (value) == GST_WEBRTC_BUNDLE_POLICY_BALANCED) {
+ GST_ERROR_OBJECT (object, "Balanced bundle policy not implemented yet");
+ } else {
+ webrtc->bundle_policy = g_value_get_enum (value);
+ }
+ break;
+ case PROP_ICE_TRANSPORT_POLICY:
+ webrtc->ice_transport_policy = g_value_get_enum (value);
+ gst_webrtc_ice_set_force_relay (webrtc->priv->ice,
+ webrtc->ice_transport_policy ==
+ GST_WEBRTC_ICE_TRANSPORT_POLICY_RELAY ? TRUE : FALSE);
+ break;
+ case PROP_LATENCY:
+ g_object_set_property (G_OBJECT (webrtc->rtpbin), "latency", value);
+ webrtc->priv->jb_latency = g_value_get_uint (value);
+ _update_rtpstorage_latency (webrtc);
+ break;
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ case PROP_NETSIM:
++ webrtc->priv->netsim = g_value_get_boolean (value);
++ _update_drop_probability (webrtc, webrtc->priv->netsim ?
++ webrtc->priv->drop_probability_sender : 0.0, TRUE);
++ _update_drop_probability (webrtc, webrtc->priv->netsim ?
++ webrtc->priv->drop_probability_receiver : 0.0, FALSE);
++ break;
++ case PROP_DROP_PROBABILITY_SENDER:
++ webrtc->priv->drop_probability_sender = g_value_get_float (value);
++ _update_drop_probability (webrtc, webrtc->priv->drop_probability_sender, TRUE);
++ break;
++ case PROP_DROP_PROBABILITY_RECEIVER:
++ webrtc->priv->drop_probability_receiver = g_value_get_float (value);
++ _update_drop_probability (webrtc, webrtc->priv->drop_probability_receiver, FALSE);
++ break;
++#endif
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_webrtc_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (object);
+
+ PC_LOCK (webrtc);
+ switch (prop_id) {
+ case PROP_CONNECTION_STATE:
+ g_value_set_enum (value, webrtc->peer_connection_state);
+ break;
+ case PROP_SIGNALING_STATE:
+ g_value_set_enum (value, webrtc->signaling_state);
+ break;
+ case PROP_ICE_GATHERING_STATE:
+ g_value_set_enum (value, webrtc->ice_gathering_state);
+ break;
+ case PROP_ICE_CONNECTION_STATE:
+ g_value_set_enum (value, webrtc->ice_connection_state);
+ break;
+ case PROP_LOCAL_DESCRIPTION:
+ if (webrtc->pending_local_description)
+ g_value_set_boxed (value, webrtc->pending_local_description);
+ else if (webrtc->current_local_description)
+ g_value_set_boxed (value, webrtc->current_local_description);
+ else
+ g_value_set_boxed (value, NULL);
+ break;
+ case PROP_CURRENT_LOCAL_DESCRIPTION:
+ g_value_set_boxed (value, webrtc->current_local_description);
+ break;
+ case PROP_PENDING_LOCAL_DESCRIPTION:
+ g_value_set_boxed (value, webrtc->pending_local_description);
+ break;
+ case PROP_REMOTE_DESCRIPTION:
+ if (webrtc->pending_remote_description)
+ g_value_set_boxed (value, webrtc->pending_remote_description);
+ else if (webrtc->current_remote_description)
+ g_value_set_boxed (value, webrtc->current_remote_description);
+ else
+ g_value_set_boxed (value, NULL);
+ break;
+ case PROP_CURRENT_REMOTE_DESCRIPTION:
+ g_value_set_boxed (value, webrtc->current_remote_description);
+ break;
+ case PROP_PENDING_REMOTE_DESCRIPTION:
+ g_value_set_boxed (value, webrtc->pending_remote_description);
+ break;
+ case PROP_STUN_SERVER:
+ g_value_take_string (value,
+ gst_webrtc_ice_get_stun_server (webrtc->priv->ice));
+ break;
+ case PROP_TURN_SERVER:
+ g_value_take_string (value,
+ gst_webrtc_ice_get_turn_server (webrtc->priv->ice));
+ break;
+ case PROP_BUNDLE_POLICY:
+ g_value_set_enum (value, webrtc->bundle_policy);
+ break;
+ case PROP_ICE_TRANSPORT_POLICY:
+ g_value_set_enum (value, webrtc->ice_transport_policy);
+ break;
+ case PROP_ICE_AGENT:
+ g_value_set_object (value, webrtc->priv->ice);
+ break;
+ case PROP_LATENCY:
+ g_value_set_uint (value, webrtc->priv->jb_latency);
+ break;
+ case PROP_SCTP_TRANSPORT:
+ g_value_set_object (value, webrtc->priv->sctp_transport);
+ break;
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ case PROP_NETSIM:
++ g_value_set_boolean (value, webrtc->priv->netsim);
++ break;
++ case PROP_DROP_PROBABILITY_SENDER:
++ g_value_set_float (value, webrtc->priv->drop_probability_sender);
++ break;
++ case PROP_DROP_PROBABILITY_RECEIVER:
++ g_value_set_float (value, webrtc->priv->drop_probability_receiver);
++ break;
++#endif
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ PC_UNLOCK (webrtc);
+ }
+
+ static void
+ gst_webrtc_bin_constructed (GObject * object)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (object);
+ gchar *name;
+
+ name = g_strdup_printf ("%s:ice", GST_OBJECT_NAME (webrtc));
+ webrtc->priv->ice = gst_webrtc_ice_new (name);
+
+ gst_webrtc_ice_set_on_ice_candidate (webrtc->priv->ice,
+ (GstWebRTCIceOnCandidateFunc) _on_local_ice_candidate_cb, webrtc, NULL);
+
+ g_free (name);
+ }
+
+ static void
+ _free_pending_pad (GstPad * pad)
+ {
+ gst_object_unref (pad);
+ }
+
+ static void
+ gst_webrtc_bin_dispose (GObject * object)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (object);
+
+ if (webrtc->priv->ice)
+ gst_object_unref (webrtc->priv->ice);
+ webrtc->priv->ice = NULL;
+
+ if (webrtc->priv->ice_stream_map)
+ g_array_free (webrtc->priv->ice_stream_map, TRUE);
+ webrtc->priv->ice_stream_map = NULL;
+
+ g_clear_object (&webrtc->priv->sctp_transport);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_webrtc_bin_finalize (GObject * object)
+ {
+ GstWebRTCBin *webrtc = GST_WEBRTC_BIN (object);
+
+ if (webrtc->priv->transports)
+ g_ptr_array_free (webrtc->priv->transports, TRUE);
+ webrtc->priv->transports = NULL;
+
+ if (webrtc->priv->transceivers)
+ g_ptr_array_free (webrtc->priv->transceivers, TRUE);
+ webrtc->priv->transceivers = NULL;
+
+ if (webrtc->priv->data_channels)
+ g_ptr_array_free (webrtc->priv->data_channels, TRUE);
+ webrtc->priv->data_channels = NULL;
+
+ if (webrtc->priv->pending_data_channels)
+ g_ptr_array_free (webrtc->priv->pending_data_channels, TRUE);
+ webrtc->priv->pending_data_channels = NULL;
+
+ if (webrtc->priv->pending_remote_ice_candidates)
+ g_array_free (webrtc->priv->pending_remote_ice_candidates, TRUE);
+ webrtc->priv->pending_remote_ice_candidates = NULL;
+
+ if (webrtc->priv->pending_local_ice_candidates)
+ g_array_free (webrtc->priv->pending_local_ice_candidates, TRUE);
+ webrtc->priv->pending_local_ice_candidates = NULL;
+
+ if (webrtc->priv->pending_pads)
+ g_list_free_full (webrtc->priv->pending_pads,
+ (GDestroyNotify) _free_pending_pad);
+ webrtc->priv->pending_pads = NULL;
+
+ if (webrtc->priv->pending_sink_transceivers)
+ g_list_free_full (webrtc->priv->pending_sink_transceivers,
+ (GDestroyNotify) gst_object_unref);
+ webrtc->priv->pending_sink_transceivers = NULL;
+
+ if (webrtc->current_local_description)
+ gst_webrtc_session_description_free (webrtc->current_local_description);
+ webrtc->current_local_description = NULL;
+ if (webrtc->pending_local_description)
+ gst_webrtc_session_description_free (webrtc->pending_local_description);
+ webrtc->pending_local_description = NULL;
+
+ if (webrtc->current_remote_description)
+ gst_webrtc_session_description_free (webrtc->current_remote_description);
+ webrtc->current_remote_description = NULL;
+ if (webrtc->pending_remote_description)
+ gst_webrtc_session_description_free (webrtc->pending_remote_description);
+ webrtc->pending_remote_description = NULL;
+
+ if (webrtc->priv->last_generated_answer)
+ gst_webrtc_session_description_free (webrtc->priv->last_generated_answer);
+ webrtc->priv->last_generated_answer = NULL;
+ if (webrtc->priv->last_generated_offer)
+ gst_webrtc_session_description_free (webrtc->priv->last_generated_offer);
+ webrtc->priv->last_generated_offer = NULL;
+
+ g_mutex_clear (DC_GET_LOCK (webrtc));
+ g_mutex_clear (ICE_GET_LOCK (webrtc));
+ g_mutex_clear (PC_GET_LOCK (webrtc));
+ g_cond_clear (PC_GET_COND (webrtc));
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *element_class = (GstElementClass *) klass;
+
+ element_class->request_new_pad = gst_webrtc_bin_request_new_pad;
+ element_class->release_pad = gst_webrtc_bin_release_pad;
+ element_class->change_state = gst_webrtc_bin_change_state;
+
+ gst_element_class_add_static_pad_template_with_gtype (element_class,
+ &sink_template, GST_TYPE_WEBRTC_BIN_PAD);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+
+ gst_element_class_set_metadata (element_class, "WebRTC Bin",
+ "Filter/Network/WebRTC", "A bin for webrtc connections",
+ "Matthew Waters <matthew@centricular.com>");
+
+ gobject_class->constructed = gst_webrtc_bin_constructed;
+ gobject_class->get_property = gst_webrtc_bin_get_property;
+ gobject_class->set_property = gst_webrtc_bin_set_property;
+ gobject_class->dispose = gst_webrtc_bin_dispose;
+ gobject_class->finalize = gst_webrtc_bin_finalize;
+
+ g_object_class_install_property (gobject_class,
+ PROP_LOCAL_DESCRIPTION,
+ g_param_spec_boxed ("local-description", "Local Description",
+ "The local SDP description in use for this connection. "
+ "Favours a pending description over the current description",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_CURRENT_LOCAL_DESCRIPTION,
+ g_param_spec_boxed ("current-local-description",
+ "Current Local Description",
+ "The local description that was successfully negotiated the last time "
+ "the connection transitioned into the stable state",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_PENDING_LOCAL_DESCRIPTION,
+ g_param_spec_boxed ("pending-local-description",
+ "Pending Local Description",
+ "The local description that is in the process of being negotiated plus "
+ "any local candidates that have been generated by the ICE Agent since the "
+ "offer or answer was created",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_REMOTE_DESCRIPTION,
+ g_param_spec_boxed ("remote-description", "Remote Description",
+ "The remote SDP description to use for this connection. "
+ "Favours a pending description over the current description",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_CURRENT_REMOTE_DESCRIPTION,
+ g_param_spec_boxed ("current-remote-description",
+ "Current Remote Description",
+ "The last remote description that was successfully negotiated the last "
+ "time the connection transitioned into the stable state plus any remote "
+ "candidates that have been supplied via addIceCandidate() since the offer "
+ "or answer was created",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_PENDING_REMOTE_DESCRIPTION,
+ g_param_spec_boxed ("pending-remote-description",
+ "Pending Remote Description",
+ "The remote description that is in the process of being negotiated, "
+ "complete with any remote candidates that have been supplied via "
+ "addIceCandidate() since the offer or answer was created",
+ GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_STUN_SERVER,
+ g_param_spec_string ("stun-server", "STUN Server",
+ "The STUN server of the form stun://hostname:port",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_TURN_SERVER,
+ g_param_spec_string ("turn-server", "TURN Server",
+ "The TURN server of the form turn(s)://username:password@host:port. "
+ "This is a convenience property, use #GstWebRTCBin::add-turn-server "
+ "if you wish to use multiple TURN servers",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_CONNECTION_STATE,
+ g_param_spec_enum ("connection-state", "Connection State",
+ "The overall connection state of this element",
+ GST_TYPE_WEBRTC_PEER_CONNECTION_STATE,
+ GST_WEBRTC_PEER_CONNECTION_STATE_NEW,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_SIGNALING_STATE,
+ g_param_spec_enum ("signaling-state", "Signaling State",
+ "The signaling state of this element",
+ GST_TYPE_WEBRTC_SIGNALING_STATE,
+ GST_WEBRTC_SIGNALING_STATE_STABLE,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_ICE_CONNECTION_STATE,
+ g_param_spec_enum ("ice-connection-state", "ICE connection state",
+ "The collective connection state of all ICETransport's",
+ GST_TYPE_WEBRTC_ICE_CONNECTION_STATE,
+ GST_WEBRTC_ICE_CONNECTION_STATE_NEW,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_ICE_GATHERING_STATE,
+ g_param_spec_enum ("ice-gathering-state", "ICE gathering state",
+ "The collective gathering state of all ICETransport's",
+ GST_TYPE_WEBRTC_ICE_GATHERING_STATE,
+ GST_WEBRTC_ICE_GATHERING_STATE_NEW,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_BUNDLE_POLICY,
+ g_param_spec_enum ("bundle-policy", "Bundle Policy",
+ "The policy to apply for bundling",
+ GST_TYPE_WEBRTC_BUNDLE_POLICY,
+ GST_WEBRTC_BUNDLE_POLICY_NONE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_ICE_TRANSPORT_POLICY,
+ g_param_spec_enum ("ice-transport-policy", "ICE Transport Policy",
+ "The policy to apply for ICE transport",
+ GST_TYPE_WEBRTC_ICE_TRANSPORT_POLICY,
+ GST_WEBRTC_ICE_TRANSPORT_POLICY_ALL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_ICE_AGENT,
+ g_param_spec_object ("ice-agent", "WebRTC ICE agent",
+ "The WebRTC ICE agent",
+ GST_TYPE_WEBRTC_ICE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstWebRTCBin:latency:
+ *
+ * Default duration to buffer in the jitterbuffers (in ms)
+ *
+ * Since: 1.18
+ */
+
+ g_object_class_install_property (gobject_class,
+ PROP_LATENCY,
+ g_param_spec_uint ("latency", "Latency",
+ "Default duration to buffer in the jitterbuffers (in ms)",
+ 0, G_MAXUINT, DEFAULT_JB_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstWebRTCBin:sctp-transport:
+ *
+ * The WebRTC SCTP Transport
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_SCTP_TRANSPORT,
+ g_param_spec_object ("sctp-transport", "WebRTC SCTP Transport",
+ "The WebRTC SCTP Transport",
+ GST_TYPE_WEBRTC_SCTP_TRANSPORT,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ g_object_class_install_property (gobject_class,
++ PROP_NETSIM,
++ g_param_spec_boolean ("netsim", "Use network simulator",
++ "Use network simulator for packet loss",
++ FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++ g_object_class_install_property (gobject_class,
++ PROP_DROP_PROBABILITY_SENDER,
++ g_param_spec_float ("drop-probability-sender", "Drop Probability for sender",
++ "The Probability a sending RTP buffer is dropped",
++ 0.0, 1.0, 0.0,
++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
++
++ g_object_class_install_property (gobject_class,
++ PROP_DROP_PROBABILITY_RECEIVER,
++ g_param_spec_float ("drop-probability-receiver", "Drop Probability for receiver",
++ "The Probability a received RTP buffer is dropped",
++ 0.0, 1.0, 0.0,
++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
++
++#endif
++
+ /**
+ * GstWebRTCBin::create-offer:
+ * @object: the #webrtcbin
+ * @options: (nullable): create-offer options
+ * @promise: a #GstPromise which will contain the offer
+ */
+ gst_webrtc_bin_signals[CREATE_OFFER_SIGNAL] =
+ g_signal_new_class_handler ("create-offer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_create_offer), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_STRUCTURE, GST_TYPE_PROMISE);
+
+ /**
+ * GstWebRTCBin::create-answer:
+ * @object: the #webrtcbin
+ * @options: (nullable): create-answer options
+ * @promise: a #GstPromise which will contain the answer
+ */
+ gst_webrtc_bin_signals[CREATE_ANSWER_SIGNAL] =
+ g_signal_new_class_handler ("create-answer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_create_answer), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_STRUCTURE, GST_TYPE_PROMISE);
+
+ /**
+ * GstWebRTCBin::set-local-description:
+ * @object: the #GstWebRTCBin
+ * @desc: a #GstWebRTCSessionDescription description
+ * @promise: (nullable): a #GstPromise to be notified when it's set
+ */
+ gst_webrtc_bin_signals[SET_LOCAL_DESCRIPTION_SIGNAL] =
+ g_signal_new_class_handler ("set-local-description",
+ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_set_local_description), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_WEBRTC_SESSION_DESCRIPTION, GST_TYPE_PROMISE);
+
+ /**
+ * GstWebRTCBin::set-remote-description:
+ * @object: the #GstWebRTCBin
+ * @desc: a #GstWebRTCSessionDescription description
+ * @promise: (nullable): a #GstPromise to be notified when it's set
+ */
+ gst_webrtc_bin_signals[SET_REMOTE_DESCRIPTION_SIGNAL] =
+ g_signal_new_class_handler ("set-remote-description",
+ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_set_remote_description), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_WEBRTC_SESSION_DESCRIPTION, GST_TYPE_PROMISE);
+
+ /**
+ * GstWebRTCBin::add-ice-candidate:
+ * @object: the #webrtcbin
+ * @mline_index: the index of the media description in the SDP
+ * @ice-candidate: an ice candidate or NULL/"" to mark that no more candidates
+ * will arrive
+ */
+ gst_webrtc_bin_signals[ADD_ICE_CANDIDATE_SIGNAL] =
+ g_signal_new_class_handler ("add-ice-candidate",
+ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_add_ice_candidate), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_STRING);
+
+ /**
+ * GstWebRTCBin::get-stats:
+ * @object: the #webrtcbin
+ * @pad: (nullable): A #GstPad to get the stats for, or %NULL for all
+ * @promise: a #GstPromise for the result
+ *
+ * The @promise will contain the result of retrieving the session statistics.
+ * The structure will be named 'application/x-webrtc-stats' and contain the
+ * following based on the webrtc-stats spec available from
+ * https://www.w3.org/TR/webrtc-stats/. As the webrtc-stats spec is a draft
+ * and is constantly changing these statistics may be changed to fit with
+ * the latest spec.
+ *
+ * Each field key is a unique identifier for each RTCStats
+ * (https://www.w3.org/TR/webrtc/#rtcstats-dictionary) value (another
+ * GstStructure) in the RTCStatsReport
+ * (https://www.w3.org/TR/webrtc/#rtcstatsreport-object). Each supported
+ * field in the RTCStats subclass is outlined below.
+ *
+ * Each statistics structure contains the following values as defined by
+ * the RTCStats dictionary (https://www.w3.org/TR/webrtc/#rtcstats-dictionary).
+ *
+ * "timestamp" G_TYPE_DOUBLE timestamp the statistics were generated
+ * "type" GST_TYPE_WEBRTC_STATS_TYPE the type of statistics reported
+ * "id" G_TYPE_STRING unique identifier
+ *
+ * RTCCodecStats supported fields (https://w3c.github.io/webrtc-stats/#codec-dict*)
+ *
+ * "payload-type" G_TYPE_UINT the rtp payload number in use
+ * "clock-rate" G_TYPE_UINT the rtp clock-rate
+ *
+ * RTCRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#streamstats-dict*)
+ *
+ * "ssrc" G_TYPE_STRING the rtp sequence src in use
+ * "transport-id" G_TYPE_STRING identifier for the associated RTCTransportStats for this stream
+ * "codec-id" G_TYPE_STRING identifier for the associated RTCCodecStats for this stream
+ * "fir-count" G_TYPE_UINT FIR requests received by the sender (only for local statistics)
+ * "pli-count" G_TYPE_UINT PLI requests received by the sender (only for local statistics)
+ * "nack-count" G_TYPE_UINT NACK requests received by the sender (only for local statistics)
+ *
+ * RTCReceivedStreamStats supported fields (https://w3c.github.io/webrtc-stats/#receivedrtpstats-dict*)
+ *
+ * "packets-received" G_TYPE_UINT64 number of packets received (only for local inbound)
+ * "bytes-received" G_TYPE_UINT64 number of bytes received (only for local inbound)
+ * "packets-lost" G_TYPE_UINT number of packets lost
+ * "jitter" G_TYPE_DOUBLE packet jitter measured in seconds
+ *
+ * RTCInboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict*)
+ *
+ * "remote-id" G_TYPE_STRING identifier for the associated RTCRemoteOutboundRTPStreamStats
+ *
+ * RTCRemoteInboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict*)
+ *
+ * "local-id" G_TYPE_STRING identifier for the associated RTCOutboundRTPSTreamStats
+ * "round-trip-time" G_TYPE_DOUBLE round trip time of packets measured in seconds
+ *
+ * RTCSentRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#sentrtpstats-dict*)
+ *
+ * "packets-sent" G_TYPE_UINT64 number of packets sent (only for local outbound)
+ * "bytes-sent" G_TYPE_UINT64 number of packets sent (only for local outbound)
+ *
+ * RTCOutboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict*)
+ *
+ * "remote-id" G_TYPE_STRING identifier for the associated RTCRemoteInboundRTPSTreamStats
+ *
+ * RTCRemoteOutboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict*)
+ *
+ * "local-id" G_TYPE_STRING identifier for the associated RTCInboundRTPSTreamStats
+ *
+ */
+ gst_webrtc_bin_signals[GET_STATS_SIGNAL] =
+ g_signal_new_class_handler ("get-stats",
+ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_get_stats), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_PAD, GST_TYPE_PROMISE);
+
+ /**
+ * GstWebRTCBin::on-negotiation-needed:
+ * @object: the #webrtcbin
+ */
+ gst_webrtc_bin_signals[ON_NEGOTIATION_NEEDED_SIGNAL] =
+ g_signal_new ("on-negotiation-needed", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 0);
+
+ /**
+ * GstWebRTCBin::on-ice-candidate:
+ * @object: the #webrtcbin
+ * @mline_index: the index of the media description in the SDP
+ * @candidate: the ICE candidate
+ */
+ gst_webrtc_bin_signals[ON_ICE_CANDIDATE_SIGNAL] =
+ g_signal_new ("on-ice-candidate", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
+ G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_STRING);
+
+ /**
+ * GstWebRTCBin::on-new-transceiver:
+ * @object: the #webrtcbin
+ * @candidate: the new #GstWebRTCRTPTransceiver
+ */
+ gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL] =
+ g_signal_new ("on-new-transceiver", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
+ G_TYPE_NONE, 1, GST_TYPE_WEBRTC_RTP_TRANSCEIVER);
+
+ /**
+ * GstWebRTCBin::on-data-channel:
+ * @object: the #GstWebRTCBin
+ * @candidate: the new `GstWebRTCDataChannel`
+ */
+ gst_webrtc_bin_signals[ON_DATA_CHANNEL_SIGNAL] =
+ g_signal_new ("on-data-channel", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
+ G_TYPE_NONE, 1, GST_TYPE_WEBRTC_DATA_CHANNEL);
+
+ /**
+ * GstWebRTCBin::add-transceiver:
+ * @object: the #webrtcbin
+ * @direction: the direction of the new transceiver
+ * @caps: (allow-none): the codec preferences for this transceiver
+ *
+ * Returns: the new #GstWebRTCRTPTransceiver
+ */
+ gst_webrtc_bin_signals[ADD_TRANSCEIVER_SIGNAL] =
+ g_signal_new_class_handler ("add-transceiver", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_add_transceiver), NULL, NULL,
+ NULL, GST_TYPE_WEBRTC_RTP_TRANSCEIVER, 2,
+ GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, GST_TYPE_CAPS);
+
+ /**
+ * GstWebRTCBin::get-transceivers:
+ * @object: the #webrtcbin
+ *
+ * Returns: a #GArray of #GstWebRTCRTPTransceivers
+ */
+ gst_webrtc_bin_signals[GET_TRANSCEIVERS_SIGNAL] =
+ g_signal_new_class_handler ("get-transceivers", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_get_transceivers), NULL, NULL, NULL,
+ G_TYPE_ARRAY, 0);
+
+ /**
+ * GstWebRTCBin::get-transceiver:
+ * @object: the #GstWebRTCBin
+ * @idx: The index of the transceiver
+ *
+ * Returns: (transfer full): the #GstWebRTCRTPTransceiver, or %NULL
+ * Since: 1.16
+ */
+ gst_webrtc_bin_signals[GET_TRANSCEIVER_SIGNAL] =
+ g_signal_new_class_handler ("get-transceiver", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_get_transceiver), NULL, NULL, NULL,
+ GST_TYPE_WEBRTC_RTP_TRANSCEIVER, 1, G_TYPE_INT);
+
+ /**
+ * GstWebRTCBin::add-turn-server:
+ * @object: the #GstWebRTCBin
+ * @uri: The uri of the server of the form turn(s)://username:password@host:port
+ *
+ * Add a turn server to obtain ICE candidates from
+ */
+ gst_webrtc_bin_signals[ADD_TURN_SERVER_SIGNAL] =
+ g_signal_new_class_handler ("add-turn-server", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_add_turn_server), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 1, G_TYPE_STRING);
+
+ /*
+ * GstWebRTCBin::create-data-channel:
+ * @object: the #GstWebRTCBin
+ * @label: the label for the data channel
+ * @options: a #GstStructure of options for creating the data channel
+ *
+ * The options dictionary is the same format as the RTCDataChannelInit
+ * members outlined https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit and
+ * reproduced below
+ *
+ * ordered G_TYPE_BOOLEAN Whether the channel will send data with guaranteed ordering
+ * max-packet-lifetime G_TYPE_INT The time in milliseconds to attempt transmitting unacknowledged data. -1 for unset
+ * max-retransmits G_TYPE_INT The number of times data will be attempted to be transmitted without acknowledgement before dropping
+ * protocol G_TYPE_STRING The subprotocol used by this channel
+ * negotiated G_TYPE_BOOLEAN Whether the created data channel should not perform in-band channel announcement. If %TRUE, then application must negotiate the channel itself and create the corresponding channel on the peer with the same id.
+ * id G_TYPE_INT Override the default identifier selection of this channel
+ * priority GST_TYPE_WEBRTC_PRIORITY_TYPE The priority to use for this channel
+ *
+ * Returns: (transfer full): a new data channel object
+ */
+ gst_webrtc_bin_signals[CREATE_DATA_CHANNEL_SIGNAL] =
+ g_signal_new_class_handler ("create-data-channel",
+ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_bin_create_data_channel), NULL, NULL,
+ NULL, GST_TYPE_WEBRTC_DATA_CHANNEL, 2, G_TYPE_STRING, GST_TYPE_STRUCTURE);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_WEBRTC_BIN_PAD, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_WEBRTC_ICE, 0);
+ }
+
+ static void
+ _unparent_and_unref (GObject * object)
+ {
+ GstObject *obj = GST_OBJECT (object);
+
+ GST_OBJECT_PARENT (obj) = NULL;
+
+ gst_object_unref (obj);
+ }
+
+ static void
+ _transport_free (GObject * object)
+ {
+ TransportStream *stream = (TransportStream *) object;
+ GstWebRTCBin *webrtc;
+
+ webrtc = GST_WEBRTC_BIN (GST_OBJECT_PARENT (stream));
+
+ if (stream->transport) {
+ g_signal_handlers_disconnect_by_data (stream->transport->transport, webrtc);
+ g_signal_handlers_disconnect_by_data (stream->transport, webrtc);
+ }
+
+ gst_object_unref (object);
+ }
+
+ static void
+ gst_webrtc_bin_init (GstWebRTCBin * webrtc)
+ {
+ webrtc->priv = gst_webrtc_bin_get_instance_private (webrtc);
+ g_mutex_init (PC_GET_LOCK (webrtc));
+ g_cond_init (PC_GET_COND (webrtc));
+
+ g_mutex_init (ICE_GET_LOCK (webrtc));
+ g_mutex_init (DC_GET_LOCK (webrtc));
+
+ webrtc->rtpbin = _create_rtpbin (webrtc);
+ gst_bin_add (GST_BIN (webrtc), webrtc->rtpbin);
+
+ webrtc->priv->transceivers =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) _unparent_and_unref);
+ webrtc->priv->transports =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) _transport_free);
+
+ webrtc->priv->data_channels =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+
+ webrtc->priv->pending_data_channels =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+
+ webrtc->priv->ice_stream_map =
+ g_array_new (FALSE, TRUE, sizeof (IceStreamItem));
+ webrtc->priv->pending_remote_ice_candidates =
+ g_array_new (FALSE, TRUE, sizeof (IceCandidateItem));
+ g_array_set_clear_func (webrtc->priv->pending_remote_ice_candidates,
+ (GDestroyNotify) _clear_ice_candidate_item);
+
+ webrtc->priv->pending_local_ice_candidates =
+ g_array_new (FALSE, TRUE, sizeof (IceCandidateItem));
+ g_array_set_clear_func (webrtc->priv->pending_local_ice_candidates,
+ (GDestroyNotify) _clear_ice_candidate_item);
+
+ /* we start off closed until we move to READY */
+ webrtc->priv->is_closed = TRUE;
+ webrtc->priv->jb_latency = DEFAULT_JB_LATENCY;
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2017 Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_WEBRTC_BIN_H__
+ #define __GST_WEBRTC_BIN_H__
+
+ #include <gst/sdp/sdp.h>
+ #include "fwd.h"
+ #include "gstwebrtcice.h"
+ #include "transportstream.h"
+ #include "webrtcsctptransport.h"
+
+ G_BEGIN_DECLS
+
+ GType gst_webrtc_bin_pad_get_type(void);
+ #define GST_TYPE_WEBRTC_BIN_PAD (gst_webrtc_bin_pad_get_type())
+ #define GST_WEBRTC_BIN_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_BIN_PAD,GstWebRTCBinPad))
+ #define GST_IS_WEBRTC_BIN_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_BIN_PAD))
+ #define GST_WEBRTC_BIN_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_BIN_PAD,GstWebRTCBinPadClass))
+ #define GST_IS_WEBRTC_BIN_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_BIN_PAD))
+ #define GST_WEBRTC_BIN_PAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_BIN_PAD,GstWebRTCBinPadClass))
+
+ typedef struct _GstWebRTCBinPad GstWebRTCBinPad;
+ typedef struct _GstWebRTCBinPadClass GstWebRTCBinPadClass;
+
+ struct _GstWebRTCBinPad
+ {
+ GstGhostPad parent;
+
+ GstWebRTCRTPTransceiver *trans;
+ gulong block_id;
+
+ GstCaps *received_caps;
+ };
+
+ struct _GstWebRTCBinPadClass
+ {
+ GstGhostPadClass parent_class;
+ };
+
+ GType gst_webrtc_bin_get_type(void);
+ #define GST_TYPE_WEBRTC_BIN (gst_webrtc_bin_get_type())
+ #define GST_WEBRTC_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_BIN,GstWebRTCBin))
+ #define GST_IS_WEBRTC_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_BIN))
+ #define GST_WEBRTC_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_BIN,GstWebRTCBinClass))
+ #define GST_IS_WEBRTC_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_BIN))
+ #define GST_WEBRTC_BIN_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_BIN,GstWebRTCBinClass))
+
+ struct _GstWebRTCBin
+ {
+ GstBin parent;
+
+ GstElement *rtpbin;
+ GstElement *rtpfunnel;
+
+ GstWebRTCSignalingState signaling_state;
+ GstWebRTCICEGatheringState ice_gathering_state;
++#ifdef __TIZEN__
++ GstWebRTCICEGatheringState pending_ice_gathering_state;
++#endif
+ GstWebRTCICEConnectionState ice_connection_state;
+ GstWebRTCPeerConnectionState peer_connection_state;
+
+ GstWebRTCSessionDescription *current_local_description;
+ GstWebRTCSessionDescription *pending_local_description;
+ GstWebRTCSessionDescription *current_remote_description;
+ GstWebRTCSessionDescription *pending_remote_description;
+
+ GstWebRTCBundlePolicy bundle_policy;
+ GstWebRTCICETransportPolicy ice_transport_policy;
+
+ GstWebRTCBinPrivate *priv;
+ };
+
+ struct _GstWebRTCBinClass
+ {
+ GstBinClass parent_class;
+ };
+
+ struct _GstWebRTCBinPrivate
+ {
+ guint max_sink_pad_serial;
+
+ gboolean bundle;
+ GPtrArray *transceivers;
+ GPtrArray *transports;
+ GPtrArray *data_channels;
+ /* list of data channels we've received a sctp stream for but no data
+ * channel protocol for */
+ GPtrArray *pending_data_channels;
+ /* dc_lock protects data_channels and pending_data_channels */
+ /* lock ordering is pc_lock first, then dc_lock */
+ GMutex dc_lock;
+
+ guint jb_latency;
+
+ WebRTCSCTPTransport *sctp_transport;
+ TransportStream *data_channel_transport;
+
+ GstWebRTCICE *ice;
+ GArray *ice_stream_map;
+ GMutex ice_lock;
+ GArray *pending_remote_ice_candidates;
+ GArray *pending_local_ice_candidates;
+
+ /* peerconnection variables */
+ gboolean is_closed;
+ gboolean need_negotiation;
+
+ /* peerconnection helper thread for promises */
+ GMainContext *main_context;
+ GMainLoop *loop;
+ GThread *thread;
+ GMutex pc_lock;
+ GCond pc_cond;
+
+ gboolean running;
+ gboolean async_pending;
+
+ GList *pending_pads;
+ GList *pending_sink_transceivers;
+
+ /* count of the number of media streams we've offered for uniqueness */
+ /* FIXME: overflow? */
+ guint media_counter;
+ /* the number of times create_offer has been called for the version field */
+ guint offer_count;
+ GstWebRTCSessionDescription *last_generated_offer;
+ GstWebRTCSessionDescription *last_generated_answer;
+
+ gboolean tos_attached;
++#ifdef TIZEN_FEATURE_IMPORT_NETSIM
++ gboolean netsim;
++ gfloat drop_probability_sender;
++ gfloat drop_probability_receiver;
++#endif
+ };
+
+ typedef GstStructure *(*GstWebRTCBinFunc) (GstWebRTCBin * webrtc, gpointer data);
+
+ typedef struct
+ {
+ GstWebRTCBin *webrtc;
+ GstWebRTCBinFunc op;
+ gpointer data;
+ GDestroyNotify notify;
+ GstPromise *promise;
+ } GstWebRTCBinTask;
+
+ gboolean gst_webrtc_bin_enqueue_task (GstWebRTCBin * pc,
+ GstWebRTCBinFunc func,
+ gpointer data,
+ GDestroyNotify notify,
+ GstPromise *promise);
+
+ G_END_DECLS
+
+ #endif /* __GST_WEBRTC_BIN_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2017 Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include "gstwebrtcice.h"
+ /* libnice */
+ #include <agent.h>
+ #include "icestream.h"
+ #include "nicetransport.h"
+
+ /* XXX:
+ *
+ * - are locally generated remote candidates meant to be readded to libnice?
+ */
+
+ static GstUri *_validate_turn_server (GstWebRTCICE * ice, const gchar * s);
+
+ #define GST_CAT_DEFAULT gst_webrtc_ice_debug
+ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+ GQuark
+ gst_webrtc_ice_error_quark (void)
+ {
+ return g_quark_from_static_string ("gst-webrtc-ice-error-quark");
+ }
+
+ enum
+ {
+ SIGNAL_0,
+ ADD_LOCAL_IP_ADDRESS_SIGNAL,
+ LAST_SIGNAL,
+ };
+
+ enum
+ {
+ PROP_0,
+ PROP_AGENT,
+ PROP_ICE_TCP,
+ PROP_ICE_UDP,
+ PROP_MIN_RTP_PORT,
+ PROP_MAX_RTP_PORT,
+ };
+
+ static guint gst_webrtc_ice_signals[LAST_SIGNAL] = { 0 };
+
+ struct _GstWebRTCICEPrivate
+ {
+ NiceAgent *nice_agent;
+
+ GArray *nice_stream_map;
+
+ GThread *thread;
+ GMainContext *main_context;
+ GMainLoop *loop;
+ GMutex lock;
+ GCond cond;
+
+ GstWebRTCIceOnCandidateFunc on_candidate;
+ gpointer on_candidate_data;
+ GDestroyNotify on_candidate_notify;
+ };
+
+ #define gst_webrtc_ice_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstWebRTCICE, gst_webrtc_ice,
+ GST_TYPE_OBJECT, G_ADD_PRIVATE (GstWebRTCICE)
+ GST_DEBUG_CATEGORY_INIT (gst_webrtc_ice_debug, "webrtcice", 0,
+ "webrtcice"););
+
+ static gboolean
+ _unlock_pc_thread (GMutex * lock)
+ {
+ g_mutex_unlock (lock);
+ return G_SOURCE_REMOVE;
+ }
+
+ static gpointer
+ _gst_nice_thread (GstWebRTCICE * ice)
+ {
+ g_mutex_lock (&ice->priv->lock);
+ ice->priv->main_context = g_main_context_new ();
+ ice->priv->loop = g_main_loop_new (ice->priv->main_context, FALSE);
+
+ g_cond_broadcast (&ice->priv->cond);
+ g_main_context_invoke (ice->priv->main_context,
+ (GSourceFunc) _unlock_pc_thread, &ice->priv->lock);
+
+ g_main_loop_run (ice->priv->loop);
+
+ g_mutex_lock (&ice->priv->lock);
+ g_main_context_unref (ice->priv->main_context);
+ ice->priv->main_context = NULL;
+ g_main_loop_unref (ice->priv->loop);
+ ice->priv->loop = NULL;
+ g_cond_broadcast (&ice->priv->cond);
+ g_mutex_unlock (&ice->priv->lock);
+
+ return NULL;
+ }
+
+ static void
+ _start_thread (GstWebRTCICE * ice)
+ {
+ g_mutex_lock (&ice->priv->lock);
+ ice->priv->thread = g_thread_new (GST_OBJECT_NAME (ice),
+ (GThreadFunc) _gst_nice_thread, ice);
+
+ while (!ice->priv->loop)
+ g_cond_wait (&ice->priv->cond, &ice->priv->lock);
+ g_mutex_unlock (&ice->priv->lock);
+ }
+
+ static void
+ _stop_thread (GstWebRTCICE * ice)
+ {
+ g_mutex_lock (&ice->priv->lock);
+ g_main_loop_quit (ice->priv->loop);
+ while (ice->priv->loop)
+ g_cond_wait (&ice->priv->cond, &ice->priv->lock);
+ g_mutex_unlock (&ice->priv->lock);
+
+ g_thread_unref (ice->priv->thread);
+ }
+
+ struct NiceStreamItem
+ {
+ guint session_id;
+ guint nice_stream_id;
+ GstWebRTCICEStream *stream;
+ };
+
+ /* TRUE to continue, FALSE to stop */
+ typedef gboolean (*NiceStreamItemForeachFunc) (struct NiceStreamItem * item,
+ gpointer user_data);
+
+ static void
+ _nice_stream_item_foreach (GstWebRTCICE * ice, NiceStreamItemForeachFunc func,
+ gpointer data)
+ {
+ int i, len;
+
+ len = ice->priv->nice_stream_map->len;
+ for (i = 0; i < len; i++) {
+ struct NiceStreamItem *item =
+ &g_array_index (ice->priv->nice_stream_map, struct NiceStreamItem,
+ i);
+
+ if (!func (item, data))
+ break;
+ }
+ }
+
+ /* TRUE for match, FALSE otherwise */
+ typedef gboolean (*NiceStreamItemFindFunc) (struct NiceStreamItem * item,
+ gpointer user_data);
+
+ struct nice_find
+ {
+ NiceStreamItemFindFunc func;
+ gpointer data;
+ struct NiceStreamItem *ret;
+ };
+
+ static gboolean
+ _find_nice_item (struct NiceStreamItem *item, gpointer user_data)
+ {
+ struct nice_find *f = user_data;
+ if (f->func (item, f->data)) {
+ f->ret = item;
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ static struct NiceStreamItem *
+ _nice_stream_item_find (GstWebRTCICE * ice, NiceStreamItemFindFunc func,
+ gpointer data)
+ {
+ struct nice_find f;
+
+ f.func = func;
+ f.data = data;
+ f.ret = NULL;
+
+ _nice_stream_item_foreach (ice, _find_nice_item, &f);
+
+ return f.ret;
+ }
+
+ #define NICE_MATCH_INIT { -1, -1, NULL }
+
+ static gboolean
+ _match (struct NiceStreamItem *item, struct NiceStreamItem *m)
+ {
+ if (m->session_id != -1 && m->session_id != item->session_id)
+ return FALSE;
+ if (m->nice_stream_id != -1 && m->nice_stream_id != item->nice_stream_id)
+ return FALSE;
+ if (m->stream != NULL && m->stream != item->stream)
+ return FALSE;
+
+ return TRUE;
+ }
+
+ static struct NiceStreamItem *
+ _find_item (GstWebRTCICE * ice, guint session_id, guint nice_stream_id,
+ GstWebRTCICEStream * stream)
+ {
+ struct NiceStreamItem m = NICE_MATCH_INIT;
+
+ m.session_id = session_id;
+ m.nice_stream_id = nice_stream_id;
+ m.stream = stream;
+
+ return _nice_stream_item_find (ice, (NiceStreamItemFindFunc) _match, &m);
+ }
+
+ static struct NiceStreamItem *
+ _create_nice_stream_item (GstWebRTCICE * ice, guint session_id)
+ {
+ struct NiceStreamItem item;
+
+ item.session_id = session_id;
+ item.nice_stream_id = nice_agent_add_stream (ice->priv->nice_agent, 1);
+ item.stream = gst_webrtc_ice_stream_new (ice, item.nice_stream_id);
+ g_array_append_val (ice->priv->nice_stream_map, item);
+
+ return _find_item (ice, item.session_id, item.nice_stream_id, item.stream);
+ }
+
+ static void
+ _parse_userinfo (const gchar * userinfo, gchar ** user, gchar ** pass)
+ {
+ const gchar *colon;
+
+ if (!userinfo) {
+ *user = NULL;
+ *pass = NULL;
+ return;
+ }
+
+ colon = g_strstr_len (userinfo, -1, ":");
+ if (!colon) {
+ *user = g_uri_unescape_string (userinfo, NULL);
+ *pass = NULL;
+ return;
+ }
+
+ /* Check that the first occurrence is also the last occurrence */
+ if (colon != g_strrstr (userinfo, ":"))
+ GST_WARNING ("userinfo %s contains more than one ':', will assume that the "
+ "first ':' delineates user:pass. You should escape the user and pass "
+ "before adding to the URI.", userinfo);
+
+ *user = g_uri_unescape_segment (userinfo, colon, NULL);
+ *pass = g_uri_unescape_string (&colon[1], NULL);
+ }
+
+ static gchar *
+ _resolve_host (GstWebRTCICE * ice, const gchar * host)
+ {
+ GResolver *resolver = g_resolver_get_default ();
+ GError *error = NULL;
+ GInetAddress *addr;
+ GList *addresses;
+ gchar *address;
+
+ GST_DEBUG_OBJECT (ice, "Resolving host %s", host);
+
+ if (!(addresses = g_resolver_lookup_by_name (resolver, host, NULL, &error))) {
+ GST_ERROR ("%s", error->message);
+ g_clear_error (&error);
+ return NULL;
+ }
+
+ GST_DEBUG_OBJECT (ice, "Resolved %d addresses for host %s",
+ g_list_length (addresses), host);
+
+ /* XXX: only the first address is used */
+ addr = addresses->data;
+ address = g_inet_address_to_string (addr);
+ g_resolver_free_addresses (addresses);
+
+ return address;
+ }
+
+ static void
+ _add_turn_server (GstWebRTCICE * ice, struct NiceStreamItem *item,
+ GstUri * turn_server)
+ {
+ gboolean ret;
+ gchar *user, *pass;
+ const gchar *host, *userinfo, *transport, *scheme;
+ NiceRelayType relays[4] = { 0, };
+ int i, relay_n = 0;
+ gchar *ip = NULL;
+
+ host = gst_uri_get_host (turn_server);
+ if (!host) {
+ GST_ERROR_OBJECT (ice, "Turn server has no host");
+ goto out;
+ }
+ ip = _resolve_host (ice, host);
+ if (!ip) {
+ GST_ERROR_OBJECT (ice, "Failed to resolve turn server '%s'", host);
+ goto out;
+ }
+
+ /* Set the resolved IP as the host since that's what libnice wants */
+ gst_uri_set_host (turn_server, ip);
+
+ scheme = gst_uri_get_scheme (turn_server);
+ transport = gst_uri_get_query_value (turn_server, "transport");
+ userinfo = gst_uri_get_userinfo (turn_server);
+ _parse_userinfo (userinfo, &user, &pass);
+
+ if (g_strcmp0 (scheme, "turns") == 0) {
+ relays[relay_n++] = NICE_RELAY_TYPE_TURN_TLS;
+ } else if (g_strcmp0 (scheme, "turn") == 0) {
+ if (!transport || g_strcmp0 (transport, "udp") == 0)
+ relays[relay_n++] = NICE_RELAY_TYPE_TURN_UDP;
+ if (!transport || g_strcmp0 (transport, "tcp") == 0)
+ relays[relay_n++] = NICE_RELAY_TYPE_TURN_TCP;
+ }
+ g_assert (relay_n < G_N_ELEMENTS (relays));
+
+ for (i = 0; i < relay_n; i++) {
+ ret = nice_agent_set_relay_info (ice->priv->nice_agent,
+ item->nice_stream_id, NICE_COMPONENT_TYPE_RTP,
+ gst_uri_get_host (turn_server), gst_uri_get_port (turn_server),
+ user, pass, relays[i]);
+ if (!ret) {
+ gchar *uri = gst_uri_to_string (turn_server);
+ GST_ERROR_OBJECT (ice, "Failed to set TURN server '%s'", uri);
+ g_free (uri);
+ break;
+ }
+ }
+ g_free (user);
+ g_free (pass);
+
+ out:
+ g_free (ip);
+ }
+
+ typedef struct
+ {
+ GstWebRTCICE *ice;
+ struct NiceStreamItem *item;
+ } AddTurnServerData;
+
+ static void
+ _add_turn_server_func (const gchar * uri, GstUri * turn_server,
+ AddTurnServerData * data)
+ {
+ _add_turn_server (data->ice, data->item, turn_server);
+ }
+
+ static void
+ _add_stun_server (GstWebRTCICE * ice, GstUri * stun_server)
+ {
+ const gchar *msg = "must be of the form stun://<host>:<port>";
+ const gchar *host;
+ gchar *s = NULL;
+ gchar *ip = NULL;
+ guint port;
+
+ s = gst_uri_to_string (stun_server);
+ GST_DEBUG_OBJECT (ice, "adding stun server, %s", s);
+
+ host = gst_uri_get_host (stun_server);
+ if (!host) {
+ GST_ERROR_OBJECT (ice, "Stun server '%s' has no host, %s", s, msg);
+ goto out;
+ }
+
+ port = gst_uri_get_port (stun_server);
+ if (port == GST_URI_NO_PORT) {
+ GST_INFO_OBJECT (ice, "Stun server '%s' has no port, assuming 3478", s);
+ port = 3478;
+ gst_uri_set_port (stun_server, port);
+ }
+
+ ip = _resolve_host (ice, host);
+ if (!ip) {
+ GST_ERROR_OBJECT (ice, "Failed to resolve stun server '%s'", host);
+ goto out;
+ }
+
+ g_object_set (ice->priv->nice_agent, "stun-server", ip,
+ "stun-server-port", port, NULL);
+
+ out:
+ g_free (s);
+ g_free (ip);
+ }
+
+ GstWebRTCICEStream *
+ gst_webrtc_ice_add_stream (GstWebRTCICE * ice, guint session_id)
+ {
+ struct NiceStreamItem m = NICE_MATCH_INIT;
+ struct NiceStreamItem *item;
+ AddTurnServerData add_data;
+
+ m.session_id = session_id;
+ item = _nice_stream_item_find (ice, (NiceStreamItemFindFunc) _match, &m);
+ if (item) {
+ GST_ERROR_OBJECT (ice, "stream already added with session_id=%u",
+ session_id);
+ return 0;
+ }
+
+ if (ice->stun_server) {
+ _add_stun_server (ice, ice->stun_server);
+ }
+
+ item = _create_nice_stream_item (ice, session_id);
+
+ if (ice->turn_server) {
+ _add_turn_server (ice, item, ice->turn_server);
+ }
+
+ add_data.ice = ice;
+ add_data.item = item;
+
+ g_hash_table_foreach (ice->turn_servers, (GHFunc) _add_turn_server_func,
+ &add_data);
+
+ return item->stream;
+ }
+
+ static void
+ _on_new_candidate (NiceAgent * agent, NiceCandidate * candidate,
+ GstWebRTCICE * ice)
+ {
+ struct NiceStreamItem *item;
+ gchar *attr;
+
+ item = _find_item (ice, -1, candidate->stream_id, NULL);
+ if (!item) {
+ GST_WARNING_OBJECT (ice, "received signal for non-existent stream %u",
+ candidate->stream_id);
+ return;
+ }
+
+ if (!candidate->username || !candidate->password) {
+ gboolean got_credentials;
+ gchar *ufrag, *password;
+
+ got_credentials = nice_agent_get_local_credentials (ice->priv->nice_agent,
+ candidate->stream_id, &ufrag, &password);
+ g_warn_if_fail (got_credentials);
+
+ if (!candidate->username)
+ candidate->username = ufrag;
+ else
+ g_free (ufrag);
+
+ if (!candidate->password)
+ candidate->password = password;
+ else
+ g_free (password);
+ }
+
+ attr = nice_agent_generate_local_candidate_sdp (agent, candidate);
+
+ if (ice->priv->on_candidate)
+ ice->priv->on_candidate (ice, item->session_id, attr,
+ ice->priv->on_candidate_data);
+
+ g_free (attr);
+ }
+
+ GstWebRTCICETransport *
+ gst_webrtc_ice_find_transport (GstWebRTCICE * ice, GstWebRTCICEStream * stream,
+ GstWebRTCICEComponent component)
+ {
+ struct NiceStreamItem *item;
+
+ item = _find_item (ice, -1, -1, stream);
+ g_return_val_if_fail (item != NULL, NULL);
+
+ return gst_webrtc_ice_stream_find_transport (item->stream, component);
+ }
+
+ #if 0
+ /* TODO don't rely on libnice to (de)serialize candidates */
+ static NiceCandidateType
+ _candidate_type_from_string (const gchar * s)
+ {
+ if (g_strcmp0 (s, "host") == 0) {
+ return NICE_CANDIDATE_TYPE_HOST;
+ } else if (g_strcmp0 (s, "srflx") == 0) {
+ return NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE;
+ } else if (g_strcmp0 (s, "prflx") == 0) { /* FIXME: is the right string? */
+ return NICE_CANDIDATE_TYPE_PEER_REFLEXIVE;
+ } else if (g_strcmp0 (s, "relay") == 0) {
+ return NICE_CANDIDATE_TYPE_RELAY;
+ } else {
+ g_assert_not_reached ();
+ return 0;
+ }
+ }
+
+ static const gchar *
+ _candidate_type_to_string (NiceCandidateType type)
+ {
+ switch (type) {
+ case NICE_CANDIDATE_TYPE_HOST:
+ return "host";
+ case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE:
+ return "srflx";
+ case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE:
+ return "prflx";
+ case NICE_CANDIDATE_TYPE_RELAY:
+ return "relay";
+ default:
+ g_assert_not_reached ();
+ return NULL;
+ }
+ }
+
+ static NiceCandidateTransport
+ _candidate_transport_from_string (const gchar * s)
+ {
+ if (g_strcmp0 (s, "UDP") == 0) {
+ return NICE_CANDIDATE_TRANSPORT_UDP;
+ } else if (g_strcmp0 (s, "TCP tcptype") == 0) {
+ return NICE_CANDIDATE_TRANSPORT_TCP_ACTIVE;
+ } else if (g_strcmp0 (s, "tcp-passive") == 0) { /* FIXME: is the right string? */
+ return NICE_CANDIDATE_TRANSPORT_TCP_PASSIVE;
+ } else if (g_strcmp0 (s, "tcp-so") == 0) {
+ return NICE_CANDIDATE_TRANSPORT_TCP_SO;
+ } else {
+ g_assert_not_reached ();
+ return 0;
+ }
+ }
+
+ static const gchar *
+ _candidate_type_to_string (NiceCandidateType type)
+ {
+ switch (type) {
+ case NICE_CANDIDATE_TYPE_HOST:
+ return "host";
+ case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE:
+ return "srflx";
+ case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE:
+ return "prflx";
+ case NICE_CANDIDATE_TYPE_RELAY:
+ return "relay";
+ default:
+ g_assert_not_reached ();
+ return NULL;
+ }
+ }
+ #endif
+
+ /* parse the address for possible resolution */
+ static gboolean
+ get_candidate_address (const gchar * candidate, gchar ** prefix,
+ gchar ** address, gchar ** postfix)
+ {
+ char **tokens = NULL;
+
+ if (!g_str_has_prefix (candidate, "a=candidate:")) {
+ GST_ERROR ("candidate \"%s\" does not start with \"a=candidate:\"",
+ candidate);
+ goto failure;
+ }
+
+ if (!(tokens = g_strsplit (candidate, " ", 6))) {
+ GST_ERROR ("candidate \"%s\" could not be tokenized", candidate);
+ goto failure;
+ }
+
+ if (g_strv_length (tokens) < 6) {
+ GST_ERROR ("candidate \"%s\" tokenization resulted in not enough tokens",
+ candidate);
+ goto failure;
+ }
+
+ if (address)
+ *address = g_strdup (tokens[4]);
+ tokens[4] = NULL;
+ if (prefix)
+ *prefix = g_strjoinv (" ", tokens);
+ if (postfix)
+ *postfix = g_strdup (tokens[5]);
+
+ g_strfreev (tokens);
+ return TRUE;
+
+ failure:
+ if (tokens)
+ g_strfreev (tokens);
+ return FALSE;
+ }
+
+ /* candidate must start with "a=candidate:" or be NULL*/
+ void
+ gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, GstWebRTCICEStream * stream,
+ const gchar * candidate)
+ {
+ struct NiceStreamItem *item;
+ NiceCandidate *cand;
+ GSList *candidates = NULL;
+
+ item = _find_item (ice, -1, -1, stream);
+ g_return_if_fail (item != NULL);
+
+ if (candidate == NULL) {
+ nice_agent_peer_candidate_gathering_done (ice->priv->nice_agent,
+ item->nice_stream_id);
+ return;
+ }
+
+ cand =
+ nice_agent_parse_remote_candidate_sdp (ice->priv->nice_agent,
+ item->nice_stream_id, candidate);
+ if (!cand) {
+ /* might be a .local candidate */
+ char *prefix = NULL, *address = NULL, *postfix = NULL;
++#ifndef __TIZEN__
+ char *new_addr, *new_candidate;
++#else
++ char *new_addr = NULL, *new_candidate = NULL;
++#endif
+ char *new_candv[4] = { NULL, };
+
+ if (!get_candidate_address (candidate, &prefix, &address, &postfix)) {
+ GST_WARNING_OBJECT (ice, "Failed to retrieve address from candidate %s",
+ candidate);
+ goto fail;
+ }
+
+ if (!g_str_has_suffix (address, ".local")) {
+ GST_WARNING_OBJECT (ice, "candidate address \'%s\' does not end "
+ "with \'.local\'", address);
+ goto fail;
+ }
+
+ /* FIXME: async */
+ if (!(new_addr = _resolve_host (ice, address))) {
+ GST_WARNING_OBJECT (ice, "Failed to resolve %s", address);
+ goto fail;
+ }
+
+ new_candv[0] = prefix;
+ new_candv[1] = new_addr;
+ new_candv[2] = postfix;
+ new_candv[3] = NULL;
+ new_candidate = g_strjoinv (" ", new_candv);
+
+ GST_DEBUG_OBJECT (ice, "resolved to candidate %s", new_candidate);
+
+ cand =
+ nice_agent_parse_remote_candidate_sdp (ice->priv->nice_agent,
+ item->nice_stream_id, new_candidate);
++#ifndef __TIZEN__
+ g_free (new_candidate);
++#endif
+ if (!cand) {
+ GST_WARNING_OBJECT (ice, "Could not parse candidate \'%s\'",
+ new_candidate);
+ goto fail;
+ }
+
++#ifdef __TIZEN__
++ g_free (new_addr);
++ g_free (new_candidate);
++ g_free (prefix);
++ g_free (address);
++ g_free (postfix);
++#else
+ g_free (prefix);
+ g_free (new_addr);
+ g_free (postfix);
++#endif
+
+ if (0) {
+ fail:
++#ifdef __TIZEN__
++ g_free (new_addr);
++ g_free (new_candidate);
++#endif
+ g_free (prefix);
+ g_free (address);
+ g_free (postfix);
+ return;
+ }
+ }
+
+ if (cand->component_id == 2) {
+ /* we only support rtcp-mux so rtcp candidates are useless for us */
+ GST_INFO_OBJECT (ice, "Dropping RTCP candidate %s", candidate);
+ nice_candidate_free (cand);
+ return;
+ }
+
+ candidates = g_slist_append (candidates, cand);
+
+ nice_agent_set_remote_candidates (ice->priv->nice_agent, item->nice_stream_id,
+ cand->component_id, candidates);
+
+ g_slist_free (candidates);
+ nice_candidate_free (cand);
+ }
+
+ /* Push the remote peer's ICE ufrag/pwd for @stream down to the libnice
+ * agent.  Returns FALSE (via g_return_val_if_fail) on NULL credentials or
+ * an unknown stream. */
+ gboolean
+ gst_webrtc_ice_set_remote_credentials (GstWebRTCICE * ice,
+ GstWebRTCICEStream * stream, gchar * ufrag, gchar * pwd)
+ {
+ struct NiceStreamItem *item;
+
+ g_return_val_if_fail (ufrag != NULL, FALSE);
+ g_return_val_if_fail (pwd != NULL, FALSE);
+ item = _find_item (ice, -1, -1, stream);
+ g_return_val_if_fail (item != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (ice, "Setting remote ICE credentials on "
+ "ICE stream %u ufrag:%s pwd:%s", item->nice_stream_id, ufrag, pwd);
+
+ nice_agent_set_remote_credentials (ice->priv->nice_agent,
+ item->nice_stream_id, ufrag, pwd);
+
+ return TRUE;
+ }
+
+ /* Validate @uri as a TURN server URI and, on success, store the parsed
+ * GstUri in ice->turn_servers keyed by a copy of the original string.
+ * The hash table owns both key and value (see gst_webrtc_ice_init). */
+ gboolean
+ gst_webrtc_ice_add_turn_server (GstWebRTCICE * ice, const gchar * uri)
+ {
+ gboolean ret = FALSE;
+ GstUri *valid_uri;
+
+ if (!(valid_uri = _validate_turn_server (ice, uri)))
+ goto done;
+
+ g_hash_table_insert (ice->turn_servers, g_strdup (uri), valid_uri);
+
+ ret = TRUE;
+
+ done:
+ return ret;
+ }
+
+ /* Parse @address into a NiceAddress and register it as a local address on
+ * the nice agent.  Used as the class handler of the "add-local-ip-address"
+ * action signal (see gst_webrtc_ice_class_init). */
+ static gboolean
+ gst_webrtc_ice_add_local_ip_address (GstWebRTCICE * ice, const gchar * address)
+ {
+ gboolean ret = FALSE;
+ NiceAddress nice_addr;
+
+ nice_address_init (&nice_addr);
+
+ ret = nice_address_set_from_string (&nice_addr, address);
+
+ if (ret) {
+ ret = nice_agent_add_local_address (ice->priv->nice_agent, &nice_addr);
+ if (!ret) {
+ GST_ERROR_OBJECT (ice, "Failed to add local address to NiceAgent");
+ }
+ } else {
+ GST_ERROR_OBJECT (ice, "Failed to initialize NiceAddress [%s]", address);
+ }
+
+ return ret;
+ }
+
+ /* Set the local ICE ufrag/pwd for @stream on the nice agent.  Mirrors
+ * gst_webrtc_ice_set_remote_credentials for the local side. */
+ gboolean
+ gst_webrtc_ice_set_local_credentials (GstWebRTCICE * ice,
+ GstWebRTCICEStream * stream, gchar * ufrag, gchar * pwd)
+ {
+ struct NiceStreamItem *item;
+
+ g_return_val_if_fail (ufrag != NULL, FALSE);
+ g_return_val_if_fail (pwd != NULL, FALSE);
+ item = _find_item (ice, -1, -1, stream);
+ g_return_val_if_fail (item != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (ice, "Setting local ICE credentials on "
+ "ICE stream %u ufrag:%s pwd:%s", item->nice_stream_id, ufrag, pwd);
+
+ nice_agent_set_local_credentials (ice->priv->nice_agent, item->nice_stream_id,
+ ufrag, pwd);
+
+ return TRUE;
+ }
+
+ /* Kick off candidate gathering for @stream; the actual work is delegated
+ * to the GstWebRTCICEStream object. */
+ gboolean
+ gst_webrtc_ice_gather_candidates (GstWebRTCICE * ice,
+ GstWebRTCICEStream * stream)
+ {
+ struct NiceStreamItem *item;
+
+ item = _find_item (ice, -1, -1, stream);
+ g_return_val_if_fail (item != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (ice, "gather candidates for stream %u",
+ item->nice_stream_id);
+
+ return gst_webrtc_ice_stream_gather_candidates (stream);
+ }
+
+ /* Forward the ICE controlling/controlled role to the agent's
+ * "controlling-mode" property. */
+ void
+ gst_webrtc_ice_set_is_controller (GstWebRTCICE * ice, gboolean controller)
+ {
+ g_object_set (G_OBJECT (ice->priv->nice_agent), "controlling-mode",
+ controller, NULL);
+ }
+
+ /* Read back the agent's "controlling-mode" property. */
+ gboolean
+ gst_webrtc_ice_get_is_controller (GstWebRTCICE * ice)
+ {
+ gboolean ret;
+ g_object_get (G_OBJECT (ice->priv->nice_agent), "controlling-mode",
+ &ret, NULL);
+ return ret;
+ }
+
+ /* When TRUE the agent only uses relay (TURN) candidates. */
+ void
+ gst_webrtc_ice_set_force_relay (GstWebRTCICE * ice, gboolean force_relay)
+ {
+ g_object_set (G_OBJECT (ice->priv->nice_agent), "force-relay", force_relay,
+ NULL);
+ }
+
+ /* Install the callback invoked for locally gathered candidates.  Any
+ * previously installed user_data is released through its notify first. */
+ void
+ gst_webrtc_ice_set_on_ice_candidate (GstWebRTCICE * ice,
+ GstWebRTCIceOnCandidateFunc func, gpointer user_data, GDestroyNotify notify)
+ {
+ if (ice->priv->on_candidate_notify)
+ ice->priv->on_candidate_notify (ice->priv->on_candidate_data);
+ ice->priv->on_candidate = NULL;
+
+ ice->priv->on_candidate = func;
+ ice->priv->on_candidate_data = user_data;
+ ice->priv->on_candidate_notify = notify;
+ }
+
+ /* Apply a DSCP/TOS value to the nice stream backing @stream. */
+ void
+ gst_webrtc_ice_set_tos (GstWebRTCICE * ice, GstWebRTCICEStream * stream,
+ guint tos)
+ {
+ struct NiceStreamItem *item;
+
+ item = _find_item (ice, -1, -1, stream);
+ g_return_if_fail (item != NULL);
+
+ nice_agent_set_stream_tos (ice->priv->nice_agent, item->nice_stream_id, tos);
+ }
+
+ /* GDestroyNotify-style clear func for entries of priv->nice_stream_map
+ * (installed via g_array_set_clear_func in gst_webrtc_ice_init):
+ * disconnects the stream's signal handlers from the agent and drops the
+ * array's reference on the stream. */
+ static void
+ _clear_ice_stream (struct NiceStreamItem *item)
+ {
+ if (!item)
+ return;
+
+ if (item->stream) {
+ g_signal_handlers_disconnect_by_data (item->stream->ice->priv->nice_agent,
+ item->stream);
+ gst_object_unref (item->stream);
+ }
+ }
+
+ /* Parse and sanity-check a TURN URI: scheme must be "turn" or "turns",
+ * the only accepted query key is transport=udp|tcp, and a user:pass
+ * userinfo is required.  When no port is given, 5349 (turns) or 3478
+ * (turn) is filled in.
+ * NOTE(review): the error paths (unknown scheme/transport/query key,
+ * missing user or pass) jump to 'out' without unreffing/NULLing 'uri',
+ * so an invalid server is still returned non-NULL even though an error
+ * was logged — callers that treat NULL as failure never observe these
+ * validation failures; confirm this is intended. */
+ static GstUri *
+ _validate_turn_server (GstWebRTCICE * ice, const gchar * s)
+ {
+ GstUri *uri = gst_uri_from_string_escaped (s);
+ const gchar *userinfo, *scheme;
+ GList *keys = NULL, *l;
+ gchar *user = NULL, *pass = NULL;
+ gboolean turn_tls = FALSE;
+ guint port;
+
+ GST_DEBUG_OBJECT (ice, "validating turn server, %s", s);
+
+ if (!uri) {
+ GST_ERROR_OBJECT (ice, "Could not parse turn server '%s'", s);
+ return NULL;
+ }
+
+ scheme = gst_uri_get_scheme (uri);
+ if (g_strcmp0 (scheme, "turn") == 0) {
+ } else if (g_strcmp0 (scheme, "turns") == 0) {
+ turn_tls = TRUE;
+ } else {
+ GST_ERROR_OBJECT (ice, "unknown scheme '%s'", scheme);
+ goto out;
+ }
+
+ keys = gst_uri_get_query_keys (uri);
+ for (l = keys; l; l = l->next) {
+ gchar *key = l->data;
+
+ if (g_strcmp0 (key, "transport") == 0) {
+ const gchar *transport = gst_uri_get_query_value (uri, "transport");
+ if (!transport) {
+ } else if (g_strcmp0 (transport, "udp") == 0) {
+ } else if (g_strcmp0 (transport, "tcp") == 0) {
+ } else {
+ GST_ERROR_OBJECT (ice, "unknown transport value, '%s'", transport);
+ goto out;
+ }
+ } else {
+ GST_ERROR_OBJECT (ice, "unknown query key, '%s'", key);
+ goto out;
+ }
+ }
+
+ /* TODO: Implement error checking similar to the stun server below */
+ userinfo = gst_uri_get_userinfo (uri);
+ _parse_userinfo (userinfo, &user, &pass);
+ if (!user) {
+ GST_ERROR_OBJECT (ice, "No username specified in '%s'", s);
+ goto out;
+ }
+ if (!pass) {
+ GST_ERROR_OBJECT (ice, "No password specified in '%s'", s);
+ goto out;
+ }
+
+ port = gst_uri_get_port (uri);
+
+ if (port == GST_URI_NO_PORT) {
+ if (turn_tls) {
+ gst_uri_set_port (uri, 5349);
+ } else {
+ gst_uri_set_port (uri, 3478);
+ }
+ }
+
+ out:
+ g_list_free (keys);
+ g_free (user);
+ g_free (pass);
+
+ return uri;
+ }
+
+ /* Parse @uri_s and store it as the STUN server, replacing (and unreffing)
+ * any previous one.  On parse failure the old value is kept. */
+ void
+ gst_webrtc_ice_set_stun_server (GstWebRTCICE * ice, const gchar * uri_s)
+ {
+ GstUri *uri = gst_uri_from_string_escaped (uri_s);
+ const gchar *msg = "must be of the form stun://<host>:<port>";
+
+ GST_DEBUG_OBJECT (ice, "setting stun server, %s", uri_s);
+
+ if (!uri) {
+ GST_ERROR_OBJECT (ice, "Couldn't parse stun server '%s', %s", uri_s, msg);
+ return;
+ }
+
+ if (ice->stun_server)
+ gst_uri_unref (ice->stun_server);
+ ice->stun_server = uri;
+ }
+
+ /* Returns a newly-allocated string form of the STUN server, or NULL if
+ * none is set.  Caller owns the returned string. */
+ gchar *
+ gst_webrtc_ice_get_stun_server (GstWebRTCICE * ice)
+ {
+ if (ice->stun_server)
+ return gst_uri_to_string (ice->stun_server);
+ else
+ return NULL;
+ }
+
+ /* Validate @uri_s and store it as the (single) TURN server; on validation
+ * failure the old value is kept. */
+ void
+ gst_webrtc_ice_set_turn_server (GstWebRTCICE * ice, const gchar * uri_s)
+ {
+ GstUri *uri = _validate_turn_server (ice, uri_s);
+
+ if (uri) {
+ if (ice->turn_server)
+ gst_uri_unref (ice->turn_server);
+ ice->turn_server = uri;
+ }
+ }
+
+ /* Returns a newly-allocated string form of the TURN server, or NULL.
+ * Caller owns the returned string. */
+ gchar *
+ gst_webrtc_ice_get_turn_server (GstWebRTCICE * ice)
+ {
+ if (ice->turn_server)
+ return gst_uri_to_string (ice->turn_server);
+ else
+ return NULL;
+ }
+
+ /* GObject property setter.  ice-tcp/ice-udp are proxied straight to the
+ * nice agent; min/max-rtp-port are stored locally and only warn (do not
+ * reject) when the range is inverted. */
+ static void
+ gst_webrtc_ice_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstWebRTCICE *ice = GST_WEBRTC_ICE (object);
+
+ switch (prop_id) {
+ case PROP_ICE_TCP:
+ g_object_set_property (G_OBJECT (ice->priv->nice_agent),
+ "ice-tcp", value);
+ break;
+ case PROP_ICE_UDP:
+ g_object_set_property (G_OBJECT (ice->priv->nice_agent),
+ "ice-udp", value);
+ break;
+
+ case PROP_MIN_RTP_PORT:
+ ice->min_rtp_port = g_value_get_uint (value);
+ if (ice->min_rtp_port > ice->max_rtp_port)
+ g_warning ("Set min-rtp-port to %u which is larger than"
+ " max-rtp-port %u", ice->min_rtp_port, ice->max_rtp_port);
+ break;
+
+ case PROP_MAX_RTP_PORT:
+ ice->max_rtp_port = g_value_get_uint (value);
+ if (ice->min_rtp_port > ice->max_rtp_port)
+ g_warning ("Set max-rtp-port to %u which is smaller than"
+ " min-rtp-port %u", ice->max_rtp_port, ice->min_rtp_port);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* GObject property getter; mirrors the setter above and also exposes the
+ * internal NiceAgent via the read-only "agent" property. */
+ static void
+ gst_webrtc_ice_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstWebRTCICE *ice = GST_WEBRTC_ICE (object);
+
+ switch (prop_id) {
+ case PROP_AGENT:
+ g_value_set_object (value, ice->priv->nice_agent);
+ break;
+ case PROP_ICE_TCP:
+ g_object_get_property (G_OBJECT (ice->priv->nice_agent),
+ "ice-tcp", value);
+ break;
+ case PROP_ICE_UDP:
+ g_object_get_property (G_OBJECT (ice->priv->nice_agent),
+ "ice-udp", value);
+ break;
+
+ case PROP_MIN_RTP_PORT:
+ g_value_set_uint (value, ice->min_rtp_port);
+ break;
+
+ case PROP_MAX_RTP_PORT:
+ g_value_set_uint (value, ice->max_rtp_port);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* Dispose of everything created in init/constructed: disconnect agent
+ * signal handlers, stop the GLib main-context thread, release the
+ * candidate-callback user_data, then free servers, sync primitives, the
+ * stream map (whose clear func disconnects each stream), the agent and
+ * the TURN server table. */
+ static void
+ gst_webrtc_ice_finalize (GObject * object)
+ {
+ GstWebRTCICE *ice = GST_WEBRTC_ICE (object);
+
+ g_signal_handlers_disconnect_by_data (ice->priv->nice_agent, ice);
+
+ _stop_thread (ice);
+
+ if (ice->priv->on_candidate_notify)
+ ice->priv->on_candidate_notify (ice->priv->on_candidate_data);
+ ice->priv->on_candidate = NULL;
+ ice->priv->on_candidate_notify = NULL;
+
+ if (ice->turn_server)
+ gst_uri_unref (ice->turn_server);
+ if (ice->stun_server)
+ gst_uri_unref (ice->stun_server);
+
+ g_mutex_clear (&ice->priv->lock);
+ g_cond_clear (&ice->priv->cond);
+
+ g_array_free (ice->priv->nice_stream_map, TRUE);
+
+ g_object_unref (ice->priv->nice_agent);
+
+ g_hash_table_unref (ice->turn_servers);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* Create the NiceAgent (trickle ICE + regular nomination, RFC 5245
+ * compatibility) on the private main context after starting the worker
+ * thread, and hook up local-candidate notification. */
+ static void
+ gst_webrtc_ice_constructed (GObject * object)
+ {
+ GstWebRTCICE *ice = GST_WEBRTC_ICE (object);
+ NiceAgentOption options = 0;
+
+ _start_thread (ice);
+
+ options |= NICE_AGENT_OPTION_ICE_TRICKLE;
+ options |= NICE_AGENT_OPTION_REGULAR_NOMINATION;
+
+ ice->priv->nice_agent = nice_agent_new_full (ice->priv->main_context,
+ NICE_COMPATIBILITY_RFC5245, options);
+ g_signal_connect (ice->priv->nice_agent, "new-candidate-full",
+ G_CALLBACK (_on_new_candidate), ice);
+
+ G_OBJECT_CLASS (parent_class)->constructed (object);
+ }
+
+ /* GObject class setup: vfuncs, properties (agent, ice-tcp, ice-udp,
+ * min/max-rtp-port) and the "add-local-ip-address" action signal. */
+ static void
+ gst_webrtc_ice_class_init (GstWebRTCICEClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->constructed = gst_webrtc_ice_constructed;
+ gobject_class->get_property = gst_webrtc_ice_get_property;
+ gobject_class->set_property = gst_webrtc_ice_set_property;
+ gobject_class->finalize = gst_webrtc_ice_finalize;
+
+ g_object_class_install_property (gobject_class,
+ PROP_AGENT,
+ g_param_spec_object ("agent", "ICE agent",
+ "ICE agent in use by this object. WARNING! Accessing this property "
+ "may have disastrous consequences for the operation of webrtcbin. "
+ "Other ICE implementations may not have the same interface.",
+ NICE_TYPE_AGENT, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_ICE_TCP,
+ g_param_spec_boolean ("ice-tcp", "ICE TCP",
+ "Whether the agent should use ICE-TCP when gathering candidates",
+ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_ICE_UDP,
+ g_param_spec_boolean ("ice-udp", "ICE UDP",
+ "Whether the agent should use ICE-UDP when gathering candidates",
+ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstWebRTCICE:min-rtp-port:
+ *
+ * Minimum port for local rtp port range.
+ * min-rtp-port must be <= max-rtp-port
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_MIN_RTP_PORT,
+ g_param_spec_uint ("min-rtp-port", "ICE RTP candidate min port",
+ "Minimum port for local rtp port range. "
+ "min-rtp-port must be <= max-rtp-port",
+ 0, 65535, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstWebRTCICE:max-rtp-port:
+ *
+ * Maximum port for local rtp port range.
+ * min-rtp-port must be <= max-rtp-port
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_MAX_RTP_PORT,
+ g_param_spec_uint ("max-rtp-port", "ICE RTP candidate max port",
+ "Maximum port for local rtp port range. "
+ "max-rtp-port must be >= min-rtp-port",
+ 0, 65535, 65535,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstWebRTCICE::add-local-ip-address:
+ * @object: the #GstWebRTCICE
+ * @address: The local IP address
+ *
+ * Add a local IP address to use for ICE candidate gathering. If none
+ * are supplied, they will be discovered automatically. Calling this signal
+ * stops automatic ICE gathering.
+ *
+ * Returns: whether the address could be added.
+ */
+ gst_webrtc_ice_signals[ADD_LOCAL_IP_ADDRESS_SIGNAL] =
+ g_signal_new_class_handler ("add-local-ip-address",
+ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_webrtc_ice_add_local_ip_address), NULL, NULL,
+ g_cclosure_marshal_generic, G_TYPE_BOOLEAN, 1, G_TYPE_STRING);
+ }
+
+ /* Instance init: sync primitives, the TURN-server table (owns keys and
+ * GstUri values), and the stream map with its clear func.  The Tizen
+ * build additionally silences libnice debug output. */
+ static void
+ gst_webrtc_ice_init (GstWebRTCICE * ice)
+ {
+ ice->priv = gst_webrtc_ice_get_instance_private (ice);
+
+ g_mutex_init (&ice->priv->lock);
+ g_cond_init (&ice->priv->cond);
+
+ ice->turn_servers =
+ g_hash_table_new_full (g_str_hash, g_str_equal, g_free,
+ (GDestroyNotify) gst_uri_unref);
+
+ ice->priv->nice_stream_map =
+ g_array_new (FALSE, TRUE, sizeof (struct NiceStreamItem));
+ g_array_set_clear_func (ice->priv->nice_stream_map,
+ (GDestroyNotify) _clear_ice_stream);
++#ifdef __TIZEN__
++ nice_debug_enable (FALSE);
++#endif
+ }
+
+ /* Convenience constructor. */
+ GstWebRTCICE *
+ gst_webrtc_ice_new (const gchar * name)
+ {
+ return g_object_new (GST_TYPE_WEBRTC_ICE, "name", name, NULL);
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2018 Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:gstwebrtc-datachannel
+ * @short_description: RTCDataChannel object
+ * @title: GstWebRTCDataChannel
+ * @see_also: #GstWebRTCRTPTransceiver
+ *
+ * <http://w3c.github.io/webrtc-pc/#dom-rtcsctptransport>
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include "webrtcdatachannel.h"
+ #include <gst/app/gstappsink.h>
+ #include <gst/app/gstappsrc.h>
+ #include <gst/base/gstbytereader.h>
+ #include <gst/base/gstbytewriter.h>
+ #include <gst/sctp/sctpreceivemeta.h>
+ #include <gst/sctp/sctpsendmeta.h>
+
+ #include "gstwebrtcbin.h"
+ #include "utils.h"
+
+ #define GST_CAT_DEFAULT webrtc_data_channel_debug
+ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+ #define webrtc_data_channel_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (WebRTCDataChannel, webrtc_data_channel,
+ GST_TYPE_WEBRTC_DATA_CHANNEL,
+ GST_DEBUG_CATEGORY_INIT (webrtc_data_channel_debug, "webrtcdatachannel", 0,
+ "webrtcdatachannel"););
+
+ /* SCTP payload protocol identifiers (PPIDs) used to tag data-channel
+ * payloads; the values match the WebRTC data-channel protocol registry. */
+ typedef enum
+ {
+ DATA_CHANNEL_PPID_WEBRTC_CONTROL = 50,
+ DATA_CHANNEL_PPID_WEBRTC_STRING = 51,
+ DATA_CHANNEL_PPID_WEBRTC_BINARY_PARTIAL = 52, /* deprecated */
+ DATA_CHANNEL_PPID_WEBRTC_BINARY = 53,
+ DATA_CHANNEL_PPID_WEBRTC_STRING_PARTIAL = 54, /* deprecated */
+ DATA_CHANNEL_PPID_WEBRTC_BINARY_EMPTY = 56,
+ DATA_CHANNEL_PPID_WEBRTC_STRING_EMPTY = 57,
+ } DataChannelPPID;
+
+ /* Channel Type byte of the DCEP OPEN message: bit 0x80 = unordered,
+ * low bits select the partial-reliability mode. */
+ typedef enum
+ {
+ CHANNEL_TYPE_RELIABLE = 0x00,
+ CHANNEL_TYPE_RELIABLE_UNORDERED = 0x80,
+ CHANNEL_TYPE_PARTIAL_RELIABLE_REXMIT = 0x01,
+ CHANNEL_TYPE_PARTIAL_RELIABLE_REXMIT_UNORDERED = 0x81,
+ CHANNEL_TYPE_PARTIAL_RELIABLE_TIMED = 0x02,
+ CHANNEL_TYPE_PARTIAL_RELIABLE_TIMED_UNORDERED = 0x82,
+ } DataChannelReliabilityType;
+
+ /* DCEP message types carried on the control PPID. */
+ typedef enum
+ {
+ CHANNEL_MESSAGE_ACK = 0x02,
+ CHANNEL_MESSAGE_OPEN = 0x03,
+ } DataChannelMessage;
+
+ /* Map the GstWebRTCPriorityType enum to the 16-bit priority field of the
+ * DCEP OPEN message. */
+ static guint16
+ priority_type_to_uint (GstWebRTCPriorityType pri)
+ {
+ switch (pri) {
+ case GST_WEBRTC_PRIORITY_TYPE_VERY_LOW:
+ return 64;
+ case GST_WEBRTC_PRIORITY_TYPE_LOW:
+ return 192;
+ case GST_WEBRTC_PRIORITY_TYPE_MEDIUM:
+ return 384;
+ case GST_WEBRTC_PRIORITY_TYPE_HIGH:
+ return 768;
+ }
+ g_assert_not_reached ();
+ return 0;
+ }
+
+ /* Inverse of the above: bucket a received 16-bit priority into the enum.
+ * Not an exact inverse — any value in a bucket maps to that bucket. */
+ static GstWebRTCPriorityType
+ priority_uint_to_type (guint16 val)
+ {
+ if (val <= 128)
+ return GST_WEBRTC_PRIORITY_TYPE_VERY_LOW;
+ if (val <= 256)
+ return GST_WEBRTC_PRIORITY_TYPE_LOW;
+ if (val <= 512)
+ return GST_WEBRTC_PRIORITY_TYPE_MEDIUM;
+ return GST_WEBRTC_PRIORITY_TYPE_HIGH;
+ }
+
+ /* Serialize a DCEP OPEN message (12-byte header + label + protocol) for
+ * @channel into a GstBuffer tagged with the control PPID, sent reliable
+ * and ordered.  Returns NULL (via g_return_val_if_reached) only if a
+ * byte-writer put fails, which cannot happen with the pre-sized writer.
+ * NOTE(review): if both max_retransmits and max_packet_lifetime are set
+ * (!= -1), the lifetime branch overwrites the rexmit reliability_param
+ * and both flag bits end up set — presumably callers only set one. */
+ static GstBuffer *
+ construct_open_packet (WebRTCDataChannel * channel)
+ {
+ GstByteWriter w;
+ gsize label_len = strlen (channel->parent.label);
+ gsize proto_len = strlen (channel->parent.protocol);
+ gsize size = 12 + label_len + proto_len;
+ DataChannelReliabilityType reliability = 0;
+ guint32 reliability_param = 0;
+ guint16 priority;
+ GstBuffer *buf;
+
+ /*
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Message Type | Channel Type | Priority |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Reliability Parameter |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Label Length | Protocol Length |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * \ /
+ * | Label |
+ * / \
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * \ /
+ * | Protocol |
+ * / \
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+
+ gst_byte_writer_init_with_size (&w, size, FALSE);
+
+ if (!gst_byte_writer_put_uint8 (&w, (guint8) CHANNEL_MESSAGE_OPEN))
+ g_return_val_if_reached (NULL);
+
+ if (!channel->parent.ordered)
+ reliability |= 0x80;
+ if (channel->parent.max_retransmits != -1) {
+ reliability |= 0x01;
+ reliability_param = channel->parent.max_retransmits;
+ }
+ if (channel->parent.max_packet_lifetime != -1) {
+ reliability |= 0x02;
+ reliability_param = channel->parent.max_packet_lifetime;
+ }
+
+ priority = priority_type_to_uint (channel->parent.priority);
+
+ if (!gst_byte_writer_put_uint8 (&w, (guint8) reliability))
+ g_return_val_if_reached (NULL);
+ if (!gst_byte_writer_put_uint16_be (&w, (guint16) priority))
+ g_return_val_if_reached (NULL);
+ if (!gst_byte_writer_put_uint32_be (&w, (guint32) reliability_param))
+ g_return_val_if_reached (NULL);
+ if (!gst_byte_writer_put_uint16_be (&w, (guint16) label_len))
+ g_return_val_if_reached (NULL);
+ if (!gst_byte_writer_put_uint16_be (&w, (guint16) proto_len))
+ g_return_val_if_reached (NULL);
+ if (!gst_byte_writer_put_data (&w, (guint8 *) channel->parent.label,
+ label_len))
+ g_return_val_if_reached (NULL);
+ if (!gst_byte_writer_put_data (&w, (guint8 *) channel->parent.protocol,
+ proto_len))
+ g_return_val_if_reached (NULL);
+
+ buf = gst_byte_writer_reset_and_get_buffer (&w);
+
+ /* send reliable and ordered */
+ gst_sctp_buffer_add_send_meta (buf, DATA_CHANNEL_PPID_WEBRTC_CONTROL, TRUE,
+ GST_SCTP_SEND_META_PARTIAL_RELIABILITY_NONE, 0);
+
+ return buf;
+ }
+
+ /* Serialize a one-byte DCEP ACK message, tagged with the control PPID and
+ * sent reliable/ordered.  Caller owns the returned buffer. */
+ static GstBuffer *
+ construct_ack_packet (WebRTCDataChannel * channel)
+ {
+ GstByteWriter w;
+ GstBuffer *buf;
+
+ /*
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Message Type |
+ * +-+-+-+-+-+-+-+-+
+ */
+
+ gst_byte_writer_init_with_size (&w, 1, FALSE);
+
+ if (!gst_byte_writer_put_uint8 (&w, (guint8) CHANNEL_MESSAGE_ACK))
+ g_return_val_if_reached (NULL);
+
+ buf = gst_byte_writer_reset_and_get_buffer (&w);
+
+ /* send reliable and ordered */
+ gst_sctp_buffer_add_send_meta (buf, DATA_CHANNEL_PPID_WEBRTC_CONTROL, TRUE,
+ GST_SCTP_SEND_META_PARTIAL_RELIABILITY_NONE, 0);
+
+ return buf;
+ }
+
+ /* A deferred piece of work to run on webrtcbin's task thread. */
+ typedef void (*ChannelTask) (GstWebRTCDataChannel * channel,
+ gpointer user_data);
+
+ /* Holds a ref on the channel plus the callback and its (optionally
+ * destroyed) user_data until the task runs. */
+ struct task
+ {
+ GstWebRTCDataChannel *channel;
+ ChannelTask func;
+ gpointer user_data;
+ GDestroyNotify notify;
+ };
+
+ /* GstWebRTCBinFunc trampoline: invoke the stored callback. */
+ static GstStructure *
+ _execute_task (GstWebRTCBin * webrtc, struct task *task)
+ {
+ if (task->func)
+ task->func (task->channel, task->user_data);
+
+ return NULL;
+ }
+
+ /* Destroy notify for a struct task: drop the channel ref and release the
+ * user_data through its notify, if any. */
+ static void
+ _free_task (struct task *task)
+ {
+ gst_object_unref (task->channel);
+
+ if (task->notify)
+ task->notify (task->user_data);
+ g_free (task);
+ }
+
+ /* Queue @func to run asynchronously on webrtcbin's task queue, keeping
+ * the channel alive for the duration of the task. */
+ static void
+ _channel_enqueue_task (WebRTCDataChannel * channel, ChannelTask func,
+ gpointer user_data, GDestroyNotify notify)
+ {
+ struct task *task = g_new0 (struct task, 1);
+
+ task->channel = gst_object_ref (channel);
+ task->func = func;
+ task->user_data = user_data;
+ task->notify = notify;
+
+ gst_webrtc_bin_enqueue_task (channel->webrtcbin,
+ (GstWebRTCBinFunc) _execute_task, task, (GDestroyNotify) _free_task,
+ NULL);
+ }
+
+ /* Record @error as the channel's stored error (taking ownership); if one
+ * is already stored the new error is discarded.  Safe to call with NULL. */
+ static void
+ _channel_store_error (WebRTCDataChannel * channel, GError * error)
+ {
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ if (error) {
+ GST_WARNING_OBJECT (channel, "Error: %s",
+ error ? error->message : "Unknown");
+ if (!channel->stored_error)
+ channel->stored_error = error;
+ else
+ g_clear_error (&error);
+ }
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ }
+
+ /* ChannelTask wrapper around the base-class on-open notification. */
+ static void
+ _emit_on_open (WebRTCDataChannel * channel, gpointer user_data)
+ {
+ gst_webrtc_data_channel_on_open (GST_WEBRTC_DATA_CHANNEL (channel));
+ }
+
+ /* Called when the underlying transport has shut down: report any stored
+ * error, and emit on-close once both sides are closed (peer closed and no
+ * data left buffered) or an error occurred. */
+ static void
+ _transport_closed (WebRTCDataChannel * channel)
+ {
+ GError *error;
+ gboolean both_sides_closed;
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ error = channel->stored_error;
+ channel->stored_error = NULL;
+
+ both_sides_closed =
+ channel->peer_closed && channel->parent.buffered_amount <= 0;
+ if (both_sides_closed || error) {
+ channel->peer_closed = FALSE;
+ }
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+
+ if (error) {
+ gst_webrtc_data_channel_on_error (GST_WEBRTC_DATA_CHANNEL (channel), error);
+ g_clear_error (&error);
+ }
+ if (both_sides_closed || error) {
+ gst_webrtc_data_channel_on_close (GST_WEBRTC_DATA_CHANNEL (channel));
+ }
+ }
+
+ /* Tear down the outgoing side of the SCTP stream by releasing the
+ * sctpenc request pad peered with our appsrc, then finish closing via
+ * _transport_closed. */
+ static void
+ _close_sctp_stream (WebRTCDataChannel * channel, gpointer user_data)
+ {
+ GstPad *pad, *peer;
+
+ GST_INFO_OBJECT (channel, "Closing outgoing SCTP stream %i label \"%s\"",
+ channel->parent.id, channel->parent.label);
+
+ pad = gst_element_get_static_pad (channel->appsrc, "src");
+ peer = gst_pad_get_peer (pad);
+ gst_object_unref (pad);
+
+ if (peer) {
+ GstElement *sctpenc = gst_pad_get_parent_element (peer);
+
+ if (sctpenc) {
+ gst_element_release_request_pad (sctpenc, peer);
+ gst_object_unref (sctpenc);
+ }
+ gst_object_unref (peer);
+ }
+
+ _transport_closed (channel);
+ }
+
+ /* Drive the ready-state machine toward CLOSED: no-op when already closed,
+ * finish closing when already CLOSING, and when OPEN move to CLOSING,
+ * notify, and close the SCTP stream once nothing is left buffered. */
+ static void
+ _close_procedure (WebRTCDataChannel * channel, gpointer user_data)
+ {
+ /* https://www.w3.org/TR/webrtc/#data-transport-closing-procedure */
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED) {
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ return;
+ } else if (channel->parent.ready_state ==
+ GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING) {
+ _channel_enqueue_task (channel, (ChannelTask) _transport_closed, NULL,
+ NULL);
+ } else if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_OPEN) {
+ channel->parent.ready_state = GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING;
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ g_object_notify (G_OBJECT (channel), "ready-state");
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ if (channel->parent.buffered_amount <= 0) {
+ _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream,
+ NULL, NULL);
+ }
+ }
+
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ }
+
+ /* "stream-reset" handler on the SCTP transport: when it is our stream id,
+ * mark the peer as closed and schedule the closing procedure. */
+ static void
+ _on_sctp_stream_reset (WebRTCSCTPTransport * sctp, guint stream_id,
+ WebRTCDataChannel * channel)
+ {
+ if (channel->parent.id == stream_id) {
+ GST_INFO_OBJECT (channel,
+ "Received channel close for SCTP stream %i label \"%s\"",
+ channel->parent.id, channel->parent.label);
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ channel->peer_closed = TRUE;
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure,
+ GUINT_TO_POINTER (stream_id), NULL);
+ }
+ }
+
+ /* GstWebRTCDataChannel close vfunc — runs the closing procedure inline. */
+ static void
+ webrtc_data_channel_close (GstWebRTCDataChannel * channel)
+ {
+ _close_procedure (WEBRTC_DATA_CHANNEL (channel), NULL);
+ }
+
+ /* Parse a message received on the control PPID: ACK is a no-op success;
+ * OPEN updates the channel's label/protocol/priority/reliability from the
+ * wire (ownership of the freshly-allocated strings transfers to the
+ * channel), schedules the on-open notification, and replies with an ACK
+ * through the appsrc.  Any other message type is an error. */
+ static GstFlowReturn
+ _parse_control_packet (WebRTCDataChannel * channel, guint8 * data,
+ gsize size, GError ** error)
+ {
+ GstByteReader r;
+ guint8 message_type;
+ gchar *label = NULL;
+ gchar *proto = NULL;
+
+ if (!data)
+ g_return_val_if_reached (GST_FLOW_ERROR);
+ if (size < 1)
+ g_return_val_if_reached (GST_FLOW_ERROR);
+
+ gst_byte_reader_init (&r, data, size);
+
+ if (!gst_byte_reader_get_uint8 (&r, &message_type))
+ g_return_val_if_reached (GST_FLOW_ERROR);
+
+ if (message_type == CHANNEL_MESSAGE_ACK) {
+ /* all good */
+ GST_INFO_OBJECT (channel, "Received channel ack");
+ return GST_FLOW_OK;
+ } else if (message_type == CHANNEL_MESSAGE_OPEN) {
+ guint8 reliability;
+ guint32 reliability_param;
+ guint16 priority, label_len, proto_len;
+ const guint8 *src;
+ GstBuffer *buffer;
+ GstFlowReturn ret;
+
+ GST_INFO_OBJECT (channel, "Received channel open");
+
+ /* an OPEN on a channel negotiated out-of-band is a protocol violation */
+ if (channel->parent.negotiated) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Data channel was signalled as negotiated already");
+ g_return_val_if_reached (GST_FLOW_ERROR);
+ }
+
+ /* duplicate OPENs are silently accepted */
+ if (channel->opened)
+ return GST_FLOW_OK;
+
+ if (!gst_byte_reader_get_uint8 (&r, &reliability))
+ goto parse_error;
+ if (!gst_byte_reader_get_uint16_be (&r, &priority))
+ goto parse_error;
+ if (!gst_byte_reader_get_uint32_be (&r, &reliability_param))
+ goto parse_error;
+ if (!gst_byte_reader_get_uint16_be (&r, &label_len))
+ goto parse_error;
+ if (!gst_byte_reader_get_uint16_be (&r, &proto_len))
+ goto parse_error;
+
+ /* +1 for the NUL terminator; g_new0 pre-zeroes the buffers */
+ label = g_new0 (gchar, (gsize) label_len + 1);
+ proto = g_new0 (gchar, (gsize) proto_len + 1);
+
+ if (!gst_byte_reader_get_data (&r, label_len, &src))
+ goto parse_error;
+ memcpy (label, src, label_len);
+ label[label_len] = '\0';
+ if (!gst_byte_reader_get_data (&r, proto_len, &src))
+ goto parse_error;
+ memcpy (proto, src, proto_len);
+ proto[proto_len] = '\0';
+
+ /* replace the channel's strings; label/proto now owned by the channel */
+ g_free (channel->parent.label);
+ channel->parent.label = label;
+ g_free (channel->parent.protocol);
+ channel->parent.protocol = proto;
+ channel->parent.priority = priority_uint_to_type (priority);
+ channel->parent.ordered = !(reliability & 0x80);
+ if (reliability & 0x01) {
+ channel->parent.max_retransmits = reliability_param;
+ channel->parent.max_packet_lifetime = -1;
+ } else if (reliability & 0x02) {
+ channel->parent.max_retransmits = -1;
+ channel->parent.max_packet_lifetime = reliability_param;
+ } else {
+ channel->parent.max_retransmits = -1;
+ channel->parent.max_packet_lifetime = -1;
+ }
+ channel->opened = TRUE;
+
+ GST_INFO_OBJECT (channel, "Received channel open for SCTP stream %i "
+ "label \"%s\" protocol %s ordered %s", channel->parent.id,
+ channel->parent.label, channel->parent.protocol,
+ channel->parent.ordered ? "true" : "false");
+
+ _channel_enqueue_task (channel, (ChannelTask) _emit_on_open, NULL, NULL);
+
+ GST_INFO_OBJECT (channel, "Sending channel ack");
+ buffer = construct_ack_packet (channel);
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ channel->parent.buffered_amount += gst_buffer_get_size (buffer);
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+
+ ret = gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer);
+ if (ret != GST_FLOW_OK) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Could not send ack packet");
+ return ret;
+ }
+
+ return ret;
+ } else {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Unknown message type in control protocol");
+ return GST_FLOW_ERROR;
+ }
+
+ parse_error:
+ {
+ g_free (label);
+ g_free (proto);
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "Failed to parse packet");
+ g_return_val_if_reached (GST_FLOW_ERROR);
+ }
+ }
+
+ /* appsink EOS callback — nothing to do; EOS is handled via the preroll
+ * and sample callbacks' flow returns. */
+ static void
+ on_sink_eos (GstAppSink * sink, gpointer user_data)
+ {
+ }
+
+ /* Keeps a mapped buffer alive while a GBytes built on top of its mapped
+ * memory is in use (see the binary PPID case below). */
+ struct map_info
+ {
+ GstBuffer *buffer;
+ GstMapInfo map_info;
+ };
+
+ /* GDestroyNotify for the GBytes: unmap, drop the buffer ref, free. */
+ static void
+ buffer_unmap_and_unref (struct map_info *info)
+ {
+ gst_buffer_unmap (info->buffer, &info->map_info);
+ gst_buffer_unref (info->buffer);
+ g_free (info);
+ }
+
+ /* ChannelTask wrapper: deliver a binary message to the application. */
+ static void
+ _emit_have_data (WebRTCDataChannel * channel, GBytes * data)
+ {
+ gst_webrtc_data_channel_on_message_data (GST_WEBRTC_DATA_CHANNEL (channel),
+ data);
+ }
+
+ /* ChannelTask wrapper: deliver a string message to the application. */
+ static void
+ _emit_have_string (GstWebRTCDataChannel * channel, gchar * str)
+ {
+ gst_webrtc_data_channel_on_message_string (GST_WEBRTC_DATA_CHANNEL (channel),
+ str);
+ }
+
+ /* Dispatch one received sample by its SCTP PPID: control packets are
+ * parsed inline; string payloads are copied and delivered as tasks;
+ * binary payloads are wrapped zero-copy in a GBytes that keeps the
+ * buffer mapped until the application drops it; the EMPTY PPIDs deliver
+ * NULL messages.  Unknown PPIDs are an error. */
+ static GstFlowReturn
+ _data_channel_have_sample (WebRTCDataChannel * channel, GstSample * sample,
+ GError ** error)
+ {
+ GstSctpReceiveMeta *receive;
+ GstBuffer *buffer;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ GST_LOG_OBJECT (channel, "Received sample %" GST_PTR_FORMAT, sample);
+
+ g_return_val_if_fail (channel->sctp_transport != NULL, GST_FLOW_ERROR);
+
+ buffer = gst_sample_get_buffer (sample);
+ if (!buffer) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "No buffer to handle");
+ return GST_FLOW_ERROR;
+ }
+ receive = gst_sctp_buffer_get_receive_meta (buffer);
+ if (!receive) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "No SCTP Receive meta on the buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ switch (receive->ppid) {
+ case DATA_CHANNEL_PPID_WEBRTC_CONTROL:{
+ GstMapInfo info = GST_MAP_INFO_INIT;
+ if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Failed to map received buffer");
+ ret = GST_FLOW_ERROR;
+ } else {
+ ret = _parse_control_packet (channel, info.data, info.size, error);
+ gst_buffer_unmap (buffer, &info);
+ }
+ break;
+ }
+ case DATA_CHANNEL_PPID_WEBRTC_STRING:
+ case DATA_CHANNEL_PPID_WEBRTC_STRING_PARTIAL:{
+ GstMapInfo info = GST_MAP_INFO_INIT;
+ if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Failed to map received buffer");
+ ret = GST_FLOW_ERROR;
+ } else {
+ /* copy; the task (and its g_free notify) own the string */
+ gchar *str = g_strndup ((gchar *) info.data, info.size);
+ _channel_enqueue_task (channel, (ChannelTask) _emit_have_string, str,
+ g_free);
+ gst_buffer_unmap (buffer, &info);
+ }
+ break;
+ }
+ case DATA_CHANNEL_PPID_WEBRTC_BINARY:
+ case DATA_CHANNEL_PPID_WEBRTC_BINARY_PARTIAL:{
+ struct map_info *info = g_new0 (struct map_info, 1);
+ if (!gst_buffer_map (buffer, &info->map_info, GST_MAP_READ)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Failed to map received buffer");
+ ret = GST_FLOW_ERROR;
+ } else {
+ /* zero-copy: GBytes' free func unmaps and unrefs via map_info */
+ GBytes *data = g_bytes_new_with_free_func (info->map_info.data,
+ info->map_info.size, (GDestroyNotify) buffer_unmap_and_unref, info);
+ info->buffer = gst_buffer_ref (buffer);
+ _channel_enqueue_task (channel, (ChannelTask) _emit_have_data, data,
+ (GDestroyNotify) g_bytes_unref);
+ }
+ break;
+ }
+ case DATA_CHANNEL_PPID_WEBRTC_BINARY_EMPTY:
+ _channel_enqueue_task (channel, (ChannelTask) _emit_have_data, NULL,
+ NULL);
+ break;
+ case DATA_CHANNEL_PPID_WEBRTC_STRING_EMPTY:
+ _channel_enqueue_task (channel, (ChannelTask) _emit_have_string, NULL,
+ NULL);
+ break;
+ default:
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Unknown SCTP PPID %u received", receive->ppid);
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ on_sink_preroll (GstAppSink * sink, gpointer user_data)
+ {
+ WebRTCDataChannel *channel = user_data;
+ GstSample *sample = gst_app_sink_pull_preroll (sink);
+ GstFlowReturn ret;
+
+ if (sample) {
+ /* This sample also seems to be provided by the sample callback
+ ret = _data_channel_have_sample (channel, sample); */
+ ret = GST_FLOW_OK;
+ gst_sample_unref (sample);
+ } else if (gst_app_sink_is_eos (sink)) {
+ ret = GST_FLOW_EOS;
+ } else {
+ ret = GST_FLOW_ERROR;
+ }
+
+ if (ret != GST_FLOW_OK) {
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL);
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ on_sink_sample (GstAppSink * sink, gpointer user_data)
+ {
+ WebRTCDataChannel *channel = user_data;
+ GstSample *sample = gst_app_sink_pull_sample (sink);
+ GstFlowReturn ret;
+ GError *error = NULL;
+
+ if (sample) {
+ ret = _data_channel_have_sample (channel, sample, &error);
+ gst_sample_unref (sample);
+ } else if (gst_app_sink_is_eos (sink)) {
+ ret = GST_FLOW_EOS;
+ } else {
+ ret = GST_FLOW_ERROR;
+ }
+
+ if (error)
+ _channel_store_error (channel, error);
+
+ if (ret != GST_FLOW_OK) {
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL);
+ }
+
+ return ret;
+ }
+
+ static GstAppSinkCallbacks sink_callbacks = {
+ on_sink_eos,
+ on_sink_preroll,
+ on_sink_sample,
+ };
+
+ void
+ webrtc_data_channel_start_negotiation (WebRTCDataChannel * channel)
+ {
+ GstBuffer *buffer;
+
+ g_return_if_fail (!channel->parent.negotiated);
+ g_return_if_fail (channel->parent.id != -1);
+ g_return_if_fail (channel->sctp_transport != NULL);
+
+ buffer = construct_open_packet (channel);
+
+ GST_INFO_OBJECT (channel, "Sending channel open for SCTP stream %i "
+ "label \"%s\" protocol %s ordered %s", channel->parent.id,
+ channel->parent.label, channel->parent.protocol,
+ channel->parent.ordered ? "true" : "false");
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ channel->parent.buffered_amount += gst_buffer_get_size (buffer);
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+
+ if (gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc),
+ buffer) == GST_FLOW_OK) {
+ channel->opened = TRUE;
+ _channel_enqueue_task (channel, (ChannelTask) _emit_on_open, NULL, NULL);
+ } else {
+ GError *error = NULL;
+ g_set_error (&error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Failed to send DCEP open packet");
+ _channel_store_error (channel, error);
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL);
+ }
+ }
+
+ static void
+ _get_sctp_reliability (WebRTCDataChannel * channel,
+ GstSctpSendMetaPartiallyReliability * reliability, guint * rel_param)
+ {
+ if (channel->parent.max_retransmits != -1) {
+ *reliability = GST_SCTP_SEND_META_PARTIAL_RELIABILITY_RTX;
+ *rel_param = channel->parent.max_retransmits;
+ } else if (channel->parent.max_packet_lifetime != -1) {
+ *reliability = GST_SCTP_SEND_META_PARTIAL_RELIABILITY_TTL;
+ *rel_param = channel->parent.max_packet_lifetime;
+ } else {
+ *reliability = GST_SCTP_SEND_META_PARTIAL_RELIABILITY_NONE;
+ *rel_param = 0;
+ }
+ }
+
+ static gboolean
+ _is_within_max_message_size (WebRTCDataChannel * channel, gsize size)
+ {
+ return size <= channel->sctp_transport->max_message_size;
+ }
+
+ static void
+ webrtc_data_channel_send_data (GstWebRTCDataChannel * base_channel,
+ GBytes * bytes)
+ {
+ WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (base_channel);
+ GstSctpSendMetaPartiallyReliability reliability;
+ guint rel_param;
+ guint32 ppid;
+ GstBuffer *buffer;
+ GstFlowReturn ret;
+
+ if (!bytes) {
+ buffer = gst_buffer_new ();
+ ppid = DATA_CHANNEL_PPID_WEBRTC_BINARY_EMPTY;
+ } else {
+ gsize size;
+ guint8 *data;
+
+ data = (guint8 *) g_bytes_get_data (bytes, &size);
+ g_return_if_fail (data != NULL);
+ if (!_is_within_max_message_size (channel, size)) {
+ GError *error = NULL;
+ g_set_error (&error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Requested to send data that is too large");
+ _channel_store_error (channel, error);
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL,
+ NULL);
+ return;
+ }
+
+ buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, data, size,
+ 0, size, g_bytes_ref (bytes), (GDestroyNotify) g_bytes_unref);
+ ppid = DATA_CHANNEL_PPID_WEBRTC_BINARY;
+ }
+
+ _get_sctp_reliability (channel, &reliability, &rel_param);
+ gst_sctp_buffer_add_send_meta (buffer, ppid, channel->parent.ordered,
+ reliability, rel_param);
+
+ GST_LOG_OBJECT (channel, "Sending data using buffer %" GST_PTR_FORMAT,
+ buffer);
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ channel->parent.buffered_amount += gst_buffer_get_size (buffer);
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+
+ ret = gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer);
+
+ if (ret != GST_FLOW_OK) {
+ GError *error = NULL;
+ g_set_error (&error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "Failed to send data");
+ _channel_store_error (channel, error);
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL);
+ }
+ }
+
+ static void
+ webrtc_data_channel_send_string (GstWebRTCDataChannel * base_channel,
+ const gchar * str)
+ {
+ WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (base_channel);
+ GstSctpSendMetaPartiallyReliability reliability;
+ guint rel_param;
+ guint32 ppid;
+ GstBuffer *buffer;
+ GstFlowReturn ret;
+
+ if (!channel->parent.negotiated)
+ g_return_if_fail (channel->opened);
+ g_return_if_fail (channel->sctp_transport != NULL);
+
+ if (!str) {
+ buffer = gst_buffer_new ();
+ ppid = DATA_CHANNEL_PPID_WEBRTC_STRING_EMPTY;
+ } else {
+ gsize size = strlen (str);
++#ifndef __TIZEN__
+ gchar *str_copy = g_strdup (str);
++#else
++ gchar *str_copy;
++#endif
+
+ if (!_is_within_max_message_size (channel, size)) {
+ GError *error = NULL;
+ g_set_error (&error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE,
+ "Requested to send a string that is too large");
+ _channel_store_error (channel, error);
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL,
+ NULL);
+ return;
+ }
+
++#ifdef __TIZEN__
++ str_copy = g_strdup (str);
++#endif
+ buffer =
+ gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, str_copy,
+ size, 0, size, str_copy, g_free);
+ ppid = DATA_CHANNEL_PPID_WEBRTC_STRING;
+ }
+
+ _get_sctp_reliability (channel, &reliability, &rel_param);
+ gst_sctp_buffer_add_send_meta (buffer, ppid, channel->parent.ordered,
+ reliability, rel_param);
+
+ GST_TRACE_OBJECT (channel, "Sending string using buffer %" GST_PTR_FORMAT,
+ buffer);
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ channel->parent.buffered_amount += gst_buffer_get_size (buffer);
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+
+ ret = gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer);
+
+ if (ret != GST_FLOW_OK) {
+ GError *error = NULL;
+ g_set_error (&error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "Failed to send string");
+ _channel_store_error (channel, error);
+ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL);
+ }
+ }
+
+ static void
+ _on_sctp_notify_state_unlocked (GObject * sctp_transport,
+ WebRTCDataChannel * channel)
+ {
+ GstWebRTCSCTPTransportState state;
+
+ g_object_get (sctp_transport, "state", &state, NULL);
+ if (state == GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED) {
+ if (channel->parent.negotiated)
+ _channel_enqueue_task (channel, (ChannelTask) _emit_on_open, NULL, NULL);
+ }
+ }
+
+ static void
+ _on_sctp_notify_state (GObject * sctp_transport, GParamSpec * pspec,
+ WebRTCDataChannel * channel)
+ {
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ _on_sctp_notify_state_unlocked (sctp_transport, channel);
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ }
+
+ static void
+ _emit_low_threshold (WebRTCDataChannel * channel, gpointer user_data)
+ {
+ gst_webrtc_data_channel_on_buffered_amount_low (GST_WEBRTC_DATA_CHANNEL
+ (channel));
+ }
+
+ static GstPadProbeReturn
+ on_appsrc_data (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+ {
+ WebRTCDataChannel *channel = user_data;
+ guint64 prev_amount;
+ guint64 size = 0;
+
+ if (GST_PAD_PROBE_INFO_TYPE (info) & (GST_PAD_PROBE_TYPE_BUFFER)) {
+ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
+ size = gst_buffer_get_size (buffer);
+ } else if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_BUFFER_LIST) {
+ GstBufferList *list = GST_PAD_PROBE_INFO_BUFFER_LIST (info);
+ size = gst_buffer_list_calculate_size (list);
+ }
+
+ if (size > 0) {
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ prev_amount = channel->parent.buffered_amount;
+ channel->parent.buffered_amount -= size;
+ GST_TRACE_OBJECT (channel, "checking low-threshold: prev %"
+ G_GUINT64_FORMAT " low-threshold %" G_GUINT64_FORMAT " buffered %"
+ G_GUINT64_FORMAT, prev_amount,
+ channel->parent.buffered_amount_low_threshold,
+ channel->parent.buffered_amount);
+ if (prev_amount >= channel->parent.buffered_amount_low_threshold
+ && channel->parent.buffered_amount <
+ channel->parent.buffered_amount_low_threshold) {
+ _channel_enqueue_task (channel, (ChannelTask) _emit_low_threshold, NULL,
+ NULL);
+ }
+
+ if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING
+ && channel->parent.buffered_amount <= 0) {
+ _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream, NULL,
+ NULL);
+ }
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ }
+
+ return GST_PAD_PROBE_OK;
+ }
+
+ static void
+ gst_webrtc_data_channel_constructed (GObject * object)
+ {
+ WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (object);
+ GstPad *pad;
+ GstCaps *caps;
+
+ caps = gst_caps_new_any ();
+
+ channel->appsrc = gst_element_factory_make ("appsrc", NULL);
+ gst_object_ref_sink (channel->appsrc);
+ pad = gst_element_get_static_pad (channel->appsrc, "src");
+
+ channel->src_probe = gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_DATA_BOTH,
+ (GstPadProbeCallback) on_appsrc_data, channel, NULL);
+
+ channel->appsink = gst_element_factory_make ("appsink", NULL);
+ gst_object_ref_sink (channel->appsink);
+ g_object_set (channel->appsink, "sync", FALSE, "async", FALSE, "caps", caps,
+ NULL);
+ gst_app_sink_set_callbacks (GST_APP_SINK (channel->appsink), &sink_callbacks,
+ channel, NULL);
+
+ gst_object_unref (pad);
+ gst_caps_unref (caps);
+ }
+
+ static void
+ gst_webrtc_data_channel_finalize (GObject * object)
+ {
+ WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (object);
+
+ if (channel->src_probe) {
+ GstPad *pad = gst_element_get_static_pad (channel->appsrc, "src");
+ gst_pad_remove_probe (pad, channel->src_probe);
+ gst_object_unref (pad);
+ channel->src_probe = 0;
+ }
+
+ if (channel->sctp_transport)
+ g_signal_handlers_disconnect_by_data (channel->sctp_transport, channel);
+ g_clear_object (&channel->sctp_transport);
+
+ g_clear_object (&channel->appsrc);
+ g_clear_object (&channel->appsink);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ webrtc_data_channel_class_init (WebRTCDataChannelClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstWebRTCDataChannelClass *channel_class =
+ (GstWebRTCDataChannelClass *) klass;
+
+ gobject_class->constructed = gst_webrtc_data_channel_constructed;
+ gobject_class->finalize = gst_webrtc_data_channel_finalize;
+
+ channel_class->send_data = webrtc_data_channel_send_data;
+ channel_class->send_string = webrtc_data_channel_send_string;
+ channel_class->close = webrtc_data_channel_close;
+ }
+
+ static void
+ webrtc_data_channel_init (WebRTCDataChannel * channel)
+ {
+ }
+
+ static void
+ _data_channel_set_sctp_transport (WebRTCDataChannel * channel,
+ WebRTCSCTPTransport * sctp)
+ {
+ g_return_if_fail (GST_IS_WEBRTC_DATA_CHANNEL (channel));
+ g_return_if_fail (GST_IS_WEBRTC_SCTP_TRANSPORT (sctp));
+
+ GST_WEBRTC_DATA_CHANNEL_LOCK (channel);
+ if (channel->sctp_transport)
+ g_signal_handlers_disconnect_by_data (channel->sctp_transport, channel);
+
+ gst_object_replace ((GstObject **) & channel->sctp_transport,
+ GST_OBJECT (sctp));
+
+ if (sctp) {
+ g_signal_connect (sctp, "stream-reset", G_CALLBACK (_on_sctp_stream_reset),
+ channel);
+ g_signal_connect (sctp, "notify::state", G_CALLBACK (_on_sctp_notify_state),
+ channel);
+ }
+ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel);
+ }
+
+ void
+ webrtc_data_channel_link_to_sctp (WebRTCDataChannel * channel,
+ WebRTCSCTPTransport * sctp_transport)
+ {
+ if (sctp_transport && !channel->sctp_transport) {
+ gint id;
+
+ g_object_get (channel, "id", &id, NULL);
+
+ if (sctp_transport->association_established && id != -1) {
+ gchar *pad_name;
+
+ _data_channel_set_sctp_transport (channel, sctp_transport);
+ pad_name = g_strdup_printf ("sink_%u", id);
+ if (!gst_element_link_pads (channel->appsrc, "src",
+ channel->sctp_transport->sctpenc, pad_name))
+ g_warn_if_reached ();
+ g_free (pad_name);
+
+ _on_sctp_notify_state_unlocked (G_OBJECT (sctp_transport), channel);
+ }
+ }
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2017 Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include "webrtcsdp.h"
+
+ #include "utils.h"
+
+ #include <string.h>
+ #include <stdlib.h>
+
+ #define IS_EMPTY_SDP_ATTRIBUTE(val) (val == NULL || g_strcmp0(val, "") == 0)
+
+ const gchar *
+ _sdp_source_to_string (SDPSource source)
+ {
+ switch (source) {
+ case SDP_LOCAL:
+ return "local";
+ case SDP_REMOTE:
+ return "remote";
+ default:
+ return "none";
+ }
+ }
+
+ static gboolean
+ _check_valid_state_for_sdp_change (GstWebRTCSignalingState state,
+ SDPSource source, GstWebRTCSDPType type, GError ** error)
+ {
+ #define STATE(val) GST_WEBRTC_SIGNALING_STATE_ ## val
+ #define TYPE(val) GST_WEBRTC_SDP_TYPE_ ## val
+
+ if (source == SDP_LOCAL && type == TYPE (OFFER) && state == STATE (STABLE))
+ return TRUE;
+ if (source == SDP_LOCAL && type == TYPE (OFFER)
+ && state == STATE (HAVE_LOCAL_OFFER))
+ return TRUE;
+ if (source == SDP_LOCAL && type == TYPE (ANSWER)
+ && state == STATE (HAVE_REMOTE_OFFER))
+ return TRUE;
+ if (source == SDP_LOCAL && type == TYPE (PRANSWER)
+ && state == STATE (HAVE_REMOTE_OFFER))
+ return TRUE;
+ if (source == SDP_LOCAL && type == TYPE (PRANSWER)
+ && state == STATE (HAVE_LOCAL_PRANSWER))
+ return TRUE;
+
+ if (source == SDP_REMOTE && type == TYPE (OFFER) && state == STATE (STABLE))
+ return TRUE;
+ if (source == SDP_REMOTE && type == TYPE (OFFER)
+ && state == STATE (HAVE_REMOTE_OFFER))
+ return TRUE;
+ if (source == SDP_REMOTE && type == TYPE (ANSWER)
+ && state == STATE (HAVE_LOCAL_OFFER))
+ return TRUE;
+ if (source == SDP_REMOTE && type == TYPE (PRANSWER)
+ && state == STATE (HAVE_LOCAL_OFFER))
+ return TRUE;
+ if (source == SDP_REMOTE && type == TYPE (PRANSWER)
+ && state == STATE (HAVE_REMOTE_PRANSWER))
+ return TRUE;
+
+ {
+ gchar *state_str = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE,
+ state);
+ gchar *type_str = _enum_value_to_string (GST_TYPE_WEBRTC_SDP_TYPE, type);
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_INVALID_STATE,
+ "Not in the correct state (%s) for setting %s %s description",
+ state_str, _sdp_source_to_string (source), type_str);
+ g_free (state_str);
+ g_free (type_str);
+ }
+
+ return FALSE;
+
+ #undef STATE
+ #undef TYPE
+ }
+
+ static gboolean
+ _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp,
+ GError ** error)
+ {
+ const gchar *message_fingerprint, *fingerprint;
+ const GstSDPKey *key;
+ int i;
+
+ key = gst_sdp_message_get_key (sdp->sdp);
+ if (!IS_EMPTY_SDP_ATTRIBUTE (key->data)) {
+ g_set_error_literal (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_BAD_SDP, "sdp contains a k line");
+ return FALSE;
+ }
+
+ message_fingerprint = fingerprint =
+ gst_sdp_message_get_attribute_val (sdp->sdp, "fingerprint");
+ for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i);
+ const gchar *media_fingerprint =
+ gst_sdp_media_get_attribute_val (media, "fingerprint");
+
+ if (IS_EMPTY_SDP_ATTRIBUTE (message_fingerprint)
+ && IS_EMPTY_SDP_ATTRIBUTE (media_fingerprint)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_FINGERPRINT,
+ "No fingerprint lines in sdp for media %u", i);
+ return FALSE;
+ }
+ if (IS_EMPTY_SDP_ATTRIBUTE (fingerprint)) {
+ fingerprint = media_fingerprint;
+ }
+ if (!IS_EMPTY_SDP_ATTRIBUTE (media_fingerprint)
+ && g_strcmp0 (fingerprint, media_fingerprint) != 0) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_FINGERPRINT,
+ "Fingerprint in media %u differs from %s fingerprint. "
+ "\'%s\' != \'%s\'", i, message_fingerprint ? "global" : "previous",
+ fingerprint, media_fingerprint);
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+ }
+
+ gboolean
+ _message_has_attribute_key (const GstSDPMessage * msg, const gchar * key)
+ {
+ int i;
+ for (i = 0; i < gst_sdp_message_attributes_len (msg); i++) {
+ const GstSDPAttribute *attr = gst_sdp_message_get_attribute (msg, i);
+
+ if (g_strcmp0 (attr->key, key) == 0)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ #if 0
+ static gboolean
+ _session_has_attribute_key_value (const GstSDPMessage * msg, const gchar * key,
+ const gchar * value)
+ {
+ int i;
+ for (i = 0; i < gst_sdp_message_attributes_len (msg); i++) {
+ const GstSDPAttribute *attr = gst_sdp_message_get_attribute (msg, i);
+
+ if (g_strcmp0 (attr->key, key) == 0 && g_strcmp0 (attr->value, value) == 0)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ _check_trickle_ice (GstSDPMessage * msg, GError ** error)
+ {
+ if (!_session_has_attribute_key_value (msg, "ice-options", "trickle")) {
+ g_set_error_literal (error, GST_WEBRTC_BIN_ERROR,
+ GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "No required \'a=ice-options:trickle\' line in sdp");
+ }
+ return TRUE;
+ }
+ #endif
+ gboolean
+ _media_has_attribute_key (const GstSDPMedia * media, const gchar * key)
+ {
+ int i;
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+ if (g_strcmp0 (attr->key, key) == 0)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ _media_has_mid (const GstSDPMedia * media, guint media_idx, GError ** error)
+ {
+ const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid");
+ if (IS_EMPTY_SDP_ATTRIBUTE (mid)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u is missing or contains an empty \'mid\' attribute",
+ media_idx);
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ const gchar *
+ _media_get_ice_ufrag (const GstSDPMessage * msg, guint media_idx)
+ {
+ const gchar *ice_ufrag;
+
+ ice_ufrag = gst_sdp_message_get_attribute_val (msg, "ice-ufrag");
+ if (IS_EMPTY_SDP_ATTRIBUTE (ice_ufrag)) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (msg, media_idx);
+ ice_ufrag = gst_sdp_media_get_attribute_val (media, "ice-ufrag");
+ if (IS_EMPTY_SDP_ATTRIBUTE (ice_ufrag))
+ return NULL;
+ }
+ return ice_ufrag;
+ }
+
+ const gchar *
+ _media_get_ice_pwd (const GstSDPMessage * msg, guint media_idx)
+ {
+ const gchar *ice_pwd;
+
+ ice_pwd = gst_sdp_message_get_attribute_val (msg, "ice-pwd");
+ if (IS_EMPTY_SDP_ATTRIBUTE (ice_pwd)) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (msg, media_idx);
+ ice_pwd = gst_sdp_media_get_attribute_val (media, "ice-pwd");
+ if (IS_EMPTY_SDP_ATTRIBUTE (ice_pwd))
+ return NULL;
+ }
+ return ice_pwd;
+ }
+
+ static gboolean
+ _media_has_setup (const GstSDPMedia * media, guint media_idx, GError ** error)
+ {
+ static const gchar *valid_setups[] = { "actpass", "active", "passive", NULL };
+ const gchar *setup = gst_sdp_media_get_attribute_val (media, "setup");
+ if (IS_EMPTY_SDP_ATTRIBUTE (setup)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u is missing or contains an empty \'setup\' attribute",
+ media_idx);
+ return FALSE;
+ }
+ if (!g_strv_contains (valid_setups, setup)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u contains unknown \'setup\' attribute, \'%s\'", media_idx,
+ setup);
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ #if 0
+ static gboolean
+ _media_has_dtls_id (const GstSDPMedia * media, guint media_idx, GError ** error)
+ {
+ const gchar *dtls_id = gst_sdp_media_get_attribute_val (media, "dtls-id");
+ if (IS_EMPTY_SDP_ATTRIBUTE (dtls_id)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u is missing or contains an empty \'dtls-id\' attribute",
+ media_idx);
+ return FALSE;
+ }
+ return TRUE;
+ }
+ #endif
+ gboolean
+ validate_sdp (GstWebRTCSignalingState state, SDPSource source,
+ GstWebRTCSessionDescription * sdp, GError ** error)
+ {
+ const gchar *group, *bundle_ice_ufrag = NULL, *bundle_ice_pwd = NULL;
+ gchar **group_members = NULL;
+ gboolean is_bundle = FALSE;
+ int i;
+
+ if (!_check_valid_state_for_sdp_change (state, source, sdp->type, error))
+ return FALSE;
+ if (!_check_sdp_crypto (source, sdp, error))
+ return FALSE;
+ /* not explicitly required
+ if (ICE && !_check_trickle_ice (sdp->sdp))
+ return FALSE;*/
+ group = gst_sdp_message_get_attribute_val (sdp->sdp, "group");
+ is_bundle = group && g_str_has_prefix (group, "BUNDLE");
+ if (is_bundle)
+ group_members = g_strsplit (&group[6], " ", -1);
+
+ for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) {
+ const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i);
+ const gchar *mid;
+ gboolean media_in_bundle = FALSE;
+ if (!_media_has_mid (media, i, error))
+ goto fail;
+ mid = gst_sdp_media_get_attribute_val (media, "mid");
+ media_in_bundle = is_bundle
+ && g_strv_contains ((const gchar **) group_members, mid);
+ if (!_media_get_ice_ufrag (sdp->sdp, i)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u is missing or contains an empty \'ice-ufrag\' attribute",
+ i);
+ goto fail;
+ }
+ if (!_media_get_ice_pwd (sdp->sdp, i)) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u is missing or contains an empty \'ice-pwd\' attribute", i);
+ goto fail;
+ }
+ if (!_media_has_setup (media, i, error))
+ goto fail;
+ /* check parameters in bundle are the same */
+ if (media_in_bundle) {
+ const gchar *ice_ufrag =
+ gst_sdp_media_get_attribute_val (media, "ice-ufrag");
+ const gchar *ice_pwd = gst_sdp_media_get_attribute_val (media, "ice-pwd");
+ if (!bundle_ice_ufrag)
+ bundle_ice_ufrag = ice_ufrag;
+ else if (g_strcmp0 (bundle_ice_ufrag, ice_ufrag) != 0) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u has different ice-ufrag values in bundle. "
+ "%s != %s", i, bundle_ice_ufrag, ice_ufrag);
+ goto fail;
+ }
+ if (!bundle_ice_pwd) {
+ bundle_ice_pwd = ice_pwd;
+ } else if (g_strcmp0 (bundle_ice_pwd, ice_pwd) != 0) {
+ g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+ "media %u has different ice-pwd values in bundle. "
+ "%s != %s", i, bundle_ice_pwd, ice_pwd);
+ goto fail;
+ }
+ }
+ }
+
+ g_strfreev (group_members);
+
+ return TRUE;
+
+ fail:
+ g_strfreev (group_members);
+ return FALSE;
+ }
+
+ GstWebRTCRTPTransceiverDirection
+ _get_direction_from_media (const GstSDPMedia * media)
+ {
+ GstWebRTCRTPTransceiverDirection new_dir =
+ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE;
+ int i;
+
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+ if (g_strcmp0 (attr->key, "sendonly") == 0) {
+ if (new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) {
+ GST_ERROR ("Multiple direction attributes");
+ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE;
+ }
+ new_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY;
+ } else if (g_strcmp0 (attr->key, "sendrecv") == 0) {
+ if (new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) {
+ GST_ERROR ("Multiple direction attributes");
+ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE;
+ }
+ new_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV;
+ } else if (g_strcmp0 (attr->key, "recvonly") == 0) {
+ if (new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) {
+ GST_ERROR ("Multiple direction attributes");
+ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE;
+ }
+ new_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY;
+ } else if (g_strcmp0 (attr->key, "inactive") == 0) {
+ if (new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) {
+ GST_ERROR ("Multiple direction attributes");
+ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE;
+ }
+ new_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE;
+ }
+ }
+
+ return new_dir;
+ }
+
+ #define DIR(val) GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_ ## val
+ GstWebRTCRTPTransceiverDirection
+ _intersect_answer_directions (GstWebRTCRTPTransceiverDirection offer,
+ GstWebRTCRTPTransceiverDirection answer)
+ {
+ if (offer == DIR (INACTIVE) || answer == DIR (INACTIVE))
+ return DIR (INACTIVE);
+ if (offer == DIR (SENDONLY) && answer == DIR (SENDRECV))
+ return DIR (RECVONLY);
+ if (offer == DIR (SENDONLY) && answer == DIR (RECVONLY))
+ return DIR (RECVONLY);
+ if (offer == DIR (RECVONLY) && answer == DIR (SENDRECV))
+ return DIR (SENDONLY);
+ if (offer == DIR (RECVONLY) && answer == DIR (SENDONLY))
+ return DIR (SENDONLY);
+ if (offer == DIR (SENDRECV) && answer == DIR (SENDRECV))
+ return DIR (SENDRECV);
+ if (offer == DIR (SENDRECV) && answer == DIR (SENDONLY))
+ return DIR (SENDONLY);
+ if (offer == DIR (SENDRECV) && answer == DIR (RECVONLY))
+ return DIR (RECVONLY);
+ if (offer == DIR (RECVONLY) && answer == DIR (RECVONLY))
+ return DIR (INACTIVE);
+ if (offer == DIR (SENDONLY) && answer == DIR (SENDONLY))
+ return DIR (INACTIVE);
+
+ return DIR (NONE);
+ }
+
+ void
+ _media_replace_direction (GstSDPMedia * media,
+ GstWebRTCRTPTransceiverDirection direction)
+ {
+ gchar *dir_str;
+ int i;
+
+ dir_str =
+ _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION,
+ direction);
+
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+ if (g_strcmp0 (attr->key, "sendonly") == 0
+ || g_strcmp0 (attr->key, "sendrecv") == 0
+ || g_strcmp0 (attr->key, "recvonly") == 0
+ || g_strcmp0 (attr->key, "inactive") == 0) {
+ GstSDPAttribute new_attr = { 0, };
+ GST_TRACE ("replace %s with %s", attr->key, dir_str);
+ gst_sdp_attribute_set (&new_attr, dir_str, "");
+ gst_sdp_media_replace_attribute (media, i, &new_attr);
+ g_free (dir_str);
+ return;
+ }
+ }
+
+ GST_TRACE ("add %s", dir_str);
+ gst_sdp_media_add_attribute (media, dir_str, "");
+ g_free (dir_str);
+ }
+
+ GstWebRTCRTPTransceiverDirection
+ _get_final_direction (GstWebRTCRTPTransceiverDirection local_dir,
+ GstWebRTCRTPTransceiverDirection remote_dir)
+ {
+ GstWebRTCRTPTransceiverDirection new_dir;
+ new_dir = DIR (NONE);
+ switch (local_dir) {
+ case DIR (INACTIVE):
+ new_dir = DIR (INACTIVE);
+ break;
+ case DIR (SENDONLY):
+ if (remote_dir == DIR (SENDONLY)) {
+ GST_ERROR ("remote SDP has the same directionality. "
+ "This is not legal.");
+ return DIR (NONE);
+ } else if (remote_dir == DIR (INACTIVE)) {
+ new_dir = DIR (INACTIVE);
+ } else {
+ new_dir = DIR (SENDONLY);
+ }
+ break;
+ case DIR (RECVONLY):
+ if (remote_dir == DIR (RECVONLY)) {
+ GST_ERROR ("remote SDP has the same directionality. "
+ "This is not legal.");
+ return DIR (NONE);
+ } else if (remote_dir == DIR (INACTIVE)) {
+ new_dir = DIR (INACTIVE);
+ } else {
+ new_dir = DIR (RECVONLY);
+ }
+ break;
+ case DIR (SENDRECV):
+ if (remote_dir == DIR (INACTIVE)) {
+ new_dir = DIR (INACTIVE);
+ } else if (remote_dir == DIR (SENDONLY)) {
+ new_dir = DIR (RECVONLY);
+ } else if (remote_dir == DIR (RECVONLY)) {
+ new_dir = DIR (SENDONLY);
+ } else if (remote_dir == DIR (SENDRECV)) {
+ new_dir = DIR (SENDRECV);
+ }
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+
+ if (new_dir == DIR (NONE)) {
+ GST_ERROR ("Abnormal situation!");
+ return DIR (NONE);
+ }
+
+ return new_dir;
+ }
+
+ #undef DIR
+
+ #define SETUP(val) GST_WEBRTC_DTLS_SETUP_ ## val
+ GstWebRTCDTLSSetup
+ _get_dtls_setup_from_media (const GstSDPMedia * media)
+ {
+ int i;
+
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+ if (g_strcmp0 (attr->key, "setup") == 0) {
+ if (g_strcmp0 (attr->value, "actpass") == 0) {
+ return SETUP (ACTPASS);
+ } else if (g_strcmp0 (attr->value, "active") == 0) {
+ return SETUP (ACTIVE);
+ } else if (g_strcmp0 (attr->value, "passive") == 0) {
+ return SETUP (PASSIVE);
+ } else {
+ GST_ERROR ("unknown setup value %s", attr->value);
+ return SETUP (NONE);
+ }
+ }
+ }
+
+ GST_LOG ("no setup attribute in media");
+ return SETUP (NONE);
+ }
+
+ GstWebRTCDTLSSetup
+ _intersect_dtls_setup (GstWebRTCDTLSSetup offer)
+ {
+ switch (offer) {
+ case SETUP (NONE): /* default is active */
+ case SETUP (ACTPASS):
+ case SETUP (PASSIVE):
+ return SETUP (ACTIVE);
+ case SETUP (ACTIVE):
+ return SETUP (PASSIVE);
+ default:
+ return SETUP (NONE);
+ }
+ }
+
+ void
+ _media_replace_setup (GstSDPMedia * media, GstWebRTCDTLSSetup setup)
+ {
+ gchar *setup_str;
+ int i;
+
+ setup_str = _enum_value_to_string (GST_TYPE_WEBRTC_DTLS_SETUP, setup);
+
+ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+ if (g_strcmp0 (attr->key, "setup") == 0) {
+ GstSDPAttribute new_attr = { 0, };
+ GST_TRACE ("replace setup:%s with setup:%s", attr->value, setup_str);
+ gst_sdp_attribute_set (&new_attr, "setup", setup_str);
+ gst_sdp_media_replace_attribute (media, i, &new_attr);
++#ifdef __TIZEN__
++ g_free (setup_str);
++#endif
+ return;
+ }
+ }
+
+ GST_TRACE ("add setup:%s", setup_str);
+ gst_sdp_media_add_attribute (media, "setup", setup_str);
+ g_free (setup_str);
+ }
+
+ /* Resolve the DTLS connection role to use from the local and remote
+  * 'a=setup' attributes, following the active/passive/actpass rules.
+  *
+  * Returns the role this endpoint should take, or SETUP (NONE) when the
+  * combination is illegal or the local SDP was unparseable. */
+ GstWebRTCDTLSSetup
+ _get_final_setup (GstWebRTCDTLSSetup local_setup,
+     GstWebRTCDTLSSetup remote_setup)
+ {
+   GstWebRTCDTLSSetup new_setup;
+
+   new_setup = SETUP (NONE);
+   switch (local_setup) {
+     case SETUP (NONE):
+       /* someone's done a bad job of mangling the SDP. or bugs */
+       g_critical ("Received a locally generated sdp without a parseable "
+           "\'a=setup\' line. This indicates a bug somewhere. Bailing");
+       return SETUP (NONE);
+     case SETUP (ACTIVE):
+       /* both peers cannot take the active (client) role */
+       if (remote_setup == SETUP (ACTIVE)) {
+         GST_ERROR ("remote SDP has the same "
+             "\'a=setup:active\' attribute. This is not legal");
+         return SETUP (NONE);
+       }
+       new_setup = SETUP (ACTIVE);
+       break;
+     case SETUP (PASSIVE):
+       /* both peers cannot take the passive (server) role */
+       if (remote_setup == SETUP (PASSIVE)) {
+         GST_ERROR ("remote SDP has the same "
+             "\'a=setup:passive\' attribute. This is not legal");
+         return SETUP (NONE);
+       }
+       new_setup = SETUP (PASSIVE);
+       break;
+     case SETUP (ACTPASS):
+       /* we offered both roles: take the opposite of what the remote chose */
+       if (remote_setup == SETUP (ACTPASS)) {
+         GST_ERROR ("remote SDP has the same "
+             "\'a=setup:actpass\' attribute. This is not legal");
+         return SETUP (NONE);
+       }
+       if (remote_setup == SETUP (ACTIVE))
+         new_setup = SETUP (PASSIVE);
+       else if (remote_setup == SETUP (PASSIVE))
+         new_setup = SETUP (ACTIVE);
+       else if (remote_setup == SETUP (NONE)) {
+         /* XXX: what to do here? */
+         GST_WARNING ("unspecified situation. local: "
+             "\'a=setup:actpass\' remote: none/unparseable");
+         new_setup = SETUP (ACTIVE);
+       }
+       break;
+     default:
+       g_assert_not_reached ();
+       return SETUP (NONE);
+   }
+   if (new_setup == SETUP (NONE)) {
+     GST_ERROR ("Abnormal situation!");
+     return SETUP (NONE);
+   }
+
+   return new_setup;
+ }
+
+ #undef SETUP
+
+ /* Compute a colon-separated, uppercase hex fingerprint of a PEM
+  * certificate using @checksum_type.
+  *
+  * The PEM header/footer lines ("-----...") and newlines are stripped,
+  * the base64 payload is decoded, and the digest of the raw bytes is
+  * rendered as "AA:BB:...".
+  *
+  * Returns: (transfer full): the fingerprint string, or NULL if
+  * @certificate is NULL or @checksum_type is not supported. */
+ gchar *
+ _generate_fingerprint_from_certificate (gchar * certificate,
+     GChecksumType checksum_type)
+ {
+   gchar **lines, *line;
+   guchar *tmp, *decoded, *digest;
+   GChecksum *checksum;
+   GString *fingerprint;
+   gsize decoded_length, digest_size, i;
+   gint state = 0;
+   guint save = 0;
+   int j;
+
+   g_return_val_if_fail (certificate != NULL, NULL);
+
+   /* Create the checksum first: g_checksum_new() returns NULL for an
+    * unsupported type, in which case g_checksum_type_get_length() would
+    * return -1 and the g_new() below would attempt a huge allocation. */
+   checksum = g_checksum_new (checksum_type);
+   g_return_val_if_fail (checksum != NULL, NULL);
+
+   /* 1. decode the certificate removing newlines and the certificate header
+    * and footer */
+   decoded = tmp = g_new0 (guchar, (strlen (certificate) / 4) * 3 + 3);
+   lines = g_strsplit (certificate, "\n", 0);
+   for (j = 0, line = lines[j]; line; line = lines[++j]) {
+     if (line[0] && !g_str_has_prefix (line, "-----"))
+       tmp += g_base64_decode_step (line, strlen (line), tmp, &state, &save);
+   }
+   g_strfreev (lines);
+   decoded_length = tmp - decoded;
+
+   /* 2. compute a checksum of the decoded certificate */
+   digest_size = g_checksum_type_get_length (checksum_type);
+   digest = g_new (guint8, digest_size);
+   g_checksum_update (checksum, decoded, decoded_length);
+   g_checksum_get_digest (checksum, digest, &digest_size);
+   g_free (decoded);
+
+   /* 3. hex encode the checksum separated with ':'s */
+   fingerprint = g_string_new (NULL);
+   for (i = 0; i < digest_size; i++) {
+     if (i)
+       g_string_append (fingerprint, ":");
+     g_string_append_printf (fingerprint, "%02X", digest[i]);
+   }
+
+   g_free (digest);
+   g_checksum_free (checksum);
+
+   return g_string_free (fingerprint, FALSE);
+ }
+
+ /* Lengths of the randomly generated ICE username fragment and password. */
+ #define DEFAULT_ICE_UFRAG_LEN 32
+ #define DEFAULT_ICE_PASSWORD_LEN 32
+ /* Alphabet the random ICE credentials are drawn from (base64-style set). */
+ static const gchar *ice_credential_chars =
+     "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz" "0123456789" "+/";
+
+ /* Fill @buf with @len random characters drawn from ice_credential_chars.
+  * Shared by the ufrag and password generation below. */
+ static void
+ _fill_random_ice_chars (gchar * buf, guint len)
+ {
+   /* hoisted out of the loop: the alphabet length is loop-invariant */
+   const gint num_chars = (gint) strlen (ice_credential_chars);
+   guint i;
+
+   for (i = 0; i < len; i++)
+     buf[i] = ice_credential_chars[g_random_int_range (0, num_chars)];
+ }
+
+ /* Generate a random ICE username fragment and password.
+  * Both out-parameters receive newly allocated, NUL-terminated strings
+  * owned by the caller (free with g_free()). */
+ void
+ _generate_ice_credentials (gchar ** ufrag, gchar ** password)
+ {
+   *ufrag = g_malloc0 (DEFAULT_ICE_UFRAG_LEN + 1);
+   _fill_random_ice_chars (*ufrag, DEFAULT_ICE_UFRAG_LEN);
+
+   *password = g_malloc0 (DEFAULT_ICE_PASSWORD_LEN + 1);
+   _fill_random_ice_chars (*password, DEFAULT_ICE_PASSWORD_LEN);
+ }
+
+ /* Extract the SCTP port advertised by a data-channel m-line, supporting
+  * both the draft-21 ("a=sctp-port") and draft-05 (port as the m-line
+  * format, validated against "a=sctpmap") styles.
+  *
+  * Returns the port, or -1 if the media is malformed or the value is
+  * not a valid 16-bit port number. */
+ int
+ _get_sctp_port_from_media (const GstSDPMedia * media)
+ {
+   int i;
+   const gchar *format;
+   gchar *endptr;
+
+   if (gst_sdp_media_formats_len (media) != 1) {
+     /* only exactly one format is supported */
+     return -1;
+   }
+
+   format = gst_sdp_media_get_format (media, 0);
+
+   if (g_strcmp0 (format, "webrtc-datachannel") == 0) {
+     /* draft-ietf-mmusic-sctp-sdp-21, e.g. Firefox 63 and later */
+
+     for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+       const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+       if (g_strcmp0 (attr->key, "sctp-port") == 0) {
+         gint64 port = g_ascii_strtoll (attr->value, &endptr, 10);
+         if (endptr == attr->value || port < 0 || port > G_MAXUINT16) {
+           /* conversion error, or outside the valid 16-bit port range
+            * (the gint64 would otherwise be silently truncated to int) */
+           return -1;
+         }
+         return port;
+       }
+     }
+   } else {
+     /* draft-ietf-mmusic-sctp-sdp-05, e.g. Chrome as recent as 75 */
+     gint64 port = g_ascii_strtoll (format, &endptr, 10);
+     if (endptr == format || port < 0 || port > G_MAXUINT16) {
+       /* conversion error or out-of-range port */
+       return -1;
+     }
+
+     for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+       const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+       if (g_strcmp0 (attr->key, "sctpmap") == 0 && atoi (attr->value) == port) {
+         /* a=sctpmap:5000 webrtc-datachannel 256 */
+         gchar **parts = g_strsplit (attr->value, " ", 3);
+         if (!parts[1] || g_strcmp0 (parts[1], "webrtc-datachannel") != 0) {
+           port = -1;
+         }
+         g_strfreev (parts);
+         return port;
+       }
+     }
+   }
+
+   return -1;
+ }
+
+ /* Return the value of the "a=max-message-size" attribute of @media,
+  * or the 64 KiB default when the attribute is absent. */
+ guint64
+ _get_sctp_max_message_size_from_media (const GstSDPMedia * media)
+ {
+   int i;
+
+   for (i = 0; i < gst_sdp_media_attributes_len (media); i++) {
+     const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+     if (g_strcmp0 (attr->key, "max-message-size") == 0)
+       /* parse as unsigned 64-bit: atoi would overflow for values above
+        * G_MAXINT even though the return type is guint64 */
+       return g_ascii_strtoull (attr->value, NULL, 10);
+   }
+
+   return 65536;
+ }
+
+ /* Check whether media section @media_id of @msg describes a WebRTC data
+  * channel: an "application" m-line whose single format is
+  * "webrtc-datachannel". */
+ gboolean
+ _message_media_is_datachannel (const GstSDPMessage * msg, guint media_id)
+ {
+   const GstSDPMedia *media;
+
+   if (msg == NULL || gst_sdp_message_medias_len (msg) <= media_id)
+     return FALSE;
+
+   media = gst_sdp_message_get_media (msg, media_id);
+
+   return g_strcmp0 (gst_sdp_media_get_media (media), "application") == 0
+       && gst_sdp_media_formats_len (media) == 1
+       && g_strcmp0 (gst_sdp_media_get_format (media, 0),
+       "webrtc-datachannel") == 0;
+ }
+
+ /* Return the index of the first data-channel media section in @msg,
+  * or G_MAXUINT when none is present. */
+ guint
+ _message_get_datachannel_index (const GstSDPMessage * msg)
+ {
+   guint idx, n_medias;
+
+   n_medias = gst_sdp_message_medias_len (msg);
+   for (idx = 0; idx < n_medias; idx++) {
+     if (!_message_media_is_datachannel (msg, idx))
+       continue;
+     /* G_MAXUINT is reserved as the "not found" sentinel */
+     g_assert (idx < G_MAXUINT);
+     return idx;
+   }
+
+   return G_MAXUINT;
+ }
+
+ /* Look up the ICE username fragment and password advertised in @sdp for
+  * media section @media_idx.
+  *
+  * Search order: 1) the media section itself, 2) session-level
+  * attributes, 3) any other media section (per JSEP these should be
+  * identical for bundled streams).
+  *
+  * @ufrag and @pwd receive newly allocated strings (caller frees with
+  * g_free()), or NULL when no credentials were found. */
+ void
+ _get_ice_credentials_from_sdp_media (const GstSDPMessage * sdp, guint media_idx,
+     gchar ** ufrag, gchar ** pwd)
+ {
+   int i;
+
+   *ufrag = NULL;
+   *pwd = NULL;
+
+   {
+     /* search in the corresponding media section */
+     const GstSDPMedia *media = gst_sdp_message_get_media (sdp, media_idx);
+     const gchar *tmp_ufrag =
+         gst_sdp_media_get_attribute_val (media, "ice-ufrag");
+     const gchar *tmp_pwd = gst_sdp_media_get_attribute_val (media, "ice-pwd");
+     if (tmp_ufrag && tmp_pwd) {
+       *ufrag = g_strdup (tmp_ufrag);
+       *pwd = g_strdup (tmp_pwd);
+       return;
+     }
+   }
+
+   /* then in the sdp message itself */
+   for (i = 0; i < gst_sdp_message_attributes_len (sdp); i++) {
+     const GstSDPAttribute *attr = gst_sdp_message_get_attribute (sdp, i);
+
+     /* the asserts hold because both out-params were reset above and the
+      * media-level search returned early on success */
+     if (g_strcmp0 (attr->key, "ice-ufrag") == 0) {
+       g_assert (!*ufrag);
+       *ufrag = g_strdup (attr->value);
+     } else if (g_strcmp0 (attr->key, "ice-pwd") == 0) {
+       g_assert (!*pwd);
+       *pwd = g_strdup (attr->value);
+     }
+   }
+   if (!*ufrag && !*pwd) {
+     /* Check in the medias themselves. According to JSEP, they should be
+      * identical FIXME: only for bundle-d streams */
+     for (i = 0; i < gst_sdp_message_medias_len (sdp); i++) {
+       const GstSDPMedia *media = gst_sdp_message_get_media (sdp, i);
+       const gchar *tmp_ufrag =
+           gst_sdp_media_get_attribute_val (media, "ice-ufrag");
+       const gchar *tmp_pwd = gst_sdp_media_get_attribute_val (media, "ice-pwd");
+       if (tmp_ufrag && tmp_pwd) {
+         *ufrag = g_strdup (tmp_ufrag);
+         *pwd = g_strdup (tmp_pwd);
+         break;
+       }
+     }
+   }
+ }
+
+ /* Parse the session-level "a=group:BUNDLE ..." attribute of @sdp into a
+  * NULL-terminated mid array stored in @bundled.
+  *
+  * Returns TRUE when there is no BUNDLE group or it parsed correctly;
+  * FALSE (with @error set) when the group exists but lists no mids. */
+ gboolean
+ _parse_bundle (GstSDPMessage * sdp, GStrv * bundled, GError ** error)
+ {
+   const gchar *group = gst_sdp_message_get_attribute_val (sdp, "group");
+
+   /* no BUNDLE group at all is perfectly valid */
+   if (group == NULL || !g_str_has_prefix (group, "BUNDLE "))
+     return TRUE;
+
+   *bundled = g_strsplit (group + strlen ("BUNDLE "), " ", 0);
+
+   if (!(*bundled)[0]) {
+     g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP,
+         "Invalid format for BUNDLE group, expected at least one mid (%s)",
+         group);
+     g_strfreev (*bundled);
+     *bundled = NULL;
+     return FALSE;
+   }
+
+   return TRUE;
+ }
+
+ /* Find the media section whose "mid" matches the first entry of the
+  * BUNDLE group @bundled. On success stores its index in @idx and
+  * returns TRUE; returns FALSE when no media matches. */
+ gboolean
+ _get_bundle_index (GstSDPMessage * sdp, GStrv bundled, guint * idx)
+ {
+   guint i, n_medias;
+
+   n_medias = gst_sdp_message_medias_len (sdp);
+   for (i = 0; i < n_medias; i++) {
+     const GstSDPMedia *media = gst_sdp_message_get_media (sdp, i);
+     const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid");
+
+     if (g_strcmp0 (mid, bundled[0]) == 0) {
+       *idx = i;
+       return TRUE;
+     }
+   }
+
+   return FALSE;
+ }
+
+ /* Return TRUE when @media carries an "a=bundle-only" attribute. */
+ gboolean
+ _media_is_bundle_only (const GstSDPMedia * media)
+ {
+   guint i, n_attrs;
+
+   n_attrs = gst_sdp_media_attributes_len (media);
+   for (i = 0; i < n_attrs; i++) {
+     const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i);
+
+     if (g_strcmp0 (attr->key, "bundle-only") == 0)
+       return TRUE;
+   }
+
+   return FALSE;
+ }
--- /dev/null
- /* FIXME :
+ /* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ * Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:gstadaptivedemux
+ * @short_description: Base class for adaptive demuxers
+ *
+ * What is an adaptive demuxer?
+ * Adaptive demuxers are special demuxers in the sense that they don't
+ * actually demux data received from upstream but download the data
+ * themselves.
+ *
+ * Adaptive formats (HLS, DASH, MSS) are composed of a manifest file and
+ * a set of fragments. The manifest describes the available media and
+ * the sequence of fragments to use. Each fragment contains a small
+ * part of the media (typically only a few seconds). It is possible for
+ * the manifest to have the same media available in different configurations
+ * (bitrates for example) so that the client can select the one that
+ * best suits its scenario (network fluctuation, hardware requirements...).
+ * It is possible to switch from one representation of the media to another
+ * during playback. That's why it is called 'adaptive', because it can be
+ * adapted to the client's needs.
+ *
+ * Architectural overview:
+ * The manifest is received by the demuxer in its sink pad and, upon receiving
+ * EOS, it parses the manifest and exposes the streams available in it. For
+ * each stream a source element will be created and will download the list
+ * of fragments one by one. Once a fragment is finished downloading, the next
+ * URI is set to the source element and it starts fetching it and pushing
+ * through the stream's pad. This implies that each stream is independent from
+ * each other as it runs on a separate thread.
+ *
+ * After downloading each fragment, the download rate of it is calculated and
+ * the demuxer has a chance to switch to a different bitrate if needed. The
+ * switch can be done by simply pushing a new caps before the next fragment
+ * when codecs are the same, or by exposing a new pad group if it needs
+ * a codec change.
+ *
+ * Extra features:
+ * - Not linked streams: Streams that are not-linked have their download threads
+ * interrupted to save network bandwidth. When they are
+ * relinked a reconfigure event is received and the
+ * stream is restarted.
+ *
+ * Subclasses:
+ * While GstAdaptiveDemux is responsible for the workflow, it knows nothing
+ * about the intrinsics of the subclass formats, so the subclasses are
+ * responsible for maintaining the manifest data structures and stream
+ * information.
+ */
+
+ /*
+ MT safety.
+ The following rules were observed while implementing MT safety in adaptive demux:
+ 1. If a variable is accessed from multiple threads and at least one thread
+ writes to it, then all the accesses needs to be done from inside a critical section.
+ 2. If thread A wants to join thread B then at the moment it calls gst_task_join
+ it must not hold any mutexes that thread B might take.
+
+ Adaptive demux API can be called from several threads. More, adaptive demux
+ starts some threads to monitor the download of fragments. In order to protect
+ accesses to shared variables (demux and streams) all the API functions that
+ can be run in different threads will need to get a mutex (manifest_lock)
+ when they start and release it when they end. Because some of those functions
+ can indirectly call other API functions (eg they can generate events or messages
+ that are processed in the same thread) the manifest_lock must be recursive.
+
+ The manifest_lock will serialize the public API making access to shared
+ variables safe. But some of these functions will try at some moment to join
+ threads created by adaptive demux, or to change the state of src elements
+ (which will block trying to join the src element streaming thread). Because
+ of rule 2, those functions will need to release the manifest_lock during the
+ call of gst_task_join. During this time they can be interrupted by other API calls.
+ For example, during the processing of a seek event, gst_adaptive_demux_stop_tasks
+ is called and this will join all threads. In order to prevent interruptions
+ during such period, all the API functions will also use a second lock: api_lock.
+ This will be taken at the beginning of the function and released at the end,
+ but this time this lock will not be temporarily released during join.
+ This lock will be used only by API calls (not by gst_adaptive_demux_stream_download_loop
+ or gst_adaptive_demux_updates_loop or _src_chain or _src_event) so it is safe
+ to hold it while joining the threads or changing the src element state. The
+ api_lock will serialise all external requests to adaptive demux. In order to
+ avoid deadlocks, if a function needs to acquire both manifest and api locks,
+ the api_lock will be taken first and the manifest_lock second.
+
+ By using the api_lock a thread is protected against other API calls. But when
+ temporarily dropping the manifest_lock, it will be vulnerable to changes from
+ threads that use only the manifest_lock and not the api_lock. These threads run
+ one of the following functions: gst_adaptive_demux_stream_download_loop,
+ gst_adaptive_demux_updates_loop, _src_chain, _src_event. In order to guarantee
+ that all operations during an API call are not impacted by other writes, the
+ above mentioned functions must check a cancelled flag every time they reacquire
+ the manifest_lock. If the flag is set, they must exit immediately, without
+ performing any changes on the shared data. In this way, an API call (eg seek
+ request) can set the cancel flag before releasing the manifest_lock and be sure
+ that the demux object and its streams are not changed by anybody else.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstadaptivedemux.h"
+ #include "gst/gst-i18n-plugin.h"
+ #include <gst/base/gstadapter.h>
+
+ GST_DEBUG_CATEGORY (adaptivedemux_debug);
+ #define GST_CAT_DEFAULT adaptivedemux_debug
+
+ /* NOTE(review): the call sites of these thresholds live outside this
+  * chunk — the descriptions below are inferred from the names and the
+  * property blurbs further down; confirm against usage. */
+ #define MAX_DOWNLOAD_ERROR_COUNT 3      /* presumably max consecutive fragment download failures */
+ #define DEFAULT_FAILED_COUNT 3  /* presumably max consecutive manifest update failures */
+ #define DEFAULT_CONNECTION_SPEED 0      /* 0 = calculate from downloaded fragments */
+ #define DEFAULT_BITRATE_LIMIT 0.8f      /* fraction of available bitrate used when switching alternates */
+ #define SRC_QUEUE_MAX_BYTES 20 * 1024 * 1024 /* For safety. Large enough to hold a segment. */
+ #define NUM_LOOKBACK_FRAGMENTS 3
+
+ /* Recursive lock serialising all access to shared demux/stream state;
+  * see the "MT safety" discussion at the top of this file. The TRACE
+  * logging helps diagnose lock-ordering problems. */
+ #define GST_MANIFEST_GET_LOCK(d) (&(GST_ADAPTIVE_DEMUX_CAST(d)->priv->manifest_lock))
+ #define GST_MANIFEST_LOCK(d) G_STMT_START { \
+     GST_TRACE("Locking from thread %p", g_thread_self()); \
+     g_rec_mutex_lock (GST_MANIFEST_GET_LOCK (d)); \
+     GST_TRACE("Locked from thread %p", g_thread_self()); \
+  } G_STMT_END
+
+ #define GST_MANIFEST_UNLOCK(d) G_STMT_START { \
+     GST_TRACE("Unlocking from thread %p", g_thread_self()); \
+     g_rec_mutex_unlock (GST_MANIFEST_GET_LOCK (d)); \
+  } G_STMT_END
+
+ /* Non-recursive lock serialising external API calls; when both locks
+  * are needed the api_lock is taken first (see MT safety notes). */
+ #define GST_API_GET_LOCK(d) (&(GST_ADAPTIVE_DEMUX_CAST(d)->priv->api_lock))
+ #define GST_API_LOCK(d)   g_mutex_lock (GST_API_GET_LOCK (d));
+ #define GST_API_UNLOCK(d) g_mutex_unlock (GST_API_GET_LOCK (d));
+
+ /* Protects demux and stream segment information: seeks may update the
+  * segment boundaries without stopping the download tasks. */
+ #define GST_ADAPTIVE_DEMUX_SEGMENT_GET_LOCK(d) (&GST_ADAPTIVE_DEMUX_CAST(d)->priv->segment_lock)
+ #define GST_ADAPTIVE_DEMUX_SEGMENT_LOCK(d) g_mutex_lock (GST_ADAPTIVE_DEMUX_SEGMENT_GET_LOCK (d))
+ #define GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK(d) g_mutex_unlock (GST_ADAPTIVE_DEMUX_SEGMENT_GET_LOCK (d))
+
+ /* GObject property IDs; the Tizen additions extend the stock
+  * connection-speed/bitrate-limit pair with bandwidth/resolution caps. */
+ enum
+ {
+   PROP_0,
+   PROP_CONNECTION_SPEED,
+   PROP_BITRATE_LIMIT,
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++  PROP_START_BANDWIDTH,
++  PROP_MIN_BANDWIDTH,
++  PROP_MAX_BANDWIDTH,
++  PROP_MAX_WIDTH,
++  PROP_MAX_HEIGHT,
++#endif
+   PROP_LAST
+ };
+
+ /* Internal, so not using GST_FLOW_CUSTOM_SUCCESS_N */
+ #define GST_ADAPTIVE_DEMUX_FLOW_SWITCH (GST_FLOW_CUSTOM_SUCCESS_2 + 1)
+
+ /* Instance-private state, registered via g_type_add_instance_private()
+  * in gst_adaptive_demux_get_type(). The lock protecting each field is
+  * noted inline — see the "MT safety" discussion above. */
+ struct _GstAdaptiveDemuxPrivate
+ {
+   GstAdapter *input_adapter;    /* protected by manifest_lock */
+   gint have_manifest;           /* MT safe */
+
+   GList *old_streams;           /* protected by manifest_lock */
+
+   GstTask *updates_task;        /* MT safe */
+   GRecMutex updates_lock;
+   GMutex updates_timed_lock;
+   GCond updates_timed_cond;     /* protected by updates_timed_lock */
+   gboolean stop_updates_task;   /* protected by updates_timed_lock */
+
+   /* used only from updates_task, no need to protect it */
+   gint update_failed_count;
+
+   guint32 segment_seqnum;       /* protected by manifest_lock */
+
+   /* main lock used to protect adaptive demux and all its streams.
+    * It serializes the adaptive demux public API.
+    */
+   GRecMutex manifest_lock;
+
+   /* condition to wait for manifest updates on a live stream.
+    * In order to signal the manifest_cond, the caller needs to hold both
+    * manifest_lock and manifest_update_lock (taken in this order)
+    */
+   GCond manifest_cond;
+   GMutex manifest_update_lock;
+
+   /* Lock and condition for prerolling streams before exposing */
+   GMutex preroll_lock;
+   GCond preroll_cond;
+   gint preroll_pending;
+
+   GMutex api_lock;
+
+   /* Protects demux and stream segment information
+    * Needed because seeks can update segment information
+    * without needing to stop tasks when they just want to
+    * update the segment boundaries */
+   GMutex segment_lock;
+
+   /* NOTE(review): no locking annotation here — confirm which lock
+    * guards qos_earliest_time at its usage sites */
+   GstClockTime qos_earliest_time;
+ };
+
+ /* Refcounted timer state — presumably shared between a thread blocked in
+  * gst_adaptive_demux_wait_until() and the GstClock callback that wakes
+  * it (inferred from the prototypes below; confirm against their
+  * bodies, which are outside this chunk). */
+ typedef struct _GstAdaptiveDemuxTimer
+ {
+   gint ref_count;               /* atomic reference count */
+   GCond *cond;                  /* signalled when the clock fires */
+   GMutex *mutex;                /* protects 'fired' and guards 'cond' */
+   GstClockID clock_id;
+   gboolean fired;               /* set once the clock callback ran */
+ } GstAdaptiveDemuxTimer;
+
+ static GstBinClass *parent_class = NULL;
+ static gint private_offset = 0;
+
+ static void gst_adaptive_demux_class_init (GstAdaptiveDemuxClass * klass);
+ static void gst_adaptive_demux_init (GstAdaptiveDemux * dec,
+ GstAdaptiveDemuxClass * klass);
+ static void gst_adaptive_demux_finalize (GObject * object);
+ static GstStateChangeReturn gst_adaptive_demux_change_state (GstElement *
+ element, GstStateChange transition);
+
+ static void gst_adaptive_demux_handle_message (GstBin * bin, GstMessage * msg);
+
+ static gboolean gst_adaptive_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static GstFlowReturn gst_adaptive_demux_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+ static gboolean gst_adaptive_demux_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_adaptive_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static gboolean
+ gst_adaptive_demux_push_src_event (GstAdaptiveDemux * demux, GstEvent * event);
+
+ static void gst_adaptive_demux_updates_loop (GstAdaptiveDemux * demux);
+ static void gst_adaptive_demux_stream_download_loop (GstAdaptiveDemuxStream *
+ stream);
+ static void gst_adaptive_demux_reset (GstAdaptiveDemux * demux);
+ static gboolean gst_adaptive_demux_prepare_streams (GstAdaptiveDemux * demux,
+ gboolean first_and_live);
+ static gboolean gst_adaptive_demux_expose_streams (GstAdaptiveDemux * demux);
+ static gboolean gst_adaptive_demux_is_live (GstAdaptiveDemux * demux);
+ static GstFlowReturn gst_adaptive_demux_stream_seek (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, gboolean forward, GstSeekFlags flags,
+ GstClockTime ts, GstClockTime * final_ts);
+ static gboolean gst_adaptive_demux_stream_has_next_fragment (GstAdaptiveDemux *
+ demux, GstAdaptiveDemuxStream * stream);
+ static gboolean gst_adaptive_demux_stream_select_bitrate (GstAdaptiveDemux *
+ demux, GstAdaptiveDemuxStream * stream, guint64 bitrate);
+ static GstFlowReturn
+ gst_adaptive_demux_stream_update_fragment_info (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static gint64
+ gst_adaptive_demux_stream_get_fragment_waiting_time (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn gst_adaptive_demux_update_manifest (GstAdaptiveDemux *
+ demux);
+ static GstFlowReturn
+ gst_adaptive_demux_update_manifest_default (GstAdaptiveDemux * demux);
+ static gboolean gst_adaptive_demux_has_next_period (GstAdaptiveDemux * demux);
+ static void gst_adaptive_demux_advance_period (GstAdaptiveDemux * demux);
+
+ static void gst_adaptive_demux_stream_free (GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn
+ gst_adaptive_demux_stream_push_event (GstAdaptiveDemuxStream * stream,
+ GstEvent * event);
+
+ static void gst_adaptive_demux_stop_manifest_update_task (GstAdaptiveDemux *
+ demux);
+ static void gst_adaptive_demux_start_manifest_update_task (GstAdaptiveDemux *
+ demux);
+
+ static void gst_adaptive_demux_start_tasks (GstAdaptiveDemux * demux,
+ gboolean start_preroll_streams);
+ static void gst_adaptive_demux_stop_tasks (GstAdaptiveDemux * demux,
+ gboolean stop_updates);
+ static GstFlowReturn gst_adaptive_demux_combine_flows (GstAdaptiveDemux *
+ demux);
+ static void
+ gst_adaptive_demux_stream_fragment_download_finish (GstAdaptiveDemuxStream *
+ stream, GstFlowReturn ret, GError * err);
+ static GstFlowReturn
+ gst_adaptive_demux_stream_data_received_default (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer);
+ static GstFlowReturn
+ gst_adaptive_demux_stream_finish_fragment_default (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream);
+ static GstFlowReturn
+ gst_adaptive_demux_stream_advance_fragment_unlocked (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstClockTime duration);
+ static gboolean
+ gst_adaptive_demux_wait_until (GstClock * clock, GCond * cond, GMutex * mutex,
+ GstClockTime end_time);
+ static gboolean gst_adaptive_demux_clock_callback (GstClock * clock,
+ GstClockTime time, GstClockID id, gpointer user_data);
+ static gboolean
+ gst_adaptive_demux_requires_periodical_playlist_update_default (GstAdaptiveDemux
+ * demux);
+
+ /* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
+  * method to get to the padtemplates */
+ GType
+ gst_adaptive_demux_get_type (void)
+ {
+   static gsize type = 0;
+
+   /* thread-safe one-shot registration via g_once_init_enter/leave */
+   if (g_once_init_enter (&type)) {
+     GType _type;
+     static const GTypeInfo info = {
+       sizeof (GstAdaptiveDemuxClass),
+       NULL,
+       NULL,
+       (GClassInitFunc) gst_adaptive_demux_class_init,
+       NULL,
+       NULL,
+       sizeof (GstAdaptiveDemux),
+       0,
+       (GInstanceInitFunc) gst_adaptive_demux_init,
+     };
+
+     _type = g_type_register_static (GST_TYPE_BIN,
+         "GstAdaptiveDemux", &info, G_TYPE_FLAG_ABSTRACT);
+
+     /* private data is added manually so the offset is available to the
+      * inline accessor below */
+     private_offset =
+         g_type_add_instance_private (_type, sizeof (GstAdaptiveDemuxPrivate));
+
+     g_once_init_leave (&type, _type);
+   }
+   return type;
+ }
+
+ /* Return @self's private struct using the offset recorded at type
+  * registration time (see gst_adaptive_demux_get_type()). */
+ static inline GstAdaptiveDemuxPrivate *
+ gst_adaptive_demux_get_instance_private (GstAdaptiveDemux * self)
+ {
+   return (G_STRUCT_MEMBER_P (self, private_offset));
+ }
+
+ /* GObject property setter. Takes both the API and manifest locks so a
+  * property change never races with the download/update tasks. */
+ static void
+ gst_adaptive_demux_set_property (GObject * object, guint prop_id,
+     const GValue * value, GParamSpec * pspec)
+ {
+   GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX (object);
+
+   GST_API_LOCK (demux);
+   GST_MANIFEST_LOCK (demux);
+
+   switch (prop_id) {
+     case PROP_CONNECTION_SPEED:
+       /* property is exposed in kbps but stored internally in bps */
+       demux->connection_speed = g_value_get_uint (value) * 1000;
+       GST_DEBUG_OBJECT (demux, "Connection speed set to %u",
+           demux->connection_speed);
+       break;
+     case PROP_BITRATE_LIMIT:
+       demux->bitrate_limit = g_value_get_float (value);
+       break;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++    case PROP_START_BANDWIDTH:
++      /* drop any previous value before copying the new one */
++      g_free(demux->start_bandwidth);
++      demux->start_bandwidth = g_strdup (g_value_get_string (value));
++      break;
++    case PROP_MIN_BANDWIDTH:
++      demux->min_bandwidth = g_value_get_int (value);
++      break;
++    case PROP_MAX_BANDWIDTH:
++      demux->max_bandwidth = g_value_get_int (value);
++      break;
++    case PROP_MAX_WIDTH:
++      demux->max_width = g_value_get_int (value);
++      break;
++    case PROP_MAX_HEIGHT:
++      demux->max_height = g_value_get_int (value);
++      break;
++#endif
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+       break;
+   }
+
+   GST_MANIFEST_UNLOCK (demux);
+   GST_API_UNLOCK (demux);
+ }
+
+ /* GObject property getter. Note: only the manifest lock is taken here,
+  * unlike the setter which also takes the API lock. */
+ static void
+ gst_adaptive_demux_get_property (GObject * object, guint prop_id,
+     GValue * value, GParamSpec * pspec)
+ {
+   GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX (object);
+
+   GST_MANIFEST_LOCK (demux);
+
+   switch (prop_id) {
+     case PROP_CONNECTION_SPEED:
+       /* stored in bps, exposed in kbps */
+       g_value_set_uint (value, demux->connection_speed / 1000);
+       break;
+     case PROP_BITRATE_LIMIT:
+       g_value_set_float (value, demux->bitrate_limit);
+       break;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++    case PROP_START_BANDWIDTH:
++      /* report the unset state as an empty string, not NULL */
++      if (demux->start_bandwidth == NULL)
++        g_value_set_static_string (value, "");
++      else
++        g_value_set_string (value, demux->start_bandwidth);
++      break;
++    case PROP_MIN_BANDWIDTH:
++      g_value_set_int (value, demux->min_bandwidth);
++      break;
++    case PROP_MAX_BANDWIDTH:
++      g_value_set_int (value, demux->max_bandwidth);
++      break;
++    case PROP_MAX_WIDTH:
++      g_value_set_int (value, demux->max_width);
++      break;
++    case PROP_MAX_HEIGHT:
++      g_value_set_int (value, demux->max_height);
++      break;
++#endif
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+       break;
+   }
+
+   GST_MANIFEST_UNLOCK (demux);
+ }
+
+ /* Class initializer: installs properties, wires the state-change and
+  * bin message handlers, and sets the default subclass vfunc
+  * implementations. */
+ static void
+ gst_adaptive_demux_class_init (GstAdaptiveDemuxClass * klass)
+ {
+   GObjectClass *gobject_class;
+   GstElementClass *gstelement_class;
+   GstBinClass *gstbin_class;
+
+   gobject_class = G_OBJECT_CLASS (klass);
+   gstelement_class = GST_ELEMENT_CLASS (klass);
+   gstbin_class = GST_BIN_CLASS (klass);
+
+   GST_DEBUG_CATEGORY_INIT (adaptivedemux_debug, "adaptivedemux", 0,
+       "Base Adaptive Demux");
+
+   parent_class = g_type_class_peek_parent (klass);
+
+   /* private data was registered manually in get_type(); adjust the
+    * offset now that the class exists */
+   if (private_offset != 0)
+     g_type_class_adjust_private_offset (klass, &private_offset);
+
+   gobject_class->set_property = gst_adaptive_demux_set_property;
+   gobject_class->get_property = gst_adaptive_demux_get_property;
+   gobject_class->finalize = gst_adaptive_demux_finalize;
+
+   g_object_class_install_property (gobject_class, PROP_CONNECTION_SPEED,
+       g_param_spec_uint ("connection-speed", "Connection Speed",
+           "Network connection speed in kbps (0 = calculate from downloaded"
+           " fragments)", 0, G_MAXUINT / 1000, DEFAULT_CONNECTION_SPEED,
+           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+   /* FIXME 2.0: rename this property to bandwidth-usage or any better name */
+   g_object_class_install_property (gobject_class, PROP_BITRATE_LIMIT,
+       g_param_spec_float ("bitrate-limit",
+           "Bitrate limit in %",
+           "Limit of the available bitrate to use when switching to alternates.",
+           0, 1, DEFAULT_BITRATE_LIMIT,
+           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++  /* Tizen-specific bandwidth/resolution constraints for variant selection */
++  g_object_class_install_property (gobject_class, PROP_START_BANDWIDTH,
++      g_param_spec_string ("start-bandwidth", "Start Bandwidth",
++          "Set bandwidth to use when the playback is started. (LOWEST, HIGHEST, AVERAGE or specific bandwidth value)",
++          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++  g_object_class_install_property (gobject_class, PROP_MIN_BANDWIDTH,
++      g_param_spec_int ("min-bandwidth",
++          "Min Bandwidth limit",
++          "Minimum limit of the available bandwidth to use when switching to alternates. (-1 = no limit)",
++          -1, G_MAXINT, DEFAULT_BANDWIDTH_LIMIT,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++  g_object_class_install_property (gobject_class, PROP_MAX_BANDWIDTH,
++      g_param_spec_int ("max-bandwidth",
++          "Max Bandwidth limit",
++          "Maximum limit of the available bandwidth to use when switching to alternates. (-1 = no limit)",
++          -1, G_MAXINT, DEFAULT_BANDWIDTH_LIMIT,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++  g_object_class_install_property (gobject_class, PROP_MAX_WIDTH,
++      g_param_spec_int ("max-video-width",
++          "Max video width limit",
++          "Maximum limit of the available video width to use when switching to alternates. (-1 = no limit)",
++          -1, G_MAXINT, DEFAULT_RESOLUTION_LIMIT,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++  g_object_class_install_property (gobject_class, PROP_MAX_HEIGHT,
++      g_param_spec_int ("max-video-height",
++          "Max video height limit",
++          "Maximum limit of the available video height to use when switching to alternates. (-1 = no limit)",
++          -1, G_MAXINT, DEFAULT_RESOLUTION_LIMIT,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
++
+   gstelement_class->change_state = gst_adaptive_demux_change_state;
+
+   gstbin_class->handle_message = gst_adaptive_demux_handle_message;
+
+   /* default vfuncs that subclasses may override */
+   klass->data_received = gst_adaptive_demux_stream_data_received_default;
+   klass->finish_fragment = gst_adaptive_demux_stream_finish_fragment_default;
+   klass->update_manifest = gst_adaptive_demux_update_manifest_default;
+   klass->requires_periodical_playlist_update =
+       gst_adaptive_demux_requires_periodical_playlist_update_default;
+
+ }
+
+ /* Instance initializer: sets up the downloader, locks, tasks, realtime
+  * clock offset and the sink pad. */
+ static void
+ gst_adaptive_demux_init (GstAdaptiveDemux * demux,
+     GstAdaptiveDemuxClass * klass)
+ {
+   GstPadTemplate *pad_template;
+   GstClockType clock_type = GST_CLOCK_TYPE_OTHER;
+   GObjectClass *gobject_class;
+
+   GST_DEBUG_OBJECT (demux, "gst_adaptive_demux_init");
+
+   demux->priv = gst_adaptive_demux_get_instance_private (demux);
+   demux->priv->input_adapter = gst_adapter_new ();
+   demux->downloader = gst_uri_downloader_new ();
+   gst_uri_downloader_set_parent (demux->downloader, GST_ELEMENT_CAST (demux));
+   demux->stream_struct_size = sizeof (GstAdaptiveDemuxStream);
+   demux->priv->segment_seqnum = gst_util_seqnum_next ();
+   demux->have_group_id = FALSE;
+   demux->group_id = G_MAXUINT;
+
+   gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+
+   gst_bin_set_suppressed_flags (GST_BIN_CAST (demux),
+       GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
+
+   /* Determine the offset between the system clock and UTC so wall-clock
+    * times can be derived even when the clock is not a realtime clock. */
+   demux->realtime_clock = gst_system_clock_obtain ();
+   g_assert (demux->realtime_clock != NULL);
+   gobject_class = G_OBJECT_GET_CLASS (demux->realtime_clock);
+   if (g_object_class_find_property (gobject_class, "clock-type")) {
+     g_object_get (demux->realtime_clock, "clock-type", &clock_type, NULL);
+   } else {
+     GST_WARNING_OBJECT (demux,
+         "System clock does not have clock-type property");
+   }
+   if (clock_type == GST_CLOCK_TYPE_REALTIME) {
+     demux->clock_offset = 0;
+   } else {
+     GDateTime *utc_now;
+     GstClockTime rtc_now;
+
+     /* offset in microseconds: UTC "now" minus the clock's current time */
+     utc_now = g_date_time_new_now_utc ();
+     rtc_now = gst_clock_get_time (demux->realtime_clock);
+     demux->clock_offset =
+         g_date_time_to_unix (utc_now) * G_TIME_SPAN_SECOND +
+         g_date_time_get_microsecond (utc_now) - GST_TIME_AS_USECONDS (rtc_now);
+     g_date_time_unref (utc_now);
+   }
+   g_rec_mutex_init (&demux->priv->updates_lock);
+   demux->priv->updates_task =
+       gst_task_new ((GstTaskFunction) gst_adaptive_demux_updates_loop,
+       demux, NULL);
+   gst_task_set_lock (demux->priv->updates_task, &demux->priv->updates_lock);
+
+   g_mutex_init (&demux->priv->updates_timed_lock);
+   g_cond_init (&demux->priv->updates_timed_cond);
+
+   g_cond_init (&demux->priv->manifest_cond);
+   g_mutex_init (&demux->priv->manifest_update_lock);
+
+   g_rec_mutex_init (&demux->priv->manifest_lock);
+   g_mutex_init (&demux->priv->api_lock);
+   g_mutex_init (&demux->priv->segment_lock);
+
+   g_cond_init (&demux->priv->preroll_cond);
+   g_mutex_init (&demux->priv->preroll_lock);
+
+   pad_template =
+       gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
+   g_return_if_fail (pad_template != NULL);
+
+   demux->sinkpad = gst_pad_new_from_template (pad_template, "sink");
+   gst_pad_set_event_function (demux->sinkpad,
+       GST_DEBUG_FUNCPTR (gst_adaptive_demux_sink_event));
+   gst_pad_set_chain_function (demux->sinkpad,
+       GST_DEBUG_FUNCPTR (gst_adaptive_demux_sink_chain));
+
+   /* Properties */
+   demux->bitrate_limit = DEFAULT_BITRATE_LIMIT;
+   demux->connection_speed = DEFAULT_CONNECTION_SPEED;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++  demux->start_bandwidth = NULL;
++  demux->min_bandwidth = DEFAULT_BANDWIDTH_LIMIT;
++  demux->max_bandwidth = DEFAULT_BANDWIDTH_LIMIT;
++  demux->max_width = DEFAULT_RESOLUTION_LIMIT;
++  demux->max_height = DEFAULT_RESOLUTION_LIMIT;
++#endif
+
+   gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+ }
+
+ /* GObject finalize vfunc: releases everything created in _init() --
+  * the input adapter, the URI downloader, the updates task and all of
+  * the mutexes/conds -- then chains up to the parent class. */
+ static void
+ gst_adaptive_demux_finalize (GObject * object)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (object);
+ GstAdaptiveDemuxPrivate *priv = demux->priv;
+
+ GST_DEBUG_OBJECT (object, "finalize");
+
+ g_object_unref (priv->input_adapter);
+ g_object_unref (demux->downloader);
+
+ g_mutex_clear (&priv->updates_timed_lock);
+ g_cond_clear (&priv->updates_timed_cond);
+ g_mutex_clear (&demux->priv->manifest_update_lock);
+ g_cond_clear (&demux->priv->manifest_cond);
+ g_object_unref (priv->updates_task);
+ g_rec_mutex_clear (&priv->updates_lock);
+ g_rec_mutex_clear (&demux->priv->manifest_lock);
+ g_mutex_clear (&demux->priv->api_lock);
+ g_mutex_clear (&demux->priv->segment_lock);
+ /* realtime_clock may be NULL if no clock was obtained during _init() */
+ if (demux->realtime_clock) {
+ gst_object_unref (demux->realtime_clock);
+ demux->realtime_clock = NULL;
+ }
+
+ g_cond_clear (&demux->priv->preroll_cond);
+ g_mutex_clear (&demux->priv->preroll_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* GstElement change_state vfunc: resets the demuxer and toggles the
+  * atomic 'running' flag around the PAUSED<->READY transitions. The
+  * parent's change_state is deliberately invoked with no locks held
+  * (see the comment before the call). */
+ static GstStateChangeReturn
+ gst_adaptive_demux_change_state (GstElement * element,
+ GstStateChange transition)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (element);
+ GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* cancel downloads before taking the locks so blocked download
+ * threads can bail out */
+ if (g_atomic_int_compare_and_exchange (&demux->running, TRUE, FALSE))
+ GST_DEBUG_OBJECT (demux, "demuxer has stopped running");
+ gst_uri_downloader_cancel (demux->downloader);
+
+ GST_API_LOCK (demux);
+ GST_MANIFEST_LOCK (demux);
+ gst_adaptive_demux_reset (demux);
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ GST_API_LOCK (demux);
+ GST_MANIFEST_LOCK (demux);
+ gst_adaptive_demux_reset (demux);
+ /* Clear "cancelled" flag in uridownloader since subclass might want to
+ * use uridownloader to fetch another manifest */
+ gst_uri_downloader_reset (demux->downloader);
+ if (g_atomic_int_get (&demux->priv->have_manifest))
+ gst_adaptive_demux_start_manifest_update_task (demux);
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ if (g_atomic_int_compare_and_exchange (&demux->running, FALSE, TRUE))
+ GST_DEBUG_OBJECT (demux, "demuxer has started running");
+ break;
+ default:
+ break;
+ }
+
+ /* this must be run without MANIFEST_LOCK taken.
+ * For PLAYING to PLAYING state changes, it will want to take a lock in
+ * src element and that lock is held while the streaming thread is running.
+ * The streaming thread will take the MANIFEST_LOCK, leading to a deadlock.
+ */
+ result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ return result;
+ }
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++/* GstStructureForeachFunc invoked over the request/response header
++ * structures of an "http-headers" sticky event: caches the User-Agent
++ * string and any Set-Cookie values on the demux object. Always returns
++ * TRUE so iteration continues over every field. */
++static gboolean
++gst_adaptive_demux_check_http_header (GQuark field_id, const GValue * value,
++ gpointer data)
++{
++ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (data);
++
++ if (value
++ && g_ascii_strcasecmp (g_quark_to_string (field_id), "User-Agent") == 0) {
++ /* replace any previously cached agent string */
++ if (demux->user_agent)
++ g_free (demux->user_agent);
++ demux->user_agent = g_value_dup_string (value);
++ GST_INFO_OBJECT (demux, "User-Agent : %s",
++ (demux->user_agent) ? (demux->user_agent) : NULL);
++ }
++
++ if (value
++ && g_ascii_strcasecmp (g_quark_to_string (field_id), "Set-Cookie") == 0) {
++ guint i = 0;
++ /* build a NULL-terminated string vector from the GstValueArray */
++ gchar **cookies =
++ (gchar **) g_malloc0 ((gst_value_array_get_size (value) +
++ 1) * sizeof (gchar *));
++
++ for (i = 0; i < gst_value_array_get_size (value); i++) {
++ GST_INFO_OBJECT (demux, "Cookie : %s",
++ g_value_get_string (gst_value_array_get_value (value, i)));
++ cookies[i] = g_value_dup_string (gst_value_array_get_value (value, i));
++ }
++ cookies[i] = NULL;
++ if (demux->cookies)
++ g_strfreev (demux->cookies);
++ /* NOTE(review): 'cookies' is already a fresh heap copy; the extra
++ * g_strdupv()/g_strfreev() round-trip duplicates it a second time
++ * for no benefit. */
++ demux->cookies = g_strdupv (cookies);
++ g_strfreev (cookies);
++ }
++ return TRUE;
++}
++#endif
++
+ /* Sink pad event handler. FLUSH_STOP resets the demuxer; EOS marks the
+  * end of the manifest download and triggers manifest parsing and stream
+  * creation; SEGMENT events are swallowed because the demuxer pushes its
+  * own segments downstream. */
+ static gboolean
+ gst_adaptive_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+ gboolean ret;
+
+ switch (event->type) {
+ case GST_EVENT_FLUSH_STOP:{
+ GST_API_LOCK (demux);
+ GST_MANIFEST_LOCK (demux);
+
+ gst_adaptive_demux_reset (demux);
+
+ ret = gst_pad_event_default (pad, parent, event);
+
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+
+ return ret;
+ }
+ case GST_EVENT_EOS:{
+ GstAdaptiveDemuxClass *demux_class;
+ GstQuery *query;
+ gboolean query_res;
+ /* NOTE(review): this 'ret' shadows the outer 'ret' declared at the
+ * top of the function; only the inner one is used in this case. */
+ gboolean ret = TRUE;
+ gsize available;
+ GstBuffer *manifest_buffer;
+
+ GST_API_LOCK (demux);
+ GST_MANIFEST_LOCK (demux);
+
+ demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ available = gst_adapter_available (demux->priv->input_adapter);
+
+ if (available == 0) {
+ GST_WARNING_OBJECT (demux, "Received EOS without a manifest.");
+ ret = gst_pad_event_default (pad, parent, event);
+
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+
+ return ret;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Got EOS on the sink pad: manifest fetched");
+
+ /* Need to get the URI to use it as a base to generate the fragment's
+ * uris */
+ query = gst_query_new_uri ();
+ query_res = gst_pad_peer_query (pad, query);
+ if (query_res) {
+ gchar *uri, *redirect_uri;
+ gboolean permanent;
+
+ gst_query_parse_uri (query, &uri);
+ gst_query_parse_uri_redirection (query, &redirect_uri);
+ gst_query_parse_uri_redirection_permanent (query, &permanent);
+
+ /* On a permanent redirect the redirect target becomes the manifest
+ * URI; otherwise keep the original URI and record the redirect as
+ * base URI for resolving relative fragment URLs. */
+ if (permanent && redirect_uri) {
+ demux->manifest_uri = redirect_uri;
+ demux->manifest_base_uri = NULL;
+ g_free (uri);
+ } else {
+ demux->manifest_uri = uri;
+ demux->manifest_base_uri = redirect_uri;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Fetched manifest at URI: %s (base: %s)",
+ demux->manifest_uri, GST_STR_NULL (demux->manifest_base_uri));
+ } else {
+ GST_WARNING_OBJECT (demux, "Upstream URI query failed.");
+ }
+ gst_query_unref (query);
+
+ /* Let the subclass parse the manifest */
+ manifest_buffer =
+ gst_adapter_take_buffer (demux->priv->input_adapter, available);
+ if (!demux_class->process_manifest (demux, manifest_buffer)) {
+ /* In most cases, this will happen if we set a wrong url in the
+ * source element and we have received the 404 HTML response instead of
+ * the manifest */
+ GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid manifest."),
+ (NULL));
+ ret = FALSE;
+ } else {
+ g_atomic_int_set (&demux->priv->have_manifest, TRUE);
+ }
+ gst_buffer_unref (manifest_buffer);
+
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_element (GST_OBJECT_CAST (demux),
+ gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
+ "manifest-uri", G_TYPE_STRING,
+ demux->manifest_uri, "uri", G_TYPE_STRING,
+ demux->manifest_uri,
+ "manifest-download-start", GST_TYPE_CLOCK_TIME,
+ GST_CLOCK_TIME_NONE,
+ "manifest-download-stop", GST_TYPE_CLOCK_TIME,
+ gst_util_get_timestamp (), NULL)));
+
+ if (ret) {
+ /* Send duration message */
+ if (!gst_adaptive_demux_is_live (demux)) {
+ GstClockTime duration = demux_class->get_duration (demux);
+
+ if (duration != GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux,
+ "Sending duration message : %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (duration));
+ gst_element_post_message (GST_ELEMENT (demux),
+ gst_message_new_duration_changed (GST_OBJECT (demux)));
+ } else {
+ GST_DEBUG_OBJECT (demux,
+ "media duration unknown, can not send the duration message");
+ }
+ }
+
+ if (demux->next_streams) {
+ gst_adaptive_demux_prepare_streams (demux,
+ gst_adaptive_demux_is_live (demux));
+ gst_adaptive_demux_start_tasks (demux, TRUE);
+ gst_adaptive_demux_start_manifest_update_task (demux);
+ } else {
+ /* no streams */
+ GST_WARNING_OBJECT (demux, "No streams created from manifest");
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("This file contains no playable streams.")),
+ ("No known stream formats found at the Manifest"));
+ ret = FALSE;
+ }
+
+ }
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+
+ gst_event_unref (event);
+ return ret;
+ }
+ case GST_EVENT_SEGMENT:
+ /* Swallow newsegments, we'll push our own */
+ gst_event_unref (event);
+ return TRUE;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ /* Tizen: harvest User-Agent / Set-Cookie values from upstream
++ * "http-headers" sticky events; falls through to the default
++ * handler below so the event still propagates. */
++ case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:{
++ const GstStructure *structure;
++ GstStructure *req_headers = NULL;
++ GstStructure *res_headers = NULL;
++
++ structure = gst_event_get_structure (event);
++ if (gst_structure_has_name (structure, "http-headers")) {
++ if (gst_structure_has_field (structure, "request-headers")) {
++ gst_structure_get (structure, "request-headers", GST_TYPE_STRUCTURE,
++ &req_headers, NULL);
++ gst_structure_foreach (req_headers,
++ gst_adaptive_demux_check_http_header, demux);
++ gst_structure_free (req_headers);
++ }
++ if (gst_structure_has_field (structure, "response-headers")) {
++ gst_structure_get (structure, "response-headers", GST_TYPE_STRUCTURE,
++ &res_headers, NULL);
++ gst_structure_foreach (res_headers,
++ gst_adaptive_demux_check_http_header, demux);
++ gst_structure_free (res_headers);
++ }
++ }
++ break;
++ }
++#endif
++
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+ }
+
+ /* Sink pad chain function: the manifest arrives as ordinary buffers,
+  * which are simply accumulated in the input adapter. Parsing happens
+  * only when EOS is received (see gst_adaptive_demux_sink_event). */
+ static GstFlowReturn
+ gst_adaptive_demux_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+
+ GST_MANIFEST_LOCK (demux);
+
+ /* adapter takes ownership of the buffer */
+ gst_adapter_push (demux->priv->input_adapter, buffer);
+
+ GST_INFO_OBJECT (demux, "Received manifest buffer, total size is %i bytes",
+ (gint) gst_adapter_available (demux->priv->input_adapter));
+
+ GST_MANIFEST_UNLOCK (demux);
+ return GST_FLOW_OK;
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Tears down all stream state: stops the tasks, sends EOS on and removes
+  * every exposed pad, frees all stream lists (current, prepared, next and
+  * old) and clears the manifest data so a new manifest can be received. */
+ static void
+ gst_adaptive_demux_reset (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ GList *iter;
+ GList *old_streams;
+ GstEvent *eos;
+
+ /* take ownership of old_streams before releasing the manifest_lock in
+ * gst_adaptive_demux_stop_tasks
+ */
+ old_streams = demux->priv->old_streams;
+ demux->priv->old_streams = NULL;
+
+ gst_adaptive_demux_stop_tasks (demux, TRUE);
+
+ /* let the subclass clean up its own state first */
+ if (klass->reset)
+ klass->reset (demux);
+
+ eos = gst_event_new_eos ();
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ if (stream->pad) {
+ gst_pad_push_event (stream->pad, gst_event_ref (eos));
+ gst_pad_set_active (stream->pad, FALSE);
+
+ gst_element_remove_pad (GST_ELEMENT_CAST (demux), stream->pad);
+ }
+ gst_adaptive_demux_stream_free (stream);
+ }
+ gst_event_unref (eos);
+ g_list_free (demux->streams);
+ demux->streams = NULL;
+ if (demux->prepared_streams) {
+ g_list_free_full (demux->prepared_streams,
+ (GDestroyNotify) gst_adaptive_demux_stream_free);
+ demux->prepared_streams = NULL;
+ }
+ if (demux->next_streams) {
+ g_list_free_full (demux->next_streams,
+ (GDestroyNotify) gst_adaptive_demux_stream_free);
+ demux->next_streams = NULL;
+ }
+
+ if (old_streams) {
+ g_list_free_full (old_streams,
+ (GDestroyNotify) gst_adaptive_demux_stream_free);
+ }
+
+ /* stop_tasks dropped the lock, so old_streams may have refilled */
+ if (demux->priv->old_streams) {
+ g_list_free_full (demux->priv->old_streams,
+ (GDestroyNotify) gst_adaptive_demux_stream_free);
+ demux->priv->old_streams = NULL;
+ }
+
+ g_free (demux->manifest_uri);
+ g_free (demux->manifest_base_uri);
+ demux->manifest_uri = NULL;
+ demux->manifest_base_uri = NULL;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ /* NOTE(review): g_free(NULL)/g_strfreev(NULL) are no-ops, so these
++ * guards are redundant (but harmless). */
++ if (demux->user_agent)
++ g_free (demux->user_agent);
++ if (demux->cookies)
++ g_strfreev (demux->cookies);
++ demux->user_agent = NULL;
++ demux->cookies = NULL;
++#endif
+
+ gst_adapter_clear (demux->priv->input_adapter);
+ g_atomic_int_set (&demux->priv->have_manifest, FALSE);
+
+ gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+
+ demux->have_group_id = FALSE;
+ demux->group_id = G_MAXUINT;
+ demux->priv->segment_seqnum = gst_util_seqnum_next ();
+ }
+
+ /* GstBin handle_message vfunc: intercepts ERROR messages posted by the
+  * internal source elements, maps them to the owning stream and converts
+  * them into a retryable download failure instead of letting the error
+  * propagate to the application. Other messages chain up unchanged. */
+ static void
+ gst_adaptive_demux_handle_message (GstBin * bin, GstMessage * msg)
+ {
+ GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (bin);
+
+ switch (GST_MESSAGE_TYPE (msg)) {
+ case GST_MESSAGE_ERROR:{
+ GList *iter;
+ GstAdaptiveDemuxStream *stream = NULL;
+ GError *err = NULL;
+ gchar *debug = NULL;
+ gchar *new_error = NULL;
+ const GstStructure *details = NULL;
+
+ GST_MANIFEST_LOCK (demux);
+
+ /* find the stream whose src element posted the error */
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *cur = iter->data;
+ if (gst_object_has_as_ancestor (GST_MESSAGE_SRC (msg),
+ GST_OBJECT_CAST (cur->src))) {
+ stream = cur;
+ break;
+ }
+ }
+ if (stream == NULL) {
+ for (iter = demux->prepared_streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *cur = iter->data;
+ if (gst_object_has_as_ancestor (GST_MESSAGE_SRC (msg),
+ GST_OBJECT_CAST (cur->src))) {
+ stream = cur;
+ break;
+ }
+ }
+ if (stream == NULL) {
+ GST_WARNING_OBJECT (demux,
+ "Failed to locate stream for errored element");
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ /* Tizen: release the manifest lock taken above; without this the
++ * early 'break' would leak the lock */
++ GST_MANIFEST_UNLOCK (demux);
++#endif
+ break;
+ }
+ }
+
+ gst_message_parse_error (msg, &err, &debug);
+
+ GST_WARNING_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream),
+ "Source posted error: %d:%d %s (%s)", err->domain, err->code,
+ err->message, debug);
+
+ /* fold the debug string into the error message for better logs */
+ if (debug)
+ new_error = g_strdup_printf ("%s: %s\n", err->message, debug);
+ if (new_error) {
+ g_free (err->message);
+ err->message = new_error;
+ }
+
+ gst_message_parse_error_details (msg, &details);
+ if (details) {
+ gst_structure_get_uint (details, "http-status-code",
+ &stream->last_status_code);
+ }
+
+ /* error, but ask to retry */
+ gst_adaptive_demux_stream_fragment_download_finish (stream,
+ GST_FLOW_CUSTOM_ERROR, err);
+
+ g_error_free (err);
+ g_free (debug);
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ /* message fully handled here; do not chain up */
+ gst_message_unref (msg);
+ msg = NULL;
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (msg)
+ GST_BIN_CLASS (parent_class)->handle_message (bin, msg);
+ }
+
+ /* Public helper for subclasses: records the size of the (subclass-
+  * extended) stream structure that gst_adaptive_demux_stream_new() will
+  * allocate. Takes the API and manifest locks for the assignment. */
+ void
+ gst_adaptive_demux_set_stream_struct_size (GstAdaptiveDemux * demux,
+ gsize struct_size)
+ {
+ GST_API_LOCK (demux);
+ GST_MANIFEST_LOCK (demux);
+ demux->stream_struct_size = struct_size;
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Activates a stream's source pad and pushes the STREAM_START event,
+  * propagating (or creating) a group id shared with the sink pad's
+  * STREAM_START. Marks the stream discont so the first buffer after
+  * preparation carries the DISCONT flag. Always returns TRUE. */
+ static gboolean
+ gst_adaptive_demux_prepare_stream (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstPad *pad = stream->pad;
+ gchar *name = gst_pad_get_name (pad);
+ GstEvent *event;
+ gchar *stream_id;
+
+ gst_pad_set_active (pad, TRUE);
+ stream->need_header = TRUE;
+
+ stream_id = gst_pad_create_stream_id (pad, GST_ELEMENT_CAST (demux), name);
+
+ /* reuse the upstream group id when the sink pad saw one */
+ event =
+ gst_pad_get_sticky_event (GST_ADAPTIVE_DEMUX_SINK_PAD (demux),
+ GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &demux->group_id))
+ demux->have_group_id = TRUE;
+ else
+ demux->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!demux->have_group_id) {
+ demux->have_group_id = TRUE;
+ demux->group_id = gst_util_group_id_next ();
+ }
+ event = gst_event_new_stream_start (stream_id);
+ if (demux->have_group_id)
+ gst_event_set_group_id (event, demux->group_id);
+
+ gst_pad_push_event (pad, event);
+ g_free (stream_id);
+ g_free (name);
+
+ GST_DEBUG_OBJECT (demux, "Preparing srcpad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+ stream->discont = TRUE;
+
+ return TRUE;
+ }
+
+ /* Applies any pending caps to the stream's pad and adds the pad to the
+  * element. The manifest lock is dropped around gst_element_add_pad()
+  * because pad exposure can call back into the application. Returns the
+  * result of gst_element_add_pad(). */
+ static gboolean
+ gst_adaptive_demux_expose_stream (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ gboolean ret;
+ GstPad *pad = stream->pad;
+ GstCaps *caps;
+
+ if (stream->pending_caps) {
+ gst_pad_set_caps (pad, stream->pending_caps);
+ caps = stream->pending_caps;
+ stream->pending_caps = NULL;
+ } else {
+ caps = gst_pad_get_current_caps (pad);
+ }
+
+ GST_DEBUG_OBJECT (demux, "Exposing srcpad %s:%s with caps %" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+ if (caps)
+ gst_caps_unref (caps);
+
+ /* keep a ref for the duration of the unlocked add_pad call */
+ gst_object_ref (pad);
+
+ /* Don't hold the manifest lock while exposing a pad */
+ GST_MANIFEST_UNLOCK (demux);
+ ret = gst_element_add_pad (GST_ELEMENT_CAST (demux), pad);
+ GST_MANIFEST_LOCK (demux);
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Returns the subclass-provided presentation time offset for a stream,
+  * or 0 when the subclass does not implement get_presentation_offset. */
+ static GstClockTime
+ gst_adaptive_demux_stream_get_presentation_offset (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemuxClass *klass;
+
+ klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->get_presentation_offset == NULL)
+ return 0;
+
+ return klass->get_presentation_offset (demux, stream);
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Returns the subclass-provided start time of the current period, or 0
+  * when the subclass does not implement get_period_start_time. */
+ static GstClockTime
+ gst_adaptive_demux_get_period_start_time (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *klass;
+
+ klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->get_period_start_time == NULL)
+ return 0;
+
+ return klass->get_period_start_time (demux);
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Moves next_streams to prepared_streams, runs prepare_stream on each,
+  * and computes every stream's segment from the demuxer segment, the
+  * period start time and the per-stream presentation offset. For the
+  * first preparation of a live stream it also seeks the demuxer segment
+  * to the earliest fragment timestamp found. Returns FALSE if unconsumed
+  * prepared streams already exist. */
+ static gboolean
+ gst_adaptive_demux_prepare_streams (GstAdaptiveDemux * demux,
+ gboolean first_and_live)
+ {
+ GList *iter;
+ GstClockTime period_start, min_pts = GST_CLOCK_TIME_NONE;
+
+ g_return_val_if_fail (demux->next_streams != NULL, FALSE);
+ if (demux->prepared_streams != NULL) {
+ /* Old streams that were never exposed, due to a seek or so */
+ GST_FIXME_OBJECT (demux,
+ "Preparing new streams without cleaning up old ones!");
+ return FALSE;
+ }
+
+ demux->prepared_streams = demux->next_streams;
+ demux->next_streams = NULL;
+
+ if (!gst_adaptive_demux_is_running (demux)) {
+ GST_DEBUG_OBJECT (demux, "Not exposing pads due to shutdown");
+ return TRUE;
+ }
+
+ for (iter = demux->prepared_streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+
+ /* streams stay blocked until gst_adaptive_demux_expose_streams */
+ stream->do_block = TRUE;
+
+ if (!gst_adaptive_demux_prepare_stream (demux,
+ GST_ADAPTIVE_DEMUX_STREAM_CAST (stream))) {
+ /* TODO act on error */
+ GST_FIXME_OBJECT (stream->pad,
+ "Do something on failure to expose stream");
+ }
+
+ if (first_and_live) {
+ /* TODO we only need the first timestamp, maybe create a simple function to
+ * get the current PTS of a fragment ? */
+ GST_DEBUG_OBJECT (demux, "Calling update_fragment_info");
+ gst_adaptive_demux_stream_update_fragment_info (demux, stream);
+
+ if (GST_CLOCK_TIME_IS_VALID (min_pts)) {
+ min_pts = MIN (min_pts, stream->fragment.timestamp);
+ } else {
+ min_pts = stream->fragment.timestamp;
+ }
+ }
+ }
+
+ period_start = gst_adaptive_demux_get_period_start_time (demux);
+
+ /* For live streams, the subclass is supposed to seek to the current
+ * fragment and then tell us its timestamp in stream->fragment.timestamp.
+ * We now also have to seek our demuxer segment to reflect this.
+ *
+ * FIXME: This needs some refactoring at some point.
+ */
+ if (first_and_live) {
+ gst_segment_do_seek (&demux->segment, demux->segment.rate, GST_FORMAT_TIME,
+ GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, min_pts + period_start,
+ GST_SEEK_TYPE_NONE, -1, NULL);
+ }
+
+ for (iter = demux->prepared_streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ GstClockTime offset;
+
+ offset = gst_adaptive_demux_stream_get_presentation_offset (demux, stream);
+ stream->segment = demux->segment;
+
+ /* The demuxer segment is just built from seek events, but for each stream
+ * we have to adjust segments according to the current period and the
+ * stream specific presentation time offset.
+ *
+ * For each period, buffer timestamps start again from 0. Additionally the
+ * buffer timestamps are shifted by the stream specific presentation time
+ * offset, so the first buffer timestamp of a period is 0 + presentation
+ * time offset. If the stream contains timestamps itself, this is also
+ * supposed to be the presentation time stored inside the stream.
+ *
+ * The stream time over periods is supposed to be continuous, that is the
+ * buffer timestamp 0 + presentation time offset should map to the start
+ * time of the current period.
+ *
+ *
+ * The adjustment of the stream segments as such works the following.
+ *
+ * If the demuxer segment start is bigger than the period start, this
+ * means that we have to drop some media at the beginning of the current
+ * period, e.g. because a seek into the middle of the period has
+ * happened. The amount of media to drop is the difference between the
+ * period start and the demuxer segment start, and as each period starts
+ * again from 0, this difference is going to be the actual stream's
+ * segment start. As all timestamps of the stream are shifted by the
+ * presentation time offset, we will also have to move the segment start
+ * by that offset.
+ *
+ * Likewise, the demuxer segment stop value is adjusted in the same
+ * fashion.
+ *
+ * Now the running time and stream time at the stream's segment start has
+ * to be the one that is stored inside the demuxer's segment, which means
+ * that segment.base and segment.time have to be copied over (done just
+ * above)
+ *
+ *
+ * If the demuxer segment start is smaller than the period start time,
+ * this means that the whole period is inside the segment. As each period
+ * starts timestamps from 0, and additionally timestamps are shifted by
+ * the presentation time offset, the stream's first timestamp (and as such
+ * the stream's segment start) has to be the presentation time offset.
+ * The stream time at the segment start is supposed to be the stream time
+ * of the period start according to the demuxer segment, so the stream
+ * segment's time would be set to that. The same goes for the stream
+ * segment's base, which is supposed to be the running time of the period
+ * start according to the demuxer's segment.
+ *
+ * The same logic applies for negative rates with the segment stop and
+ * the period stop time (which gets clamped).
+ *
+ *
+ * For the first case where not the complete period is inside the segment,
+ * the segment time and base as calculated by the second case would be
+ * equivalent.
+ */
+ GST_DEBUG_OBJECT (demux, "Using demux segment %" GST_SEGMENT_FORMAT,
+ &demux->segment);
+ GST_DEBUG_OBJECT (demux,
+ "period_start: %" GST_TIME_FORMAT " offset: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (period_start), GST_TIME_ARGS (offset));
+ /* note for readers:
+ * Since stream->segment is initially a copy of demux->segment,
+ * only the values that need updating are modified below. */
+ if (first_and_live) {
+ /* If first and live, demuxer did seek to the current position already */
+ stream->segment.start = demux->segment.start - period_start + offset;
+ if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ stream->segment.stop = demux->segment.stop - period_start + offset;
+ /* FIXME : Do we need to handle negative rates for this ? */
+ stream->segment.position = stream->segment.start;
+ } else if (demux->segment.start > period_start) {
+ /* seek within a period */
+ stream->segment.start = demux->segment.start - period_start + offset;
+ if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ stream->segment.stop = demux->segment.stop - period_start + offset;
+ if (stream->segment.rate >= 0)
+ stream->segment.position = offset;
+ else
+ stream->segment.position = stream->segment.stop;
+ } else {
+ stream->segment.start = offset;
+ if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ stream->segment.stop = demux->segment.stop - period_start + offset;
+ if (stream->segment.rate >= 0)
+ stream->segment.position = offset;
+ else
+ stream->segment.position = stream->segment.stop;
+ stream->segment.time =
+ gst_segment_to_stream_time (&demux->segment, GST_FORMAT_TIME,
+ period_start);
+ stream->segment.base =
+ gst_segment_to_running_time (&demux->segment, GST_FORMAT_TIME,
+ period_start);
+ }
+
+ stream->pending_segment = gst_event_new_segment (&stream->segment);
+ gst_event_set_seqnum (stream->pending_segment, demux->priv->segment_seqnum);
+
+ GST_DEBUG_OBJECT (demux,
+ "Prepared segment %" GST_SEGMENT_FORMAT " for stream %p",
+ &stream->segment, stream);
+ }
+ demux->priv->qos_earliest_time = GST_CLOCK_TIME_NONE;
+
+ return TRUE;
+ }
+
+ /* Promotes prepared_streams to the active streams list, exposes each
+  * pad, signals no-more-pads, and retires the previous generation of
+  * streams: their pads get EOS and are removed, their download tasks are
+  * asked to stop, and they are parked on priv->old_streams for another
+  * thread to free (a GstTask cannot be joined from itself). Finally the
+  * new streams are unblocked. Must be entered with manifest_lock held;
+  * the lock is dropped temporarily around pad operations. */
+ static gboolean
+ gst_adaptive_demux_expose_streams (GstAdaptiveDemux * demux)
+ {
+ GList *iter;
+ GList *old_streams;
+
+ g_return_val_if_fail (demux->prepared_streams != NULL, FALSE);
+
+ old_streams = demux->streams;
+ demux->streams = demux->prepared_streams;
+ demux->prepared_streams = NULL;
+
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+
+ if (!gst_adaptive_demux_expose_stream (demux,
+ GST_ADAPTIVE_DEMUX_STREAM_CAST (stream))) {
+ /* TODO act on error */
+ }
+ }
+ demux->priv->preroll_pending = 0;
+
+ GST_MANIFEST_UNLOCK (demux);
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ GST_MANIFEST_LOCK (demux);
+
+ if (old_streams) {
+ GstEvent *eos = gst_event_new_eos ();
+
+ /* before we put streams in the demux->priv->old_streams list,
+ * we ask the download task to stop. In this way, it will no longer be
+ * allowed to change the demux object.
+ */
+ for (iter = old_streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ GstPad *pad = gst_object_ref (GST_PAD (stream->pad));
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ GST_DEBUG_OBJECT (pad, "Pushing EOS");
+ gst_pad_push_event (pad, gst_event_ref (eos));
+ gst_pad_set_active (pad, FALSE);
+
+ GST_LOG_OBJECT (pad, "Removing stream");
+ gst_element_remove_pad (GST_ELEMENT (demux), pad);
+ GST_MANIFEST_LOCK (demux);
+
+ gst_object_unref (GST_OBJECT (pad));
+
+ /* ask the download task to stop.
+ * We will not join it now, because our thread can be one of these tasks.
+ * We will do the joining later, from another stream download task or
+ * from gst_adaptive_demux_stop_tasks.
+ * We also cannot change the state of the stream->src element, because
+ * that will wait on the streaming thread (which could be this thread)
+ * to stop first.
+ * Because we sent an EOS to the downstream element, the stream->src
+ * element should detect this in its streaming task and stop.
+ * Even if it doesn't do that, we will change its state later in
+ * gst_adaptive_demux_stop_tasks.
+ */
+ GST_LOG_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream),
+ "Marking stream as cancelled");
+ gst_task_stop (stream->download_task);
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->cancelled = TRUE;
+ stream->replaced = TRUE;
+ g_cond_signal (&stream->fragment_download_cond);
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+ gst_event_unref (eos);
+
+ /* The list should be freed from another thread as we can't properly
+ * cleanup a GstTask from itself */
+ demux->priv->old_streams =
+ g_list_concat (demux->priv->old_streams, old_streams);
+ }
+
+ /* Unblock after removing oldstreams */
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ stream->do_block = FALSE;
+ }
+
+ GST_DEBUG_OBJECT (demux, "All streams are exposed");
+
+ return TRUE;
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Allocates a new stream structure (of the subclass-registered size),
+  * sets up its download task, lock and pad functions, marks it blocked
+  * for preroll, and appends it to demux->next_streams. Ownership of the
+  * returned stream stays with the demuxer. */
+ GstAdaptiveDemuxStream *
+ gst_adaptive_demux_stream_new (GstAdaptiveDemux * demux, GstPad * pad)
+ {
+ GstAdaptiveDemuxStream *stream;
+
+ /* stream_struct_size is set by the subclass via
+ * gst_adaptive_demux_set_stream_struct_size() */
+ stream = g_malloc0 (demux->stream_struct_size);
+
+ /* Downloading task */
+ g_rec_mutex_init (&stream->download_lock);
+ stream->download_task =
+ gst_task_new ((GstTaskFunction) gst_adaptive_demux_stream_download_loop,
+ stream, NULL);
+ gst_task_set_lock (stream->download_task, &stream->download_lock);
+
+ stream->pad = pad;
+ stream->demux = demux;
+ stream->fragment_bitrates =
+ g_malloc0 (sizeof (guint64) * NUM_LOOKBACK_FRAGMENTS);
+ gst_pad_set_element_private (pad, stream);
+ stream->qos_earliest_time = GST_CLOCK_TIME_NONE;
+
+ g_mutex_lock (&demux->priv->preroll_lock);
+ stream->do_block = TRUE;
+ demux->priv->preroll_pending++;
+ g_mutex_unlock (&demux->priv->preroll_lock);
+
+ gst_pad_set_query_function (pad,
+ GST_DEBUG_FUNCPTR (gst_adaptive_demux_src_query));
+ gst_pad_set_event_function (pad,
+ GST_DEBUG_FUNCPTR (gst_adaptive_demux_src_event));
+
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
+ g_cond_init (&stream->fragment_download_cond);
+ g_mutex_init (&stream->fragment_download_lock);
+
+ demux->next_streams = g_list_append (demux->next_streams, stream);
+
+ return stream;
+ }
+
+ /* Linear lookup of the stream owning @pad in the active streams list;
+  * returns NULL when no active stream uses that pad. */
+ GstAdaptiveDemuxStream *
+ gst_adaptive_demux_find_stream_for_pad (GstAdaptiveDemux * demux, GstPad * pad)
+ {
+ GList *iter;
+
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ if (stream->pad == pad) {
+ return stream;
+ }
+ }
+
+ return NULL;
+ }
+
+ /* must be called with manifest_lock taken.
+ * It will temporarily drop the manifest_lock in order to join the task.
+ * It will join only the old_streams (the demux->streams are joined by
+ * gst_adaptive_demux_stop_tasks before gst_adaptive_demux_stream_free is
+ * called)
+ */
+ /* Full teardown of one stream: subclass cleanup, download task stop and
+  * join, release of pending events/caps/tags, shutdown and removal of the
+  * internal src element, and finally the struct itself. */
+ static void
+ gst_adaptive_demux_stream_free (GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemux *demux = stream->demux;
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->stream_free)
+ klass->stream_free (stream);
+
+ g_clear_error (&stream->last_error);
+ if (stream->download_task) {
+ if (GST_TASK_STATE (stream->download_task) != GST_TASK_STOPPED) {
+ GST_DEBUG_OBJECT (demux, "Leaving streaming task %s:%s",
+ GST_DEBUG_PAD_NAME (stream->pad));
+
+ gst_task_stop (stream->download_task);
+
+ /* wake the task if it is waiting on a fragment download */
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->cancelled = TRUE;
+ g_cond_signal (&stream->fragment_download_cond);
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+ GST_LOG_OBJECT (demux, "Waiting for task to finish");
+
+ /* temporarily drop the manifest lock to join the task */
+ GST_MANIFEST_UNLOCK (demux);
+
+ gst_task_join (stream->download_task);
+
+ GST_MANIFEST_LOCK (demux);
+
+ GST_LOG_OBJECT (demux, "Finished");
+ gst_object_unref (stream->download_task);
+ g_rec_mutex_clear (&stream->download_lock);
+ stream->download_task = NULL;
+ }
+
+ gst_adaptive_demux_stream_fragment_clear (&stream->fragment);
+
+ if (stream->pending_segment) {
+ gst_event_unref (stream->pending_segment);
+ stream->pending_segment = NULL;
+ }
+
+ if (stream->pending_events) {
+ g_list_free_full (stream->pending_events, (GDestroyNotify) gst_event_unref);
+ stream->pending_events = NULL;
+ }
+
+ if (stream->internal_pad) {
+ gst_object_unparent (GST_OBJECT_CAST (stream->internal_pad));
+ }
+
+ if (stream->src_srcpad) {
+ gst_object_unref (stream->src_srcpad);
+ stream->src_srcpad = NULL;
+ }
+
+ if (stream->src) {
+ GstElement *src = stream->src;
+
+ stream->src = NULL;
+
+ /* state changes on the src element may block on its streaming
+ * thread, so do them without the manifest lock */
+ GST_MANIFEST_UNLOCK (demux);
+ gst_element_set_locked_state (src, TRUE);
+ gst_element_set_state (src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (demux), src);
+ GST_MANIFEST_LOCK (demux);
+ }
+
+ g_cond_clear (&stream->fragment_download_cond);
+ g_mutex_clear (&stream->fragment_download_lock);
+ g_free (stream->fragment_bitrates);
+
+ if (stream->pad) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ }
+ if (stream->pending_caps)
+ gst_caps_unref (stream->pending_caps);
+
+ g_clear_pointer (&stream->pending_tags, gst_tag_list_unref);
+
+ g_free (stream);
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Delegates to the subclass' get_live_seek_range vfunc (required for
+  * live seeking); returns FALSE via g_return_val_if_fail when the
+  * subclass does not provide it. */
+ static gboolean
+ gst_adaptive_demux_get_live_seek_range (GstAdaptiveDemux * demux,
+ gint64 * range_start, gint64 * range_stop)
+ {
+ GstAdaptiveDemuxClass *klass;
+
+ klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ g_return_val_if_fail (klass->get_live_seek_range, FALSE);
+
+ return klass->get_live_seek_range (demux, range_start, range_stop);
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Checks whether the stream's current segment position still falls
+  * inside the live seek window reported by the subclass. Returns FALSE
+  * if the range cannot be obtained. */
+ static gboolean
+ gst_adaptive_demux_stream_in_live_seek_range (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ gint64 range_start, range_stop;
+ if (gst_adaptive_demux_get_live_seek_range (demux, &range_start, &range_stop)) {
+ GST_LOG_OBJECT (stream->pad,
+ "stream position %" GST_TIME_FORMAT " live seek range %"
+ GST_STIME_FORMAT " - %" GST_STIME_FORMAT,
+ GST_TIME_ARGS (stream->segment.position), GST_STIME_ARGS (range_start),
+ GST_STIME_ARGS (range_stop));
+ return (stream->segment.position >= range_start
+ && stream->segment.position <= range_stop);
+ }
+
+ return FALSE;
+ }
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_can_seek (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *klass;
+
+ klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ if (gst_adaptive_demux_is_live (demux)) {
+ return klass->get_live_seek_range != NULL;
+ }
+
+ return klass->seek != NULL;
+ }
+
/* Re-derive each stream's segment from the demuxer segment after a seek,
 * shifting start/stop by the stream's presentation offset relative to the
 * period start, and queue a matching SEGMENT event (carrying the seek's
 * seqnum) on every stream.  Callers hold the SEGMENT lock (see the call
 * sites in the seek handler). */
static void
gst_adaptive_demux_update_streams_segment (GstAdaptiveDemux * demux,
    GList * streams, gint64 period_start, GstSeekType start_type,
    GstSeekType stop_type)
{
  GList *iter;
  for (iter = streams; iter; iter = g_list_next (iter)) {
    GstAdaptiveDemuxStream *stream = iter->data;
    GstEvent *seg_evt;
    GstClockTime offset;

    /* See comments in gst_adaptive_demux_get_period_start_time() for
     * an explanation of the segment modifications */
    stream->segment = demux->segment;
    offset = gst_adaptive_demux_stream_get_presentation_offset (demux, stream);
    stream->segment.start += offset - period_start;
    if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
      stream->segment.stop += offset - period_start;
    /* Position snaps to the segment edge the seek actually changed:
     * start for forward playback, stop for reverse */
    if (demux->segment.rate > 0 && start_type != GST_SEEK_TYPE_NONE)
      stream->segment.position = stream->segment.start;
    else if (demux->segment.rate < 0 && stop_type != GST_SEEK_TYPE_NONE)
      stream->segment.position = stream->segment.stop;
    seg_evt = gst_event_new_segment (&stream->segment);
    gst_event_set_seqnum (seg_evt, demux->priv->segment_seqnum);
    gst_event_replace (&stream->pending_segment, seg_evt);
    GST_DEBUG_OBJECT (stream->pad, "Pending segment now %" GST_PTR_FORMAT,
        stream->pending_segment);
    gst_event_unref (seg_evt);
    /* Make sure the first buffer after a seek has the discont flag */
    stream->discont = TRUE;
  }
  /* Old QoS information is meaningless after a seek */
  demux->priv->qos_earliest_time = GST_CLOCK_TIME_NONE;
}
+
/* TRUE if the seek flags request any kind of snapping (to fragment
 * boundaries or key units) */
#define IS_SNAP_SEEK(f) (f & (GST_SEEK_FLAG_SNAP_BEFORE | \
                              GST_SEEK_FLAG_SNAP_AFTER | \
                              GST_SEEK_FLAG_SNAP_NEAREST | \
                              GST_SEEK_FLAG_TRICKMODE_KEY_UNITS | \
                              GST_SEEK_FLAG_KEY_UNIT))
/* Strip only the SNAP_* flags; trickmode/key-unit flags are kept */
#define REMOVE_SNAP_FLAGS(f) (f & ~(GST_SEEK_FLAG_SNAP_BEFORE | \
                                    GST_SEEK_FLAG_SNAP_AFTER | \
                                    GST_SEEK_FLAG_SNAP_NEAREST))
+
+ static gboolean
+ gst_adaptive_demux_handle_seek_event (GstAdaptiveDemux * demux, GstPad * pad,
+ GstEvent * event)
+ {
+ GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ guint32 seqnum;
+ gboolean update;
+ gboolean ret;
+ GstSegment oldsegment;
+ GstAdaptiveDemuxStream *stream = NULL;
+
+ GST_INFO_OBJECT (demux, "Received seek event");
+
+ GST_API_LOCK (demux);
+ GST_MANIFEST_LOCK (demux);
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME) {
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ GST_WARNING_OBJECT (demux,
+ "Adaptive demuxers only support TIME-based seeking");
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ if (flags & GST_SEEK_FLAG_SEGMENT) {
+ GST_FIXME_OBJECT (demux, "Handle segment seeks");
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ seqnum = gst_event_get_seqnum (event);
+
+ if (flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE) {
+ /* For instant rate seeks, reply directly and update
+ * our segment so the new rate is reflected in any future
+ * fragments */
+ GstEvent *ev;
+
+ /* instant rate change only supported if direction does not change. All
+ * other requirements are already checked before creating the seek event
+ * but let's double-check here to be sure */
+ if ((demux->segment.rate > 0 && rate < 0) ||
+ (demux->segment.rate < 0 && rate > 0) ||
+ start_type != GST_SEEK_TYPE_NONE ||
+ stop_type != GST_SEEK_TYPE_NONE || (flags & GST_SEEK_FLAG_FLUSH)) {
+ GST_ERROR_OBJECT (demux,
+ "Instant rate change seeks only supported in the "
+ "same direction, without flushing and position change");
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ return FALSE;
+ }
+
+ ev = gst_event_new_instant_rate_change (rate / demux->segment.rate,
+ (GstSegmentFlags) flags);
+ gst_event_set_seqnum (ev, seqnum);
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ ret = gst_adaptive_demux_push_src_event (demux, ev);
+
+ GST_API_UNLOCK (demux);
+ gst_event_unref (event);
+
+ return ret;
+ }
+
+ if (!gst_adaptive_demux_can_seek (demux)) {
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ if (gst_adaptive_demux_is_live (demux)) {
+ gint64 range_start, range_stop;
+ gboolean changed = FALSE;
+ gboolean start_valid = TRUE, stop_valid = TRUE;
+
+ if (!gst_adaptive_demux_get_live_seek_range (demux, &range_start,
+ &range_stop)) {
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ gst_event_unref (event);
+ GST_WARNING_OBJECT (demux, "Failure getting the live seek ranges");
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "Live range is %" GST_STIME_FORMAT " %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (range_start), GST_STIME_ARGS (range_stop));
+
+ /* Handle relative positioning for live streams (relative to the range_stop) */
+ if (start_type == GST_SEEK_TYPE_END) {
+ start = range_stop + start;
+ start_type = GST_SEEK_TYPE_SET;
+ changed = TRUE;
+ }
+ if (stop_type == GST_SEEK_TYPE_END) {
+ stop = range_stop + stop;
+ stop_type = GST_SEEK_TYPE_SET;
+ changed = TRUE;
+ }
+
+ /* Adjust the requested start/stop position if it falls beyond the live
+ * seek range.
+ * The only case where we don't adjust is for the starting point of
+ * an accurate seek (start if forward and stop if backwards)
+ */
+ if (start_type == GST_SEEK_TYPE_SET && start < range_start &&
+ (rate < 0 || !(flags & GST_SEEK_FLAG_ACCURATE))) {
+ GST_DEBUG_OBJECT (demux,
+ "seek before live stream start, setting to range start: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (range_start));
+ start = range_start;
+ changed = TRUE;
+ }
+ /* truncate stop position also if set */
+ if (stop_type == GST_SEEK_TYPE_SET && stop > range_stop &&
+ (rate > 0 || !(flags & GST_SEEK_FLAG_ACCURATE))) {
+ GST_DEBUG_OBJECT (demux,
+ "seek ending after live start, adjusting to: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (range_stop));
+ stop = range_stop;
+ changed = TRUE;
+ }
+
+ if (start_type == GST_SEEK_TYPE_SET && GST_CLOCK_TIME_IS_VALID (start) &&
+ (start < range_start || start > range_stop)) {
+ GST_WARNING_OBJECT (demux,
+ "Seek to invalid position start:%" GST_STIME_FORMAT
+ " out of seekable range (%" GST_STIME_FORMAT " - %" GST_STIME_FORMAT
+ ")", GST_STIME_ARGS (start), GST_STIME_ARGS (range_start),
+ GST_STIME_ARGS (range_stop));
+ start_valid = FALSE;
+ }
+ if (stop_type == GST_SEEK_TYPE_SET && GST_CLOCK_TIME_IS_VALID (stop) &&
+ (stop < range_start || stop > range_stop)) {
+ GST_WARNING_OBJECT (demux,
+ "Seek to invalid position stop:%" GST_STIME_FORMAT
+ " out of seekable range (%" GST_STIME_FORMAT " - %" GST_STIME_FORMAT
+ ")", GST_STIME_ARGS (stop), GST_STIME_ARGS (range_start),
+ GST_STIME_ARGS (range_stop));
+ stop_valid = FALSE;
+ }
+
+ /* If the seek position is still outside of the seekable range, refuse the seek */
+ if (!start_valid || !stop_valid) {
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ /* Re-create seek event with changed/updated values */
+ if (changed) {
+ gst_event_unref (event);
+ event =
+ gst_event_new_seek (rate, format, flags,
+ start_type, start, stop_type, stop);
+ gst_event_set_seqnum (event, seqnum);
+ }
+ }
+
+ GST_DEBUG_OBJECT (demux, "seek event, %" GST_PTR_FORMAT, event);
+
+ /* have a backup in case seek fails */
+ gst_segment_copy_into (&demux->segment, &oldsegment);
+
+ if (flags & GST_SEEK_FLAG_FLUSH) {
+ GstEvent *fevent;
+
+ GST_DEBUG_OBJECT (demux, "sending flush start");
+ fevent = gst_event_new_flush_start ();
+ gst_event_set_seqnum (fevent, seqnum);
+ GST_MANIFEST_UNLOCK (demux);
+ gst_adaptive_demux_push_src_event (demux, fevent);
+ GST_MANIFEST_LOCK (demux);
+
+ gst_adaptive_demux_stop_tasks (demux, FALSE);
+ } else if ((rate > 0 && start_type != GST_SEEK_TYPE_NONE) ||
+ (rate < 0 && stop_type != GST_SEEK_TYPE_NONE)) {
+
+ gst_adaptive_demux_stop_tasks (demux, FALSE);
+ }
+
+ GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+
+ /*
+ * Handle snap seeks as follows:
+ * 1) do the snap seeking on the stream that received
+ * the event
+ * 2) use the final position on this stream to seek
+ * on the other streams to the same position
+ *
+ * We can't snap at all streams at the same time as
+ * they might end in different positions, so just
+ * use the one that received the event as the 'leading'
+ * one to do the snap seek.
+ */
+ if (IS_SNAP_SEEK (flags) && demux_class->stream_seek && (stream =
+ gst_adaptive_demux_find_stream_for_pad (demux, pad))) {
+ GstClockTime ts;
+ GstSeekFlags stream_seek_flags = flags;
+
+ /* snap-seek on the stream that received the event and then
+ * use the resulting position to seek on all streams */
+
+ if (rate >= 0) {
+ if (start_type != GST_SEEK_TYPE_NONE)
+ ts = start;
+ else {
+ ts = stream->segment.position;
+ start_type = GST_SEEK_TYPE_SET;
+ }
+ } else {
+ if (stop_type != GST_SEEK_TYPE_NONE)
+ ts = stop;
+ else {
+ stop_type = GST_SEEK_TYPE_SET;
+ ts = stream->segment.position;
+ }
+ }
+
+ if (stream) {
+ demux_class->stream_seek (stream, rate >= 0, stream_seek_flags, ts, &ts);
+ }
+
+ /* replace event with a new one without snapping to seek on all streams */
+ gst_event_unref (event);
+ if (rate >= 0) {
+ start = ts;
+ } else {
+ stop = ts;
+ }
+ event =
+ gst_event_new_seek (rate, format, REMOVE_SNAP_FLAGS (flags),
+ start_type, start, stop_type, stop);
+ GST_DEBUG_OBJECT (demux, "Adapted snap seek to %" GST_PTR_FORMAT, event);
+ }
+ stream = NULL;
+
+ ret = gst_segment_do_seek (&demux->segment, rate, format, flags, start_type,
+ start, stop_type, stop, &update);
+
+ if (ret) {
+ /* FIXME - this seems unatural, do_seek() is updating base when we
+ * only want the start/stop position to change, maybe do_seek() needs
+ * some fixing? */
+ if (!(flags & GST_SEEK_FLAG_FLUSH) && ((rate > 0
+ && start_type == GST_SEEK_TYPE_NONE) || (rate < 0
+ && stop_type == GST_SEEK_TYPE_NONE))) {
+ demux->segment.base = oldsegment.base;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Calling subclass seek: %" GST_PTR_FORMAT, event);
+
+ ret = demux_class->seek (demux, event);
+ }
+
+ if (!ret) {
+ /* Is there anything else we can do if it fails? */
+ gst_segment_copy_into (&oldsegment, &demux->segment);
+ } else {
+ demux->priv->segment_seqnum = seqnum;
+ }
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+ if (flags & GST_SEEK_FLAG_FLUSH) {
+ GstEvent *fevent;
+
+ GST_DEBUG_OBJECT (demux, "Sending flush stop on all pad");
+ fevent = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (fevent, seqnum);
+ gst_adaptive_demux_push_src_event (demux, fevent);
+ }
+
+ if (demux->next_streams) {
+ /* If the seek generated new streams, get them
+ * to preroll */
+ gst_adaptive_demux_prepare_streams (demux, FALSE);
+ gst_adaptive_demux_start_tasks (demux, TRUE);
+ } else {
+ GstClockTime period_start =
+ gst_adaptive_demux_get_period_start_time (demux);
+
+ GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+ gst_adaptive_demux_update_streams_segment (demux, demux->streams,
+ period_start, start_type, stop_type);
+ gst_adaptive_demux_update_streams_segment (demux, demux->prepared_streams,
+ period_start, start_type, stop_type);
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+ /* Restart the demux */
+ gst_adaptive_demux_start_tasks (demux, FALSE);
+ }
+
+ GST_MANIFEST_UNLOCK (demux);
+ GST_API_UNLOCK (demux);
+ gst_event_unref (event);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_adaptive_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstAdaptiveDemux *demux;
+
+ demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+
+ /* FIXME handle events received on pads that are to be removed */
+
+ switch (event->type) {
+ case GST_EVENT_SEEK:
+ {
+ guint32 seqnum = gst_event_get_seqnum (event);
+ if (seqnum == demux->priv->segment_seqnum) {
+ GST_LOG_OBJECT (pad,
+ "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
+ gst_event_unref (event);
+ return TRUE;
+ }
+ return gst_adaptive_demux_handle_seek_event (demux, pad, event);
+ }
+ case GST_EVENT_RECONFIGURE:{
+ GstAdaptiveDemuxStream *stream;
+
+ GST_MANIFEST_LOCK (demux);
+ stream = gst_adaptive_demux_find_stream_for_pad (demux, pad);
+
+ if (stream) {
+ if (!stream->cancelled && gst_adaptive_demux_is_running (demux) &&
+ stream->last_ret == GST_FLOW_NOT_LINKED) {
+ stream->last_ret = GST_FLOW_OK;
+ stream->restart_download = TRUE;
+ stream->need_header = TRUE;
+ stream->discont = TRUE;
+ GST_DEBUG_OBJECT (stream->pad, "Restarting download loop");
+ gst_task_start (stream->download_task);
+ }
+ gst_event_unref (event);
+ GST_MANIFEST_UNLOCK (demux);
+ return TRUE;
+ }
+ GST_MANIFEST_UNLOCK (demux);
+ }
+ break;
+ case GST_EVENT_LATENCY:{
+ /* Upstream and our internal source are irrelevant
+ * for latency, and we should not fail here to
+ * configure the latency */
+ gst_event_unref (event);
+ return TRUE;
+ }
+ case GST_EVENT_QOS:{
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+ GstClockTime earliest_time;
+
+ gst_event_parse_qos (event, NULL, NULL, &diff, ×tamp);
+ /* Only take into account lateness if late */
+ if (diff > 0)
+ earliest_time = timestamp + 2 * diff;
+ else
+ earliest_time = timestamp;
+
+ GST_OBJECT_LOCK (demux);
+ if (!GST_CLOCK_TIME_IS_VALID (demux->priv->qos_earliest_time) ||
+ earliest_time > demux->priv->qos_earliest_time) {
+ demux->priv->qos_earliest_time = earliest_time;
+ GST_DEBUG_OBJECT (demux, "qos_earliest_time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->priv->qos_earliest_time));
+ }
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+ }
+
/* Query handler for the exposed source pads.  Answers DURATION, LATENCY,
 * SEEKING and URI from the manifest; queries are deliberately NOT
 * forwarded upstream (see the default case). */
static gboolean
gst_adaptive_demux_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  GstAdaptiveDemux *demux;
  GstAdaptiveDemuxClass *demux_class;
  gboolean ret = FALSE;

  if (query == NULL)
    return FALSE;

  demux = GST_ADAPTIVE_DEMUX_CAST (parent);
  demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);

  switch (query->type) {
    case GST_QUERY_DURATION:{
      GstClockTime duration = -1;
      GstFormat fmt;

      gst_query_parse_duration (query, &fmt, NULL);

      if (gst_adaptive_demux_is_live (demux)) {
        /* We are able to answer this query: the duration is unknown */
        gst_query_set_duration (query, fmt, -1);
        ret = TRUE;
        break;
      }

      if (fmt == GST_FORMAT_TIME
          && g_atomic_int_get (&demux->priv->have_manifest)) {
        duration = demux_class->get_duration (demux);

        /* Only answer with a valid, non-zero duration */
        if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0) {
          gst_query_set_duration (query, GST_FORMAT_TIME, duration);
          ret = TRUE;
        }
      }

      GST_LOG_OBJECT (demux, "GST_QUERY_DURATION returns %s with duration %"
          GST_TIME_FORMAT, ret ? "TRUE" : "FALSE", GST_TIME_ARGS (duration));
      break;
    }
    case GST_QUERY_LATENCY:{
      /* Not live from the pipeline's point of view, no extra latency */
      gst_query_set_latency (query, FALSE, 0, -1);
      ret = TRUE;
      break;
    }
    case GST_QUERY_SEEKING:{
      GstFormat fmt;
      gint64 stop = -1;
      gint64 start = 0;

      if (!g_atomic_int_get (&demux->priv->have_manifest)) {
        GST_INFO_OBJECT (demux,
            "Don't have manifest yet, can't answer seeking query");
        return FALSE;           /* can't answer without manifest */
      }

      GST_MANIFEST_LOCK (demux);

      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      GST_INFO_OBJECT (demux, "Received GST_QUERY_SEEKING with format %d", fmt);
      if (fmt == GST_FORMAT_TIME) {
        GstClockTime duration;
        gboolean can_seek = gst_adaptive_demux_can_seek (demux);

        ret = TRUE;
        if (can_seek) {
          if (gst_adaptive_demux_is_live (demux)) {
            /* Live: the seekable range is the current live window */
            ret = gst_adaptive_demux_get_live_seek_range (demux, &start, &stop);
            if (!ret) {
              GST_MANIFEST_UNLOCK (demux);
              GST_INFO_OBJECT (demux, "can't answer seeking query");
              return FALSE;
            }
          } else {
            /* On-demand: seekable from 0 up to the manifest duration */
            duration = demux_class->get_duration (demux);
            if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0)
              stop = duration;
          }
        }
        gst_query_set_seeking (query, fmt, can_seek, start, stop);
        GST_INFO_OBJECT (demux, "GST_QUERY_SEEKING returning with start : %"
            GST_TIME_FORMAT ", stop : %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
      }
      GST_MANIFEST_UNLOCK (demux);
      break;
    }
    case GST_QUERY_URI:

      GST_MANIFEST_LOCK (demux);

      /* TODO HLS can answer this differently it seems */
      if (demux->manifest_uri) {
        /* FIXME: (hls) Do we answer with the variant playlist, with the current
         * playlist or the uri of the last downloaded fragment? */
        gst_query_set_uri (query, demux->manifest_uri);
        ret = TRUE;
      }

      GST_MANIFEST_UNLOCK (demux);
      break;
#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
    case GST_QUERY_CUSTOM:{
      /* Let decoder(which can not support DRC automatically) know the current streaming mode */

      const GstStructure *s;
      s = gst_query_get_structure (query);

      if (gst_structure_has_name (s, "GstAdaptiveStreaming")) {
        GValue value = { 0, };
        GST_DEBUG_OBJECT (demux, "custom query to check adaptive streaming");

        g_value_init (&value, G_TYPE_BOOLEAN);
        g_value_set_boolean (&value, TRUE);
        /* NOTE(review): casting away const on the query structure — the
         * caller is expected to tolerate in-place modification; verify */
        gst_structure_set_value ((GstStructure *) s, "adaptive-streaming",
            &value);

        g_value_set_boolean (&value, gst_adaptive_demux_is_live (demux));
        gst_structure_set_value ((GstStructure *) s, "is-live", &value);

        ret = TRUE;
      } else {
        GST_DEBUG_OBJECT (demux, "Unsupported query");
        ret = FALSE;
      }
      break;
    }
#endif
    default:
      /* Don't forward queries upstream because of the special nature of this
       * "demuxer", which relies on the upstream element only to be fed
       * the Manifest
       */
      break;
  }

  return ret;
}
+
+ /* must be called with manifest_lock taken */
+ static void
+ gst_adaptive_demux_start_tasks (GstAdaptiveDemux * demux,
+ gboolean start_preroll_streams)
+ {
+ GList *iter;
+
+ if (!gst_adaptive_demux_is_running (demux)) {
+ GST_DEBUG_OBJECT (demux, "Not starting tasks due to shutdown");
+ return;
+ }
+
+ GST_INFO_OBJECT (demux, "Starting streams' tasks");
+
+ iter = start_preroll_streams ? demux->prepared_streams : demux->streams;
+
+ for (; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+
+ if (!start_preroll_streams) {
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->cancelled = FALSE;
+ stream->replaced = FALSE;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+
+ stream->last_ret = GST_FLOW_OK;
+ gst_task_start (stream->download_task);
+ }
+ }
+
/* must be called with manifest_lock taken */
static void
gst_adaptive_demux_stop_manifest_update_task (GstAdaptiveDemux * demux)
{
  /* Cancel any in-flight manifest download first so the update task is
   * not left blocked on the network */
  gst_uri_downloader_cancel (demux->downloader);

  gst_task_stop (demux->priv->updates_task);

  /* Set the stop flag under the timed lock and signal, waking the task
   * if it is waiting between update cycles */
  g_mutex_lock (&demux->priv->updates_timed_lock);
  GST_DEBUG_OBJECT (demux, "requesting stop of the manifest update task");
  demux->priv->stop_updates_task = TRUE;
  g_cond_signal (&demux->priv->updates_timed_cond);
  g_mutex_unlock (&demux->priv->updates_timed_lock);
}
+
+ /* must be called with manifest_lock taken */
+ static void
+ gst_adaptive_demux_start_manifest_update_task (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (gst_adaptive_demux_is_live (demux)) {
+ gst_uri_downloader_reset (demux->downloader);
+ g_mutex_lock (&demux->priv->updates_timed_lock);
+ demux->priv->stop_updates_task = FALSE;
+ g_mutex_unlock (&demux->priv->updates_timed_lock);
+ /* Task to periodically update the manifest */
+ if (demux_class->requires_periodical_playlist_update (demux)) {
+ GST_DEBUG_OBJECT (demux, "requesting start of the manifest update task");
+ gst_task_start (demux->priv->updates_task);
+ }
+ }
+ }
+
/* must be called with manifest_lock taken
 * This function will temporarily release manifest_lock in order to join the
 * download threads.
 * The api_lock will still protect it against other threads trying to modify
 * the demux element.
 */
static void
gst_adaptive_demux_stop_tasks (GstAdaptiveDemux * demux, gboolean stop_updates)
{
  int i;
  GList *iter;
  GList *list_to_process;

  GST_LOG_OBJECT (demux, "Stopping tasks");

  if (stop_updates)
    gst_adaptive_demux_stop_manifest_update_task (demux);

  /* Pass 1: flag every stream (first demux->streams, then
   * demux->prepared_streams) as cancelled and wake any download waiting
   * on its condition variable */
  list_to_process = demux->streams;
  for (i = 0; i < 2; ++i) {
    for (iter = list_to_process; iter; iter = g_list_next (iter)) {
      GstAdaptiveDemuxStream *stream = iter->data;

      g_mutex_lock (&stream->fragment_download_lock);
      stream->cancelled = TRUE;
      gst_task_stop (stream->download_task);
      g_cond_signal (&stream->fragment_download_cond);
      g_mutex_unlock (&stream->fragment_download_lock);
    }
    list_to_process = demux->prepared_streams;
  }

  /* Wake up any streaming thread blocked waiting for preroll */
  GST_MANIFEST_UNLOCK (demux);
  g_mutex_lock (&demux->priv->preroll_lock);
  g_cond_broadcast (&demux->priv->preroll_cond);
  g_mutex_unlock (&demux->priv->preroll_lock);
  GST_MANIFEST_LOCK (demux);

  /* Wake up anything waiting for a manifest update */
  g_mutex_lock (&demux->priv->manifest_update_lock);
  g_cond_broadcast (&demux->priv->manifest_cond);
  g_mutex_unlock (&demux->priv->manifest_update_lock);

  /* need to release manifest_lock before stopping the src element.
   * The streams were asked to cancel, so they will not make any writes to demux
   * object. Even if we temporarily release manifest_lock, the demux->streams
   * cannot change and iter cannot be invalidated.
   */
  list_to_process = demux->streams;
  for (i = 0; i < 2; ++i) {
    for (iter = list_to_process; iter; iter = g_list_next (iter)) {
      GstAdaptiveDemuxStream *stream = iter->data;
      GstElement *src = stream->src;

      GST_MANIFEST_UNLOCK (demux);

      if (src) {
        gst_element_set_locked_state (src, TRUE);
        gst_element_set_state (src, GST_STATE_READY);
      }

      /* stream->download_task value never changes, so it is safe to read it
       * outside critical section
       */
      gst_task_join (stream->download_task);

      GST_MANIFEST_LOCK (demux);
    }
    list_to_process = demux->prepared_streams;
  }

  /* Join the manifest update task without holding the manifest lock,
   * since the task itself takes it */
  GST_MANIFEST_UNLOCK (demux);
  if (stop_updates)
    gst_task_join (demux->priv->updates_task);

  GST_MANIFEST_LOCK (demux);

  /* Pass 3: reset per-stream error and header state for the next start */
  list_to_process = demux->streams;
  for (i = 0; i < 2; ++i) {
    for (iter = list_to_process; iter; iter = g_list_next (iter)) {
      GstAdaptiveDemuxStream *stream = iter->data;

      stream->download_error_count = 0;
      stream->need_header = TRUE;
    }
    list_to_process = demux->prepared_streams;
  }
  /* Stale QoS information is meaningless once everything stopped */
  demux->priv->qos_earliest_time = GST_CLOCK_TIME_NONE;
}
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_push_src_event (GstAdaptiveDemux * demux, GstEvent * event)
+ {
+ GList *iter;
+ gboolean ret = TRUE;
+
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+ gst_event_ref (event);
+ ret = ret & gst_pad_push_event (stream->pad, event);
+ }
+ gst_event_unref (event);
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ void
+ gst_adaptive_demux_stream_set_caps (GstAdaptiveDemuxStream * stream,
+ GstCaps * caps)
+ {
+ GST_DEBUG_OBJECT (stream->pad, "setting new caps for stream %" GST_PTR_FORMAT,
+ caps);
+ gst_caps_replace (&stream->pending_caps, caps);
+ gst_caps_unref (caps);
+ }
+
+ /* must be called with manifest_lock taken */
+ void
+ gst_adaptive_demux_stream_set_tags (GstAdaptiveDemuxStream * stream,
+ GstTagList * tags)
+ {
+ GST_DEBUG_OBJECT (stream->pad, "setting new tags for stream %" GST_PTR_FORMAT,
+ tags);
+ if (stream->pending_tags) {
+ gst_tag_list_unref (stream->pending_tags);
+ }
+ stream->pending_tags = tags;
+ }
+
/* must be called with manifest_lock taken */
void
gst_adaptive_demux_stream_queue_event (GstAdaptiveDemuxStream * stream,
    GstEvent * event)
{
  /* Takes ownership of @event.  Queued events are drained and pushed on
   * the stream pad right before the next buffer (see the pending_events
   * handling in gst_adaptive_demux_stream_push_buffer). */
  stream->pending_events = g_list_append (stream->pending_events, event);
}
+
+ /* must be called with manifest_lock taken */
+ static guint64
+ _update_average_bitrate (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, guint64 new_bitrate)
+ {
+ gint index = stream->moving_index % NUM_LOOKBACK_FRAGMENTS;
+
+ stream->moving_bitrate -= stream->fragment_bitrates[index];
+ stream->fragment_bitrates[index] = new_bitrate;
+ stream->moving_bitrate += new_bitrate;
+
+ stream->moving_index += 1;
+
+ if (stream->moving_index > NUM_LOOKBACK_FRAGMENTS)
+ return stream->moving_bitrate / NUM_LOOKBACK_FRAGMENTS;
+ return stream->moving_bitrate / stream->moving_index;
+ }
+
+ /* must be called with manifest_lock taken */
+ static guint64
+ gst_adaptive_demux_stream_update_current_bitrate (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ guint64 average_bitrate;
+ guint64 fragment_bitrate;
+
+ if (demux->connection_speed) {
+ GST_LOG_OBJECT (demux, "Connection-speed is set to %u kbps, using it",
+ demux->connection_speed / 1000);
+ stream->current_download_rate = demux->connection_speed;
+ return demux->connection_speed;
+ }
+
+ fragment_bitrate = stream->last_bitrate;
+ GST_DEBUG_OBJECT (demux, "Download bitrate is : %" G_GUINT64_FORMAT " bps",
+ fragment_bitrate);
+
+ average_bitrate = _update_average_bitrate (demux, stream, fragment_bitrate);
+
+ GST_INFO_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream),
+ "last fragment bitrate was %" G_GUINT64_FORMAT, fragment_bitrate);
+ GST_INFO_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream),
+ "Last %u fragments average bitrate is %" G_GUINT64_FORMAT,
+ NUM_LOOKBACK_FRAGMENTS, average_bitrate);
+
+ /* Conservative approach, make sure we don't upgrade too fast */
+ stream->current_download_rate = MIN (average_bitrate, fragment_bitrate);
+
+ stream->current_download_rate *= demux->bitrate_limit;
+ GST_DEBUG_OBJECT (demux, "Bitrate after bitrate limit (%0.2f): %"
+ G_GUINT64_FORMAT, demux->bitrate_limit, stream->current_download_rate);
+
+ #if 0
+ /* Debugging code, modulate the bitrate every few fragments */
+ {
+ static guint ctr = 0;
+ if (ctr % 3 == 0) {
+ GST_INFO_OBJECT (demux, "Halving reported bitrate for debugging");
+ stream->current_download_rate /= 2;
+ }
+ ctr++;
+ }
+ #endif
+
+ return stream->current_download_rate;
+ }
+
+ /* must be called with manifest_lock taken */
+ static GstFlowReturn
+ gst_adaptive_demux_combine_flows (GstAdaptiveDemux * demux)
+ {
+ gboolean all_notlinked = TRUE;
+ gboolean all_eos = TRUE;
+ GList *iter;
+
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *stream = iter->data;
+
+ if (stream->last_ret != GST_FLOW_NOT_LINKED) {
+ all_notlinked = FALSE;
+ if (stream->last_ret != GST_FLOW_EOS)
+ all_eos = FALSE;
+ }
+
+ if (stream->last_ret <= GST_FLOW_NOT_NEGOTIATED
+ || stream->last_ret == GST_FLOW_FLUSHING) {
+ return stream->last_ret;
+ }
+ }
+ if (all_notlinked)
+ return GST_FLOW_NOT_LINKED;
+ else if (all_eos)
+ return GST_FLOW_EOS;
+ return GST_FLOW_OK;
+ }
+
+ /* Called with preroll_lock */
+ static void
+ gst_adaptive_demux_handle_preroll (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ demux->priv->preroll_pending--;
+ if (demux->priv->preroll_pending == 0) {
+ /* That was the last one, time to release all streams
+ * and expose them */
+ GST_DEBUG_OBJECT (demux, "All streams prerolled. exposing");
+ gst_adaptive_demux_expose_streams (demux);
+ g_cond_broadcast (&demux->priv->preroll_cond);
+ }
+ }
+
/* must be called with manifest_lock taken.
 * Temporarily releases manifest_lock
 *
 * Pushes @buffer (ownership transferred) on the stream's source pad,
 * first flushing out pending sticky items (caps, segment, tags, queued
 * events) and, while in preroll, blocking until all streams are exposed.
 */
GstFlowReturn
gst_adaptive_demux_stream_push_buffer (GstAdaptiveDemuxStream * stream,
    GstBuffer * buffer)
{
  GstAdaptiveDemux *demux = stream->demux;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean discont = FALSE;
  /* Pending events */
  GstEvent *pending_caps = NULL, *pending_segment = NULL, *pending_tags = NULL;
  GList *pending_events = NULL;

  /* FIXME :
   * This is duplicating *exactly* the same thing as what is done at the beginning
   * of _src_chain if starting_fragment is TRUE */
  if (stream->first_fragment_buffer) {
    GstClockTime offset =
        gst_adaptive_demux_stream_get_presentation_offset (demux, stream);
    GstClockTime period_start =
        gst_adaptive_demux_get_period_start_time (demux);

    GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
    if (demux->segment.rate < 0)
      /* Set DISCONT flag for every first buffer in reverse playback mode
       * as each fragment for its own has to be reversed */
      discont = TRUE;

    GST_BUFFER_PTS (buffer) = stream->fragment.timestamp;
    if (GST_BUFFER_PTS_IS_VALID (buffer))
      GST_BUFFER_PTS (buffer) += offset;

    if (GST_BUFFER_PTS_IS_VALID (buffer)) {
      stream->segment.position = GST_BUFFER_PTS (buffer);

      /* Convert from position inside the stream's segment to the demuxer's
       * segment, they are not necessarily the same */
      if (stream->segment.position - offset + period_start >
          demux->segment.position)
        demux->segment.position =
            stream->segment.position - offset + period_start;
    }
    GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);

    GST_LOG_OBJECT (stream->pad,
        "Going to push buffer with PTS %" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
  } else {
    /* Only the first buffer of a fragment carries a timestamp */
    GST_BUFFER_PTS (buffer) = GST_CLOCK_TIME_NONE;
  }

  if (stream->discont) {
    discont = TRUE;
    stream->discont = FALSE;
  }

  if (discont) {
    GST_DEBUG_OBJECT (stream->pad, "Marking fragment as discontinuous");
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  } else {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  }

  stream->first_fragment_buffer = FALSE;

  GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
  if (G_UNLIKELY (stream->pending_caps)) {
#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
    /* Tizen: advertise liveness to downstream via an "is_live" caps field */
    stream->pending_caps = gst_caps_make_writable (stream->pending_caps);
    if (gst_adaptive_demux_is_live (demux)) {
      gst_caps_set_simple (stream->pending_caps, "is_live", G_TYPE_BOOLEAN,
          TRUE, NULL);
    } else {
      gst_caps_set_simple (stream->pending_caps, "is_live", G_TYPE_BOOLEAN,
          FALSE, NULL);
    }
#endif
    pending_caps = gst_event_new_caps (stream->pending_caps);
    gst_caps_unref (stream->pending_caps);
    stream->pending_caps = NULL;
  }

  if (stream->do_block) {
    /* Preroll: park this streaming thread until all streams are exposed */
    g_mutex_lock (&demux->priv->preroll_lock);

    /* If we are preroll state, set caps in here */
    if (pending_caps) {
      gst_pad_push_event (stream->pad, pending_caps);
      pending_caps = NULL;
    }

    gst_adaptive_demux_handle_preroll (demux, stream);
    GST_MANIFEST_UNLOCK (demux);

    while (stream->do_block && !stream->cancelled) {
      GST_LOG_OBJECT (demux, "Stream %p sleeping for preroll", stream);
      g_cond_wait (&demux->priv->preroll_cond, &demux->priv->preroll_lock);
    }
    if (stream->cancelled) {
      /* NOTE(review): this path returns without re-taking the manifest
       * lock released above — confirm callers tolerate that on FLUSHING */
      GST_LOG_OBJECT (demux, "stream %p cancelled", stream);
      gst_buffer_unref (buffer);
      g_mutex_unlock (&demux->priv->preroll_lock);
      return GST_FLOW_FLUSHING;
    }

    g_mutex_unlock (&demux->priv->preroll_lock);
    GST_MANIFEST_LOCK (demux);
  }

  /* Collect the sticky items to push before the buffer */
  if (G_UNLIKELY (stream->pending_segment)) {
    GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
    pending_segment = stream->pending_segment;
    stream->pending_segment = NULL;
    GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
  }
  if (G_UNLIKELY (stream->pending_tags || stream->bitrate_changed)) {
    GstTagList *tags = stream->pending_tags;

    stream->pending_tags = NULL;
    stream->bitrate_changed = 0;

    /* Merge the current fragment's nominal bitrate into the tag list */
    if (stream->fragment.bitrate != 0) {
      if (tags)
        tags = gst_tag_list_make_writable (tags);
      else
        tags = gst_tag_list_new_empty ();

      gst_tag_list_add (tags, GST_TAG_MERGE_KEEP,
          GST_TAG_NOMINAL_BITRATE, stream->fragment.bitrate, NULL);
    }
    if (tags)
      pending_tags = gst_event_new_tag (tags);
  }
  if (G_UNLIKELY (stream->pending_events)) {
    pending_events = stream->pending_events;
    stream->pending_events = NULL;
  }

  GST_MANIFEST_UNLOCK (demux);

  /* Do not push events or buffers holding the manifest lock */
  if (G_UNLIKELY (pending_caps)) {
    GST_DEBUG_OBJECT (stream->pad, "Setting pending caps: %" GST_PTR_FORMAT,
        pending_caps);
    gst_pad_push_event (stream->pad, pending_caps);
  }
  if (G_UNLIKELY (pending_segment)) {
    GST_DEBUG_OBJECT (stream->pad, "Sending pending seg: %" GST_PTR_FORMAT,
        pending_segment);
    gst_pad_push_event (stream->pad, pending_segment);
  }
  if (G_UNLIKELY (pending_tags)) {
    GST_DEBUG_OBJECT (stream->pad, "Sending pending tags: %" GST_PTR_FORMAT,
        pending_tags);
    gst_pad_push_event (stream->pad, pending_tags);
  }
  while (pending_events != NULL) {
    GstEvent *event = pending_events->data;

    if (!gst_pad_push_event (stream->pad, event))
      GST_ERROR_OBJECT (stream->pad, "Failed to send pending event");

    pending_events = g_list_delete_link (pending_events, pending_events);
  }

  /* Wait for preroll if blocking */
  GST_DEBUG_OBJECT (stream->pad,
      "About to push buffer of size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (buffer));

  ret = gst_pad_push (stream->pad, buffer);

  GST_MANIFEST_LOCK (demux);

  /* The stream may have been cancelled while the lock was released */
  g_mutex_lock (&stream->fragment_download_lock);
  if (G_UNLIKELY (stream->cancelled)) {
    GST_LOG_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream),
        "Stream was cancelled");
    ret = stream->last_ret = GST_FLOW_FLUSHING;
    g_mutex_unlock (&stream->fragment_download_lock);
    return ret;
  }
  g_mutex_unlock (&stream->fragment_download_lock);

  GST_LOG_OBJECT (stream->pad, "Push result: %d %s", ret,
      gst_flow_get_name (ret));

  return ret;
}
+
+ /* must be called with manifest_lock taken */
+ /* Default GstAdaptiveDemuxClass::finish_fragment implementation.
+  * Advances the stream position by the fragment duration, except when the
+  * download that just finished was a header or index (those are auxiliary
+  * downloads and must not move the fragment cursor). */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_finish_fragment_default (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ /* No need to advance, this isn't a real fragment */
+ if (G_UNLIKELY (stream->downloading_header || stream->downloading_index))
+ return GST_FLOW_OK;
+
+ return gst_adaptive_demux_stream_advance_fragment (demux, stream,
+ stream->fragment.duration);
+ }
+
+ /* must be called with manifest_lock taken.
+ * Can temporarily release manifest_lock
+ */
+ /* Default GstAdaptiveDemuxClass::data_received implementation: hand the
+  * downloaded buffer straight to the stream's exposed source pad. */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_data_received_default (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstBuffer * buffer)
+ {
+ return gst_adaptive_demux_stream_push_buffer (stream, buffer);
+ }
+
+ /* Default: assume the format needs periodic manifest/playlist refreshes;
+  * subclasses override when their manifest is static. */
+ static gboolean
+ gst_adaptive_demux_requires_periodical_playlist_update_default (GstAdaptiveDemux
+ * demux)
+ {
+ return TRUE;
+ }
+
+ /* Chain function of the stream's internal sink pad (fed by the download
+  * source bin). Under the manifest lock it:
+  *  - bails out with FLUSHING if the stream was cancelled;
+  *  - on the first buffer of a fragment, timestamps it from fragment info
+  *    and updates the stream/demuxer segment positions;
+  *  - on the first buffer of a download, estimates the fragment bitrate
+  *    from a BYTES duration query when the subclass provided none;
+  *  - forwards the buffer via klass->data_received() and maps the custom
+  *    SWITCH / END_OF_FRAGMENT flow returns onto EOS handling. */
+ static GstFlowReturn
+ _src_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+ GstAdaptiveDemuxStream *stream;
+ GstAdaptiveDemux *demux;
+ GstAdaptiveDemuxClass *klass;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+ stream = gst_pad_get_element_private (pad);
+ klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ GST_MANIFEST_LOCK (demux);
+
+ /* do not make any changes if the stream is cancelled */
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ gst_buffer_unref (buffer);
+ ret = stream->last_ret = GST_FLOW_FLUSHING;
+ GST_MANIFEST_UNLOCK (demux);
+ return ret;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ /* starting_fragment is set to TRUE at the beginning of
+ * _stream_download_fragment()
+ * /!\ If there is a header/index being downloaded, then this will
+ * be TRUE for the first one ... but FALSE for the remaining ones,
+ * including the *actual* fragment ! */
+ if (stream->starting_fragment) {
+ GstClockTime offset =
+ gst_adaptive_demux_stream_get_presentation_offset (demux, stream);
+ GstClockTime period_start =
+ gst_adaptive_demux_get_period_start_time (demux);
+
+ stream->starting_fragment = FALSE;
+ if (klass->start_fragment) {
+ if (!klass->start_fragment (demux, stream)) {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ GST_BUFFER_PTS (buffer) = stream->fragment.timestamp;
+ if (GST_BUFFER_PTS_IS_VALID (buffer))
+ GST_BUFFER_PTS (buffer) += offset;
+
+ GST_LOG_OBJECT (stream->pad, "set fragment pts=%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
+
+ if (GST_BUFFER_PTS_IS_VALID (buffer)) {
+ GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+ stream->segment.position = GST_BUFFER_PTS (buffer);
+
+ /* Convert from position inside the stream's segment to the demuxer's
+ * segment, they are not necessarily the same */
+ if (stream->segment.position - offset + period_start >
+ demux->segment.position)
+ demux->segment.position =
+ stream->segment.position - offset + period_start;
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+ }
+
+ } else {
+ /* Only the first buffer of a fragment carries a timestamp; the rest
+ * are appended with no PTS of their own */
+ GST_BUFFER_PTS (buffer) = GST_CLOCK_TIME_NONE;
+ }
+
+ /* downloading_first_buffer is set to TRUE in download_uri() just before
+ * activating the source (i.e. requesting a given URI)
+ *
+ * The difference with starting_fragment is that this will be called
+ * for *all* first buffers (of index, and header, and fragment)
+ *
+ * ... to then only do something useful (in this block) for actual
+ * fragments... */
+ if (stream->downloading_first_buffer) {
+ gint64 chunk_size = 0;
+
+ stream->downloading_first_buffer = FALSE;
+
+ if (!stream->downloading_header && !stream->downloading_index) {
+ /* If this is the first buffer of a fragment (not the headers or index)
+ * and we don't have a birate from the sub-class, then see if we
+ * can work it out from the fragment size and duration */
+ if (stream->fragment.bitrate == 0 &&
+ stream->fragment.duration != 0 &&
+ gst_element_query_duration (stream->uri_handler, GST_FORMAT_BYTES,
+ &chunk_size) && chunk_size != -1) {
+ guint bitrate = MIN (G_MAXUINT, gst_util_uint64_scale (chunk_size,
+ 8 * GST_SECOND, stream->fragment.duration));
+ GST_LOG_OBJECT (demux,
+ "Fragment has size %" G_GINT64_FORMAT " duration %" GST_TIME_FORMAT
+ " = bitrate %u", chunk_size,
+ GST_TIME_ARGS (stream->fragment.duration), bitrate);
+ stream->fragment.bitrate = bitrate;
+ }
+ if (stream->fragment.bitrate) {
+ stream->bitrate_changed = TRUE;
+ } else {
+ GST_WARNING_OBJECT (demux, "Bitrate for fragment not available");
+ }
+ }
+ }
+
+ stream->download_total_bytes += gst_buffer_get_size (buffer);
+
+ GST_TRACE_OBJECT (stream->pad, "Received buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buffer));
+
+ /* Hand the buffer to the subclass (takes ownership of it) */
+ ret = klass->data_received (demux, stream, buffer);
+
+ if (ret == GST_FLOW_FLUSHING) {
+ /* do not make any changes if the stream is cancelled */
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ GST_MANIFEST_UNLOCK (demux);
+ return ret;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+
+ /* Any non-OK result terminates this download; custom flow returns are
+ * translated so the upstream source sees a plain EOS */
+ if (ret != GST_FLOW_OK) {
+ gboolean finished = FALSE;
+
+ if (ret < GST_FLOW_EOS) {
+ GST_ELEMENT_FLOW_ERROR (demux, ret);
+
+ /* TODO push this on all pads */
+ gst_pad_push_event (stream->pad, gst_event_new_eos ());
+ } else {
+ GST_DEBUG_OBJECT (stream->pad, "stream stopped, reason %s",
+ gst_flow_get_name (ret));
+ }
+
+ if (ret == (GstFlowReturn) GST_ADAPTIVE_DEMUX_FLOW_SWITCH) {
+ ret = GST_FLOW_EOS; /* return EOS to make the source stop */
+ } else if (ret == GST_ADAPTIVE_DEMUX_FLOW_END_OF_FRAGMENT) {
+ /* Behaves like an EOS event from upstream */
+ stream->fragment.finished = TRUE;
+ ret = klass->finish_fragment (demux, stream);
+ if (ret == (GstFlowReturn) GST_ADAPTIVE_DEMUX_FLOW_SWITCH) {
+ ret = GST_FLOW_EOS; /* return EOS to make the source stop */
+ } else if (ret != GST_FLOW_OK) {
+ goto error;
+ }
+ finished = TRUE;
+ }
+
+ gst_adaptive_demux_stream_fragment_download_finish (stream, ret, NULL);
+ if (finished)
+ ret = GST_FLOW_EOS;
+ }
+
+ error:
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Records the final result of a download. Only the FIRST error is kept
+  * (last_ret/last_error are not overwritten once non-OK), then
+  * download_finished is flagged and the downloader thread waiting on
+  * fragment_download_cond is woken. */
+ static void
+ gst_adaptive_demux_stream_fragment_download_finish (GstAdaptiveDemuxStream *
+ stream, GstFlowReturn ret, GError * err)
+ {
+ GST_DEBUG_OBJECT (stream->pad, "Download finish: %d %s - err: %p", ret,
+ gst_flow_get_name (ret), err);
+
+ /* if we have an error, only replace last_ret if it was OK before to avoid
+ * overwriting the first error we got */
+ if (stream->last_ret == GST_FLOW_OK) {
+ stream->last_ret = ret;
+ if (err) {
+ g_clear_error (&stream->last_error);
+ stream->last_error = g_error_copy (err);
+ }
+ }
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->download_finished = TRUE;
+ g_cond_signal (&stream->fragment_download_cond);
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+
+ /* Handles end-of-download for a stream: unless the subclass asks for
+  * another chunk, marks the fragment finished, computes a last-resort
+  * nominal bitrate from the bytes actually downloaded when nothing better
+  * is known, and calls klass->finish_fragment(). Always signals the
+  * waiting downloader via fragment_download_finish(). */
+ static GstFlowReturn
+ gst_adaptive_demux_eos_handling (GstAdaptiveDemuxStream * stream)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (stream->demux);
+
+ if (!klass->need_another_chunk || stream->fragment.chunk_size == -1
+ || !klass->need_another_chunk (stream)
+ || stream->fragment.chunk_size == 0) {
+ stream->fragment.finished = TRUE;
+
+ /* Last chance to figure out a fallback nominal bitrate if neither baseclass
+ nor the HTTP Content-Length implementation worked. */
+ if (stream->fragment.bitrate == 0 && stream->fragment.duration != 0 &&
+ stream->fragment_bytes_downloaded != 0 && !stream->downloading_index &&
+ !stream->downloading_header) {
+ guint bitrate = MIN (G_MAXUINT,
+ gst_util_uint64_scale (stream->fragment_bytes_downloaded,
+ 8 * GST_SECOND, stream->fragment.duration));
+ GST_LOG_OBJECT (stream->pad,
+ "Fragment has size %" G_GUINT64_FORMAT " duration %" GST_TIME_FORMAT
+ " = bitrate %u", stream->fragment_bytes_downloaded,
+ GST_TIME_ARGS (stream->fragment.duration), bitrate);
+ stream->fragment.bitrate = bitrate;
+ stream->bitrate_changed = TRUE;
+ }
+ ret = klass->finish_fragment (stream->demux, stream);
+ }
+ gst_adaptive_demux_stream_fragment_download_finish (stream, ret, NULL);
+
+ return ret;
+ }
+
+ /* Event function of the stream's internal sink pad. Only EOS is acted on
+  * (runs EOS handling under the manifest lock and wakes the downloader);
+  * every event is consumed and TRUE is returned. */
+ static gboolean
+ _src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstAdaptiveDemuxStream *stream = gst_pad_get_element_private (pad);
+ GstAdaptiveDemux *demux = stream->demux;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:{
+ GST_DEBUG_OBJECT (pad, "Saw EOS on src pad");
+ GST_MANIFEST_LOCK (demux);
+
+ gst_adaptive_demux_eos_handling (stream);
+
+ /* FIXME ?
+ * _eos_handling() calls fragment_download_finish() which does the
+ * same thing as below.
+ * Could this cause races ? */
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->download_finished = TRUE;
+ g_cond_signal (&stream->fragment_download_cond);
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ GST_MANIFEST_UNLOCK (demux);
+ break;
+ }
+ default:
+ break;
+ }
+
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+
+ /* Query function of the stream's internal sink pad: refuse ALLOCATION
+  * queries (the internal pad must not take part in buffer-pool
+  * negotiation) and proxy everything else downstream of the exposed
+  * source pad. */
+ static gboolean
+ _src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstAdaptiveDemuxStream *stream = gst_pad_get_element_private (pad);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_ALLOCATION:
+ return FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return gst_pad_peer_query (stream->pad, query);
+ }
+
+ /* Downstream data/event probe on the uri_handler's src pad. Accounts
+  * downloaded bytes, records first-byte latency, resets the byte count on
+  * SEGMENT, and on EOS computes the total download time and bitrate for
+  * this URI. Always returns GST_PAD_PROBE_OK (data is never dropped). */
+ static GstPadProbeReturn
+ _uri_handler_probe (GstPad * pad, GstPadProbeInfo * info,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstPadProbeReturn ret = GST_PAD_PROBE_OK;
+
+ if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_BUFFER) {
+ GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);
+ /* First byte of this download: latency = now - download_start_time */
+ if (stream->fragment_bytes_downloaded == 0) {
+ stream->last_latency =
+ gst_adaptive_demux_get_monotonic_time (stream->demux) -
+ (stream->download_start_time * GST_USECOND);
+ GST_DEBUG_OBJECT (pad,
+ "FIRST BYTE since download_start %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->last_latency));
+ }
+ stream->fragment_bytes_downloaded += gst_buffer_get_size (buf);
+ GST_LOG_OBJECT (pad,
+ "Received buffer, size %" G_GSIZE_FORMAT " total %" G_GUINT64_FORMAT,
+ gst_buffer_get_size (buf), stream->fragment_bytes_downloaded);
+ } else if (GST_PAD_PROBE_INFO_TYPE (info) &
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
+ GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);
+ GST_LOG_OBJECT (pad, "Received event %s %" GST_PTR_FORMAT,
+ GST_EVENT_TYPE_NAME (ev), ev);
+ switch (GST_EVENT_TYPE (ev)) {
+ case GST_EVENT_SEGMENT:
+ stream->fragment_bytes_downloaded = 0;
+ break;
+ case GST_EVENT_EOS:
+ {
+ stream->last_download_time =
+ gst_adaptive_demux_get_monotonic_time (stream->demux) -
+ (stream->download_start_time * GST_USECOND);
+ stream->last_bitrate =
+ gst_util_uint64_scale (stream->fragment_bytes_downloaded,
+ 8 * GST_SECOND, stream->last_download_time);
+ GST_DEBUG_OBJECT (pad,
+ "EOS since download_start %" GST_TIME_FORMAT " bitrate %"
+ G_GUINT64_FORMAT " bps", GST_TIME_ARGS (stream->last_download_time),
+ stream->last_bitrate);
+ /* Calculate bitrate since URI request */
+ }
+ break;
+ default:
+ break;
+ }
+ }
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken.
+ * Can temporarily release manifest_lock
+ */
+ /* Blocks a live stream until one of: the stream is cancelled (returns
+  * FALSE with last_ret = FLUSHING), a new fragment becomes available in
+  * the updated manifest (returns TRUE), or the stream stops being live
+  * (returns FALSE). Note the careful lock ordering documented below. */
+ static gboolean
+ gst_adaptive_demux_stream_wait_manifest_update (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ gboolean ret = TRUE;
+
+ /* Wait until we're cancelled or there's something for
+ * us to download in the playlist or the playlist
+ * became non-live */
+ while (TRUE) {
+ GST_DEBUG_OBJECT (demux, "No fragment left but live playlist, wait a bit");
+
+ /* get the manifest_update_lock while still holding the manifest_lock.
+ * This will prevent other threads to signal the condition (they will need
+ * both manifest_lock and manifest_update_lock in order to signal).
+ * It cannot deadlock because all threads always get the manifest_lock first
+ * and manifest_update_lock second.
+ */
+ g_mutex_lock (&demux->priv->manifest_update_lock);
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ g_cond_wait (&demux->priv->manifest_cond,
+ &demux->priv->manifest_update_lock);
+ g_mutex_unlock (&demux->priv->manifest_update_lock);
+
+ GST_MANIFEST_LOCK (demux);
+
+ /* check for cancelled every time we get the manifest_lock */
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ ret = FALSE;
+ stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ break;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ /* Got a new fragment or not live anymore? */
+ if (gst_adaptive_demux_stream_update_fragment_info (demux, stream) ==
+ GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "new fragment available, "
+ "not waiting for manifest update");
+ ret = TRUE;
+ break;
+ }
+
+ if (!gst_adaptive_demux_is_live (demux)) {
+ GST_DEBUG_OBJECT (demux, "Not live anymore, "
+ "not waiting for manifest update");
+ ret = FALSE;
+ break;
+ }
+ }
+ GST_DEBUG_OBJECT (demux, "Retrying now");
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Creates (or re-uses, when the URI protocol matches) the stream's
+  * download source: a bin of uri_handler ! queue, where the queue lets the
+  * uri_handler fetch a whole fragment without blocking so download bitrate
+  * can be measured accurately. Also configures HTTP-related properties
+  * (compress, keep-alive, extra-headers, and on Tizen user-agent/cookies)
+  * when the source element exposes them, installs the byte/latency probe,
+  * and wires the bin's ghostpad to the stream's internal sink pad.
+  * Returns FALSE on invalid URI, missing plugin or linking failure. */
+ static gboolean
+ gst_adaptive_demux_stream_update_source (GstAdaptiveDemuxStream * stream,
+ const gchar * uri, const gchar * referer, gboolean refresh,
+ gboolean allow_cache)
+ {
+ GstAdaptiveDemux *demux = stream->demux;
+
+ if (!gst_uri_is_valid (uri)) {
+ GST_WARNING_OBJECT (stream->pad, "Invalid URI: %s", uri);
+ return FALSE;
+ }
+
+ /* Try to re-use existing source element */
+ if (stream->src != NULL) {
+ gchar *old_protocol, *new_protocol;
+ gchar *old_uri;
+
+ old_uri = gst_uri_handler_get_uri (GST_URI_HANDLER (stream->uri_handler));
+ old_protocol = gst_uri_get_protocol (old_uri);
+ new_protocol = gst_uri_get_protocol (uri);
+
+ if (!g_str_equal (old_protocol, new_protocol)) {
+ GstElement *src = stream->src;
+
+ stream->src = NULL;
+ gst_object_unref (stream->src_srcpad);
+ stream->src_srcpad = NULL;
+ /* shutting the old source down may join its streaming thread, so the
+ * manifest lock must not be held across the state change */
+ GST_MANIFEST_UNLOCK (demux);
+ gst_element_set_locked_state (src, TRUE);
+ gst_element_set_state (src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (demux), src);
+ GST_MANIFEST_LOCK (demux);
+ GST_DEBUG_OBJECT (demux, "Can't re-use old source element");
+ } else {
+ GError *err = NULL;
+
+ GST_DEBUG_OBJECT (demux, "Re-using old source element");
+ if (!gst_uri_handler_set_uri (GST_URI_HANDLER (stream->uri_handler), uri,
+ &err)) {
+ GstElement *src = stream->src;
+
+ stream->src = NULL;
+ GST_DEBUG_OBJECT (demux, "Failed to re-use old source element: %s",
+ err ? err->message : "Unknown error");
+ g_clear_error (&err);
+ gst_object_unref (stream->src_srcpad);
+ stream->src_srcpad = NULL;
+ GST_MANIFEST_UNLOCK (demux);
+ gst_element_set_locked_state (src, TRUE);
+ gst_element_set_state (src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (demux), src);
+ GST_MANIFEST_LOCK (demux);
+ }
+ }
+ g_free (old_uri);
+ g_free (old_protocol);
+ g_free (new_protocol);
+ }
+
+ if (stream->src == NULL) {
+ GstPad *uri_handler_src;
+ GstPad *queue_sink;
+ GstPad *queue_src;
+ GstElement *uri_handler;
+ GstElement *queue;
+ GstPadLinkReturn pad_link_ret;
+ GObjectClass *gobject_class;
+ gchar *internal_name, *bin_name;
+
+ /* Our src consists of a bin containing uri_handler -> queue . The
+ * purpose of the queue is to allow the uri_handler to download an
+ * entire fragment without blocking, so we can accurately measure the
+ * download bitrate. */
+
+ queue = gst_element_factory_make ("queue", NULL);
+ if (queue == NULL)
+ return FALSE;
+
+ /* only limit by bytes: time/buffer limits would throttle the download */
+ g_object_set (queue, "max-size-bytes", (guint) SRC_QUEUE_MAX_BYTES, NULL);
+ g_object_set (queue, "max-size-buffers", (guint) 0, NULL);
+ g_object_set (queue, "max-size-time", (guint64) 0, NULL);
+
+ uri_handler = gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
+ if (uri_handler == NULL) {
+ GST_ELEMENT_ERROR (demux, CORE, MISSING_PLUGIN,
+ ("Missing plugin to handle URI: '%s'", uri), (NULL));
+ gst_object_unref (queue);
+ return FALSE;
+ }
+
+ /* properties are set only if the source element exposes them */
+ gobject_class = G_OBJECT_GET_CLASS (uri_handler);
+
+ if (g_object_class_find_property (gobject_class, "compress"))
+ g_object_set (uri_handler, "compress", FALSE, NULL);
+ if (g_object_class_find_property (gobject_class, "keep-alive"))
+ g_object_set (uri_handler, "keep-alive", TRUE, NULL);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ if (g_object_class_find_property (gobject_class, "user-agent")
++ && (demux->user_agent))
++ g_object_set (uri_handler, "user-agent", demux->user_agent, NULL);
++ if (g_object_class_find_property (gobject_class, "cookies")
++ && (demux->cookies))
++ g_object_set (uri_handler, "cookies", demux->cookies, NULL);
++#endif
+ if (g_object_class_find_property (gobject_class, "extra-headers")) {
+ if (referer || refresh || !allow_cache) {
+ GstStructure *extra_headers = gst_structure_new_empty ("headers");
+
+ if (referer)
+ gst_structure_set (extra_headers, "Referer", G_TYPE_STRING, referer,
+ NULL);
+
+ if (!allow_cache)
+ gst_structure_set (extra_headers, "Cache-Control", G_TYPE_STRING,
+ "no-cache", NULL);
+ else if (refresh)
+ gst_structure_set (extra_headers, "Cache-Control", G_TYPE_STRING,
+ "max-age=0", NULL);
+
+ g_object_set (uri_handler, "extra-headers", extra_headers, NULL);
+
+ gst_structure_free (extra_headers);
+ } else {
+ g_object_set (uri_handler, "extra-headers", NULL, NULL);
+ }
+ }
+
+ /* Source bin creation */
+ bin_name = g_strdup_printf ("srcbin-%s", GST_PAD_NAME (stream->pad));
+ stream->src = gst_bin_new (bin_name);
+ g_free (bin_name);
+ if (stream->src == NULL) {
+ gst_object_unref (queue);
+ gst_object_unref (uri_handler);
+ return FALSE;
+ }
+
+ gst_bin_add (GST_BIN_CAST (stream->src), queue);
+ gst_bin_add (GST_BIN_CAST (stream->src), uri_handler);
+
+ uri_handler_src = gst_element_get_static_pad (uri_handler, "src");
+ queue_sink = gst_element_get_static_pad (queue, "sink");
+
+ pad_link_ret =
+ gst_pad_link_full (uri_handler_src, queue_sink,
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (GST_PAD_LINK_FAILED (pad_link_ret)) {
+ GST_WARNING_OBJECT (demux,
+ "Could not link pads %s:%s to %s:%s for reason %d",
+ GST_DEBUG_PAD_NAME (uri_handler_src), GST_DEBUG_PAD_NAME (queue_sink),
+ pad_link_ret);
+ g_object_unref (queue_sink);
+ g_object_unref (uri_handler_src);
+ gst_object_unref (stream->src);
+ stream->src = NULL;
+ return FALSE;
+ }
+
+ /* Add a downstream event and data probe */
+ gst_pad_add_probe (uri_handler_src, GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM,
+ (GstPadProbeCallback) _uri_handler_probe, stream, NULL);
+
+ g_object_unref (queue_sink);
+ g_object_unref (uri_handler_src);
+ queue_src = gst_element_get_static_pad (queue, "src");
+ stream->src_srcpad = gst_ghost_pad_new ("src", queue_src);
+ g_object_unref (queue_src);
+ gst_element_add_pad (stream->src, stream->src_srcpad);
+
+ gst_element_set_locked_state (stream->src, TRUE);
+ gst_bin_add (GST_BIN_CAST (demux), stream->src);
+ stream->src_srcpad = gst_element_get_static_pad (stream->src, "src");
+
+ /* set up our internal floating pad to drop all events from
+ * the http src we don't care about. On the chain function
+ * we just push the buffer forward */
+ internal_name = g_strdup_printf ("internal-%s", GST_PAD_NAME (stream->pad));
+ stream->internal_pad = gst_pad_new (internal_name, GST_PAD_SINK);
+ g_free (internal_name);
+ gst_object_set_parent (GST_OBJECT_CAST (stream->internal_pad),
+ GST_OBJECT_CAST (demux));
+ GST_OBJECT_FLAG_SET (stream->internal_pad, GST_PAD_FLAG_NEED_PARENT);
+ gst_pad_set_element_private (stream->internal_pad, stream);
+ gst_pad_set_active (stream->internal_pad, TRUE);
+ gst_pad_set_chain_function (stream->internal_pad, _src_chain);
+ gst_pad_set_event_function (stream->internal_pad, _src_event);
+ gst_pad_set_query_function (stream->internal_pad, _src_query);
+
+ if (gst_pad_link_full (stream->src_srcpad, stream->internal_pad,
+ GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK) {
+ GST_ERROR_OBJECT (stream->pad, "Failed to link internal pad");
+ return FALSE;
+ }
+
+ stream->uri_handler = uri_handler;
+ stream->queue = queue;
+
+ stream->last_status_code = 200; /* default to OK */
+ }
+ return TRUE;
+ }
+
+ /* One-shot IDLE pad probe: the source's src pad is now idle, so flag
+  * src_at_ready and wake the downloader thread that is waiting to bring
+  * the source back to READY. The probe removes itself. */
+ static GstPadProbeReturn
+ gst_ad_stream_src_to_ready_cb (GstPad * pad, GstPadProbeInfo * info,
+ gpointer user_data)
+ {
+ GstAdaptiveDemuxStream *stream = user_data;
+
+ /* The source's src pad is IDLE so now set the state to READY */
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->src_at_ready = TRUE;
+ g_cond_signal (&stream->fragment_download_cond);
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ return GST_PAD_PROBE_REMOVE;
+ }
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* Debug-log helper: human-readable label for what the stream is currently
+  * downloading (header, index, or a real media fragment). */
+ static const char *
+ uritype (GstAdaptiveDemuxStream * s)
+ {
+ if (s->downloading_header)
+ return "header";
+ if (s->downloading_index)
+ return "index";
+ return "fragment";
+ }
+ #endif
+
+ /* must be called with manifest_lock taken.
+ * Can temporarily release manifest_lock
+ *
+ * Will return when URI is fully downloaded (or aborted/errored)
+ */
+ /* Drives one URI download end-to-end: (re)configures the source bin via
+  * update_source(), brings it to READY, optionally seeks to the byte range
+  * [start, end] (HTTP ranges are inclusive, so end is bumped by one for
+  * the GStreamer segment), starts the source, then blocks on
+  * fragment_download_cond until the download finishes, errors or is
+  * cancelled. On exit the source is parked back in READY via an IDLE pad
+  * probe and the internal ghostpad is cycled for the next download.
+  * http_status (optional out) reports the last HTTP status on failure. */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_download_uri (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, const gchar * uri, gint64 start,
+ gint64 end, guint * http_status)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GST_DEBUG_OBJECT (stream->pad,
+ "Downloading %s uri: %s, range:%" G_GINT64_FORMAT " - %" G_GINT64_FORMAT,
+ uritype (stream), uri, start, end);
+
+ if (http_status)
+ *http_status = 200; /* default to ok if no further information */
+
+ if (!gst_adaptive_demux_stream_update_source (stream, uri, NULL, FALSE, TRUE)) {
+ ret = stream->last_ret = GST_FLOW_ERROR;
+ return ret;
+ }
+
+ gst_element_set_locked_state (stream->src, TRUE);
+
+ /* state changes may join streaming threads; never hold the manifest lock
+ * across them */
+ GST_MANIFEST_UNLOCK (demux);
+ if (gst_element_set_state (stream->src,
+ GST_STATE_READY) != GST_STATE_CHANGE_FAILURE) {
+ /* If ranges are specified, seek to it */
+ if (start != 0 || end != -1) {
+ /* HTTP ranges are inclusive, GStreamer segments are exclusive for the
+ * stop position */
+ if (end != -1)
+ end += 1;
+ /* Send the seek event to the uri_handler, as the other pipeline elements
+ * can't handle it when READY. */
+ if (!gst_element_send_event (stream->uri_handler, gst_event_new_seek (1.0,
+ GST_FORMAT_BYTES, (GstSeekFlags) GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_SET, end))) {
+
+ GST_MANIFEST_LOCK (demux);
+ /* looks like the source can't handle seeks in READY */
+ g_clear_error (&stream->last_error);
+ stream->last_error = g_error_new (GST_CORE_ERROR,
+ GST_CORE_ERROR_NOT_IMPLEMENTED,
+ "Source element can't handle range requests");
+ stream->last_ret = GST_FLOW_ERROR;
+ } else {
+ GST_MANIFEST_LOCK (demux);
+ }
+ } else {
+ GST_MANIFEST_LOCK (demux);
+ }
+
+ if (G_LIKELY (stream->last_ret == GST_FLOW_OK)) {
+ stream->download_start_time =
+ GST_TIME_AS_USECONDS (gst_adaptive_demux_get_monotonic_time (demux));
+
+ /* src element is in state READY. Before we start it, we reset
+ * download_finished
+ */
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->download_finished = FALSE;
+ stream->downloading_first_buffer = TRUE;
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ if (!gst_element_sync_state_with_parent (stream->src)) {
+ GST_WARNING_OBJECT (demux, "Could not sync state for src element");
+ GST_MANIFEST_LOCK (demux);
+ ret = stream->last_ret = GST_FLOW_ERROR;
+ return ret;
+ }
+
+ /* wait for the fragment to be completely downloaded */
+ GST_DEBUG_OBJECT (stream->pad,
+ "Waiting for %s download to finish: %s", uritype (stream), uri);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ stream->src_at_ready = FALSE;
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ GST_MANIFEST_LOCK (demux);
+ ret = stream->last_ret = GST_FLOW_FLUSHING;
+ return ret;
+ }
+ /* download_finished is only set:
+ * * in ::fragment_download_finish()
+ * * if EOS is received on the _src pad
+ * */
+ while (!stream->cancelled && !stream->download_finished) {
+ g_cond_wait (&stream->fragment_download_cond,
+ &stream->fragment_download_lock);
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Finished Waiting for %s download: %s", uritype (stream), uri);
+
+ GST_MANIFEST_LOCK (demux);
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ ret = stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ return ret;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ ret = stream->last_ret;
+
+ GST_DEBUG_OBJECT (stream->pad, "%s download finished: %s %d %s",
+ uritype (stream), uri, stream->last_ret,
+ gst_flow_get_name (stream->last_ret));
+ if (stream->last_ret != GST_FLOW_OK && http_status) {
+ *http_status = stream->last_status_code;
+ }
+ }
+
+ /* changing src element state might try to join the streaming thread, so
+ * we must not hold the manifest lock.
+ */
+ GST_MANIFEST_UNLOCK (demux);
+ } else {
+ GST_MANIFEST_UNLOCK (demux);
+ if (stream->last_ret == GST_FLOW_OK)
+ stream->last_ret = GST_FLOW_CUSTOM_ERROR;
+ ret = GST_FLOW_CUSTOM_ERROR;
+ }
+
+ stream->src_at_ready = FALSE;
+
+ gst_element_set_locked_state (stream->src, TRUE);
+ /* wait for the src pad to go IDLE before touching the source's state */
+ gst_pad_add_probe (stream->src_srcpad, GST_PAD_PROBE_TYPE_IDLE,
+ gst_ad_stream_src_to_ready_cb, stream, NULL);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ while (!stream->src_at_ready) {
+ g_cond_wait (&stream->fragment_download_cond,
+ &stream->fragment_download_lock);
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ gst_element_set_state (stream->src, GST_STATE_READY);
+
+ /* Need to drop the fragment_download_lock to get the MANIFEST lock */
+ GST_MANIFEST_LOCK (demux);
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ ret = stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ return ret;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ /* deactivate and reactivate our ghostpad to make it fresh for a new
+ * stream */
+ gst_pad_set_active (stream->internal_pad, FALSE);
+ gst_pad_set_active (stream->internal_pad, TRUE);
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken.
+ * Can temporarily release manifest_lock
+ */
+ /* Downloads the fragment's header URI and then, if the header succeeded,
+  * its index URI. downloading_header/downloading_index are flagged around
+  * each download so the rest of the code can distinguish these auxiliary
+  * downloads from real media fragments. */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_download_header_fragment (GstAdaptiveDemuxStream *
+ stream)
+ {
+ GstAdaptiveDemux *demux = stream->demux;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (stream->fragment.header_uri != NULL) {
+ GST_DEBUG_OBJECT (demux, "Fetching header %s %" G_GINT64_FORMAT "-%"
+ G_GINT64_FORMAT, stream->fragment.header_uri,
+ stream->fragment.header_range_start, stream->fragment.header_range_end);
+
+ stream->downloading_header = TRUE;
+ ret = gst_adaptive_demux_stream_download_uri (demux, stream,
+ stream->fragment.header_uri, stream->fragment.header_range_start,
+ stream->fragment.header_range_end, NULL);
+ stream->downloading_header = FALSE;
+ }
+
+ /* check if we have an index */
+ if (ret == GST_FLOW_OK) { /* TODO check for other valid types */
+
+ if (stream->fragment.index_uri != NULL) {
+ GST_DEBUG_OBJECT (demux,
+ "Fetching index %s %" G_GINT64_FORMAT "-%" G_GINT64_FORMAT,
+ stream->fragment.index_uri,
+ stream->fragment.index_range_start, stream->fragment.index_range_end);
+ stream->downloading_index = TRUE;
+ ret = gst_adaptive_demux_stream_download_uri (demux, stream,
+ stream->fragment.index_uri, stream->fragment.index_range_start,
+ stream->fragment.index_range_end, NULL);
+ stream->downloading_index = FALSE;
+ }
+ }
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken.
+ * Can temporarily release manifest_lock
+ */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_download_fragment (GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemux *demux = stream->demux;
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ gchar *url = NULL;
+ GstFlowReturn ret;
+ gboolean retried_once = FALSE, live;
+ guint http_status;
+ guint last_status_code;
+
+ /* FIXME : */
+ /* THERE ARE THREE DIFFERENT VARIABLES FOR THE "BEGINNING" OF A FRAGMENT ! */
+ stream->starting_fragment = TRUE;
+ stream->last_ret = GST_FLOW_OK;
+ stream->first_fragment_buffer = TRUE;
+
+ GST_DEBUG_OBJECT (stream->pad, "Downloading %s%s%s",
+ stream->fragment.uri ? "FRAGMENT " : "",
+ stream->fragment.header_uri ? "HEADER " : "",
+ stream->fragment.index_uri ? "INDEX" : "");
+
+ if (stream->fragment.uri == NULL && stream->fragment.header_uri == NULL &&
+ stream->fragment.index_uri == NULL)
+ goto no_url_error;
+
+ if (stream->need_header) {
+ ret = gst_adaptive_demux_stream_download_header_fragment (stream);
+ if (ret != GST_FLOW_OK) {
+ return ret;
+ }
+ stream->need_header = FALSE;
+ }
+
+ again:
+ ret = GST_FLOW_OK;
+ url = stream->fragment.uri;
+ GST_DEBUG_OBJECT (stream->pad, "Got url '%s' for stream %p", url, stream);
+ if (!url)
+ return ret;
+
+ stream->last_ret = GST_FLOW_OK;
+ http_status = 200;
+
+ /* Download the actual fragment, either in fragments or in one go */
+ if (klass->need_another_chunk && klass->need_another_chunk (stream)
+ && stream->fragment.chunk_size != 0) {
+ /* Handle chunk downloading */
+ gint64 range_start, range_end, chunk_start, chunk_end;
+ guint64 download_total_bytes;
+ gint chunk_size = stream->fragment.chunk_size;
+
+ range_start = chunk_start = stream->fragment.range_start;
+ range_end = stream->fragment.range_end;
+ /* HTTP ranges are inclusive for the end */
+ if (chunk_size != -1)
+ chunk_end = range_start + chunk_size - 1;
+ else
+ chunk_end = range_end;
+
+ if (range_end != -1)
+ chunk_end = MIN (chunk_end, range_end);
+
+ while (!stream->fragment.finished && (chunk_start <= range_end
+ || range_end == -1)) {
+ download_total_bytes = stream->download_total_bytes;
+
+ ret =
+ gst_adaptive_demux_stream_download_uri (demux, stream, url,
+ chunk_start, chunk_end, &http_status);
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Fragment chunk download result: %d (%d) %s", stream->last_ret,
+ http_status, gst_flow_get_name (stream->last_ret));
+
+ /* Don't retry for any chunks except the first. We would have sent
+ * data downstream already otherwise and it's difficult to recover
+ * from that in a meaningful way */
+ if (chunk_start > range_start)
+ retried_once = TRUE;
+
+ /* FIXME: Check for 416 Range Not Satisfiable here and fall back to
+ * downloading up to -1. We don't know the full duration.
+ * Needs https://bugzilla.gnome.org/show_bug.cgi?id=756806 */
+ if (ret != GST_FLOW_OK && chunk_end == -1) {
+ break;
+ } else if (ret != GST_FLOW_OK) {
+ chunk_end = -1;
+ stream->last_ret = GST_FLOW_OK;
+ continue;
+ }
+
+ if (chunk_end == -1)
+ break;
+
+ /* Short read, we're at the end now */
+ if (stream->download_total_bytes - download_total_bytes <
+ chunk_end + 1 - chunk_start)
+ break;
+
+ if (!klass->need_another_chunk (stream))
+ break;
+
+ /* HTTP ranges are inclusive for the end */
+ chunk_start += chunk_size;
+ chunk_size = stream->fragment.chunk_size;
+ if (chunk_size != -1)
+ chunk_end = chunk_start + chunk_size - 1;
+ else
+ chunk_end = range_end;
+
+ if (range_end != -1)
+ chunk_end = MIN (chunk_end, range_end);
+ }
+ } else {
+ ret =
+ gst_adaptive_demux_stream_download_uri (demux, stream, url,
+ stream->fragment.range_start, stream->fragment.range_end, &http_status);
+ GST_DEBUG_OBJECT (stream->pad, "Fragment download result: %d (%d) %s",
+ stream->last_ret, http_status, gst_flow_get_name (stream->last_ret));
+ }
+ if (ret == GST_FLOW_OK)
+ goto beach;
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ return ret;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ /* TODO check if we are truly stopping */
+ if (ret != GST_FLOW_CUSTOM_ERROR)
+ goto beach;
+
+ last_status_code = stream->last_status_code;
+ GST_WARNING_OBJECT (stream->pad, "Got custom error, status %u, dc %d",
+ last_status_code, stream->download_error_count);
+
+ live = gst_adaptive_demux_is_live (demux);
+ if (!retried_once && ((last_status_code / 100 == 4 && live)
+ || last_status_code / 100 == 5)) {
+ /* 4xx/5xx */
+ /* if current position is before available start, switch to next */
+ if (!gst_adaptive_demux_stream_has_next_fragment (demux, stream))
+ goto flushing;
+
+ if (live) {
+ gint64 range_start, range_stop;
+
+ if (!gst_adaptive_demux_get_live_seek_range (demux, &range_start,
+ &range_stop))
+ goto flushing;
+
+ if (demux->segment.position < range_start) {
+ GST_DEBUG_OBJECT (stream->pad, "Retrying once with next segment");
+ stream->last_ret = GST_FLOW_OK;
+ ret = gst_adaptive_demux_eos_handling (stream);
+ GST_DEBUG_OBJECT (stream->pad, "finish_fragment: %s",
+ gst_flow_get_name (ret));
+ GST_DEBUG_OBJECT (demux, "Calling update_fragment_info");
+ ret = gst_adaptive_demux_stream_update_fragment_info (demux, stream);
+ GST_DEBUG_OBJECT (stream->pad, "finish_fragment: %s",
+ gst_flow_get_name (ret));
+ if (ret == GST_FLOW_OK) {
+ retried_once = TRUE;
+ goto again;
+ }
+ } else if (demux->segment.position > range_stop) {
+ /* wait a bit to be in range, we don't have any locks at that point */
+ gint64 wait_time =
+ gst_adaptive_demux_stream_get_fragment_waiting_time (demux, stream);
+ if (wait_time > 0) {
+ gint64 end_time = g_get_monotonic_time () + wait_time / GST_USECOND;
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Download waiting for %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (wait_time));
+
+ GST_MANIFEST_UNLOCK (demux);
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ GST_MANIFEST_LOCK (demux);
+ stream->last_ret = GST_FLOW_FLUSHING;
+ goto flushing;
+ }
+ do {
+ g_cond_wait_until (&stream->fragment_download_cond,
+ &stream->fragment_download_lock, end_time);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ GST_MANIFEST_LOCK (demux);
+ stream->last_ret = GST_FLOW_FLUSHING;
+ goto flushing;
+ }
+ } while (!stream->download_finished);
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ GST_MANIFEST_LOCK (demux);
+ }
+ }
+ }
+
+ flushing:
+ if (stream->download_error_count >= MAX_DOWNLOAD_ERROR_COUNT) {
+ /* looks like there is no way of knowing when a live stream has ended
+ * Have to assume we are falling behind and cause a manifest reload */
+ GST_DEBUG_OBJECT (stream->pad, "Converting error of live stream to EOS");
+ return GST_FLOW_EOS;
+ }
+ } else if (!gst_adaptive_demux_stream_has_next_fragment (demux, stream)) {
+ /* If this is the last fragment, consider failures EOS and not actual
+ * errors. Due to rounding errors in the durations, the last fragment
+ * might not actually exist */
+ GST_DEBUG_OBJECT (stream->pad, "Converting error for last fragment to EOS");
+ return GST_FLOW_EOS;
+ } else {
+ /* retry once (same segment) for 5xx (server errors) */
+ if (!retried_once) {
+ retried_once = TRUE;
+ /* wait a short time in case the server needs a bit to recover, we don't
+ * care if we get woken up before end time. We can use sleep here since
+ * we're already blocking and just want to wait some time. */
+ g_usleep (100000); /* a tenth of a second */
+ goto again;
+ }
+ }
+
+ beach:
+ return ret;
+
+ no_url_error:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("Failed to get fragment URL.")),
+ ("An error happened when getting fragment URL"));
+ gst_task_stop (stream->download_task);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ /* this function will take the manifest_lock and will keep it until the end.
+ * It will release it temporarily only when going to sleep.
+ * Every time it takes the manifest_lock, it will check for cancelled condition
+ */
+ static void
+ gst_adaptive_demux_stream_download_loop (GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemux *demux = stream->demux;
+ GstClockTime next_download = gst_adaptive_demux_get_monotonic_time (demux);
+ GstFlowReturn ret;
+ gboolean live;
+
+ GST_LOG_OBJECT (stream->pad, "download loop start");
+
+ GST_MANIFEST_LOCK (demux);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ goto cancelled;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ /* Check if we're done with our segment */
+ GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+ if (demux->segment.rate > 0) {
+ if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop)
+ && stream->segment.position >= stream->segment.stop) {
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+ ret = GST_FLOW_EOS;
+ gst_task_stop (stream->download_task);
+ goto end_of_manifest;
+ }
+ } else {
+ if (GST_CLOCK_TIME_IS_VALID (demux->segment.start)
+ && stream->segment.position <= stream->segment.start) {
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+ ret = GST_FLOW_EOS;
+ gst_task_stop (stream->download_task);
+ goto end_of_manifest;
+ }
+ }
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+ /* Cleanup old streams if any */
+ if (G_UNLIKELY (demux->priv->old_streams != NULL)) {
+ GList *old_streams = demux->priv->old_streams;
+ demux->priv->old_streams = NULL;
+
+ GST_DEBUG_OBJECT (stream->pad, "Cleaning up old streams");
+ g_list_free_full (old_streams,
+ (GDestroyNotify) gst_adaptive_demux_stream_free);
+ GST_DEBUG_OBJECT (stream->pad, "Cleaning up old streams (done)");
+
+ /* gst_adaptive_demux_stream_free had temporarily released the manifest_lock.
+ * Recheck the cancelled flag.
+ */
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ goto cancelled;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+
+ /* Restarting download, figure out new position
+ * FIXME : Move this to a separate function ? */
+ if (G_UNLIKELY (stream->restart_download)) {
+ GstEvent *seg_event;
+ GstClockTime cur, ts = 0;
+ gint64 pos;
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Activating stream due to reconfigure event");
+
+ if (gst_pad_peer_query_position (stream->pad, GST_FORMAT_TIME, &pos)) {
+ ts = (GstClockTime) pos;
+ GST_DEBUG_OBJECT (demux, "Downstream position: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+ } else {
+ /* query other pads as some faulty element in the pad's branch might
+ * reject position queries. This should be better than using the
+ * demux segment position that can be much ahead */
+ GList *iter;
+
+ for (iter = demux->streams; iter != NULL; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *cur_stream =
+ (GstAdaptiveDemuxStream *) iter->data;
+
+ if (gst_pad_peer_query_position (cur_stream->pad, GST_FORMAT_TIME,
+ &pos)) {
+ ts = (GstClockTime) pos;
+ GST_DEBUG_OBJECT (stream->pad, "Downstream position: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+ break;
+ }
+ }
+ }
+
+ GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+ cur =
+ gst_segment_to_stream_time (&stream->segment, GST_FORMAT_TIME,
+ stream->segment.position);
+
+ /* we might have already pushed this data */
+ ts = MAX (ts, cur);
+
+ GST_DEBUG_OBJECT (stream->pad, "Restarting stream at "
+ "position %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+
+ if (GST_CLOCK_TIME_IS_VALID (ts)) {
+ GstClockTime offset, period_start;
+
+ offset =
+ gst_adaptive_demux_stream_get_presentation_offset (demux, stream);
+ period_start = gst_adaptive_demux_get_period_start_time (demux);
+
+ /* TODO check return */
+ gst_adaptive_demux_stream_seek (demux, stream, demux->segment.rate >= 0,
+ 0, ts, &ts);
+
+ stream->segment.position = ts - period_start + offset;
+ }
+
+ /* The stream's segment is still correct except for
+ * the position, so let's send a new one with the
+ * updated position */
+ seg_event = gst_event_new_segment (&stream->segment);
+ gst_event_set_seqnum (seg_event, demux->priv->segment_seqnum);
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+ GST_DEBUG_OBJECT (stream->pad, "Sending restart segment: %"
+ GST_PTR_FORMAT, seg_event);
+ gst_pad_push_event (stream->pad, seg_event);
+
+ stream->discont = TRUE;
+ stream->restart_download = FALSE;
+ }
+
+ live = gst_adaptive_demux_is_live (demux);
+
+ /* Get information about the fragment to download */
+ GST_DEBUG_OBJECT (demux, "Calling update_fragment_info");
+ ret = gst_adaptive_demux_stream_update_fragment_info (demux, stream);
+ GST_DEBUG_OBJECT (stream->pad, "Fragment info update result: %d %s",
+ ret, gst_flow_get_name (ret));
+ if (ret == GST_FLOW_OK) {
+
+ /* wait for live fragments to be available */
+ if (live) {
+ gint64 wait_time =
+ gst_adaptive_demux_stream_get_fragment_waiting_time (demux, stream);
+ if (wait_time > 0) {
+ GstClockTime end_time =
+ gst_adaptive_demux_get_monotonic_time (demux) + wait_time;
+
+ GST_DEBUG_OBJECT (stream->pad, "Download waiting for %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (wait_time));
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ GST_MANIFEST_LOCK (demux);
+ stream->last_ret = GST_FLOW_FLUSHING;
+ goto cancelled;
+ }
+ gst_adaptive_demux_wait_until (demux->realtime_clock,
+ &stream->fragment_download_cond, &stream->fragment_download_lock,
+ end_time);
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ GST_DEBUG_OBJECT (stream->pad, "Download finished waiting");
+
+ GST_MANIFEST_LOCK (demux);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ goto cancelled;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+ }
+
+ stream->last_ret = GST_FLOW_OK;
+
+ next_download = gst_adaptive_demux_get_monotonic_time (demux);
+ ret = gst_adaptive_demux_stream_download_fragment (stream);
+
+ if (ret == GST_FLOW_FLUSHING) {
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ goto cancelled;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+ }
+
+ } else {
+ stream->last_ret = ret;
+ }
+
+ switch (ret) {
+ case GST_FLOW_OK:
+ break; /* all is good, let's go */
+ case GST_FLOW_EOS:
+ GST_DEBUG_OBJECT (stream->pad, "EOS, checking to stop download loop");
+
+ /* we push the EOS after releasing the object lock */
+ if (gst_adaptive_demux_is_live (demux)
+ && (demux->segment.rate == 1.0
+ || gst_adaptive_demux_stream_in_live_seek_range (demux,
+ stream))) {
+ GstAdaptiveDemuxClass *demux_class =
+ GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ /* this might be a fragment download error, refresh the manifest, just in case */
+ if (!demux_class->requires_periodical_playlist_update (demux)) {
+ ret = gst_adaptive_demux_update_manifest (demux);
+ break;
+ /* Wait only if we can ensure current manifest has been expired.
+ * The meaning "we have next period" *WITH* EOS is that, current
+ * period has been ended but we can continue to the next period */
+ } else if (!gst_adaptive_demux_has_next_period (demux) &&
+ gst_adaptive_demux_stream_wait_manifest_update (demux, stream)) {
+ goto end;
+ }
+ gst_task_stop (stream->download_task);
+ if (stream->replaced) {
+ goto end;
+ }
+ } else {
+ gst_task_stop (stream->download_task);
+ }
+
+ if (gst_adaptive_demux_combine_flows (demux) == GST_FLOW_EOS) {
+ if (gst_adaptive_demux_has_next_period (demux)) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "Next period available, not sending EOS");
+ gst_adaptive_demux_advance_period (demux);
+ ret = GST_FLOW_OK;
+ }
+ }
+ break;
+
+ case GST_FLOW_NOT_LINKED:
+ {
+ GstFlowReturn ret;
+ gst_task_stop (stream->download_task);
+
+ ret = gst_adaptive_demux_combine_flows (demux);
+ if (ret == GST_FLOW_NOT_LINKED) {
+ GST_ELEMENT_FLOW_ERROR (demux, ret);
+ }
+ }
+ break;
+
+ case GST_FLOW_FLUSHING:{
+ GList *iter;
+
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ GstAdaptiveDemuxStream *other;
+
+ other = iter->data;
+ gst_task_stop (other->download_task);
+ }
+ }
+ break;
+
+ default:
+ if (ret <= GST_FLOW_ERROR) {
+ gboolean is_live = gst_adaptive_demux_is_live (demux);
+ GST_WARNING_OBJECT (demux, "Error while downloading fragment");
+ if (++stream->download_error_count > MAX_DOWNLOAD_ERROR_COUNT) {
+ goto download_error;
+ }
+
+ g_clear_error (&stream->last_error);
+
+ /* First try to update the playlist for non-live playlists
+ * in case the URIs have changed in the meantime. But only
+ * try it the first time, after that we're going to wait a
+ * a bit to not flood the server */
+ if (stream->download_error_count == 1 && !is_live) {
+ /* TODO hlsdemux had more options to this function (boolean and err) */
+
+ if (gst_adaptive_demux_update_manifest (demux) == GST_FLOW_OK) {
+ /* Retry immediately, the playlist actually has changed */
+ GST_DEBUG_OBJECT (demux, "Updated the playlist");
+ goto end;
+ }
+ }
+
+ /* Wait half the fragment duration before retrying */
+ next_download += stream->fragment.duration / 2;
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ g_mutex_unlock (&stream->fragment_download_lock);
+ GST_MANIFEST_LOCK (demux);
+ stream->last_ret = GST_FLOW_FLUSHING;
+ goto cancelled;
+ }
+ gst_adaptive_demux_wait_until (demux->realtime_clock,
+ &stream->fragment_download_cond, &stream->fragment_download_lock,
+ next_download);
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ GST_DEBUG_OBJECT (demux, "Retrying now");
+
+ GST_MANIFEST_LOCK (demux);
+
+ g_mutex_lock (&stream->fragment_download_lock);
+ if (G_UNLIKELY (stream->cancelled)) {
+ stream->last_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&stream->fragment_download_lock);
+ goto cancelled;
+ }
+ g_mutex_unlock (&stream->fragment_download_lock);
+
+ /* Refetch the playlist now after we waited */
+ if (!is_live
+ && gst_adaptive_demux_update_manifest (demux) == GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "Updated the playlist");
+ }
+ goto end;
+ }
+ break;
+ }
+
+ end_of_manifest:
+ if (G_UNLIKELY (ret == GST_FLOW_EOS)) {
+ if (GST_OBJECT_PARENT (stream->pad) != NULL) {
+ if (demux->next_streams == NULL && demux->prepared_streams == NULL) {
+ GST_DEBUG_OBJECT (stream->src, "Pushing EOS on pad");
+ gst_adaptive_demux_stream_push_event (stream, gst_event_new_eos ());
+ } else {
+ GST_DEBUG_OBJECT (stream->src,
+ "Stream is EOS, but we're switching fragments. Not sending.");
+ }
+ } else {
+ GST_ERROR_OBJECT (demux, "Can't push EOS on non-exposed pad");
+ goto download_error;
+ }
+ }
+
+ end:
+ GST_MANIFEST_UNLOCK (demux);
+ GST_LOG_OBJECT (stream->pad, "download loop end");
+ return;
+
+ cancelled:
+ {
+ GST_DEBUG_OBJECT (stream->pad, "Stream has been cancelled");
+ goto end;
+ }
+ download_error:
+ {
+ GstMessage *msg;
+
+ if (stream->last_error) {
+ gchar *debug = g_strdup_printf ("Error on stream %s:%s",
+ GST_DEBUG_PAD_NAME (stream->pad));
+ msg =
+ gst_message_new_error (GST_OBJECT_CAST (demux), stream->last_error,
+ debug);
+ GST_ERROR_OBJECT (stream->pad, "Download error: %s",
+ stream->last_error->message);
+ g_free (debug);
+ } else {
+ GError *err =
+ g_error_new (GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND,
+ _("Couldn't download fragments"));
+ msg =
+ gst_message_new_error (GST_OBJECT_CAST (demux), err,
+ "Fragment downloading has failed consecutive times");
+ g_error_free (err);
+ GST_ERROR_OBJECT (stream->pad,
+ "Download error: Couldn't download fragments, too many failures");
+ }
+
+ gst_task_stop (stream->download_task);
+ if (stream->src) {
+ GstElement *src = stream->src;
+
+ stream->src = NULL;
+ GST_MANIFEST_UNLOCK (demux);
+ gst_element_set_locked_state (src, TRUE);
+ gst_element_set_state (src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (demux), src);
+ GST_MANIFEST_LOCK (demux);
+ }
+
+ gst_element_post_message (GST_ELEMENT_CAST (demux), msg);
+
+ goto end;
+ }
+ }
+
+ static void
+ gst_adaptive_demux_updates_loop (GstAdaptiveDemux * demux)
+ {
+ GstClockTime next_update;
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ /* Loop for updating of the playlist. This periodically checks if
+ * the playlist is updated and does so, then signals the streaming
+ * thread in case it can continue downloading now. */
+
+ /* block until the next scheduled update or the signal to quit this thread */
+ GST_DEBUG_OBJECT (demux, "Started updates task");
+
+ GST_MANIFEST_LOCK (demux);
+
+ next_update =
+ gst_adaptive_demux_get_monotonic_time (demux) +
+ klass->get_manifest_update_interval (demux) * GST_USECOND;
+
+ /* Updating playlist only needed for live playlists */
+ while (gst_adaptive_demux_is_live (demux)) {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* Wait here until we should do the next update or we're cancelled */
+ GST_DEBUG_OBJECT (demux, "Wait for next playlist update");
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ g_mutex_lock (&demux->priv->updates_timed_lock);
+ if (demux->priv->stop_updates_task) {
+ g_mutex_unlock (&demux->priv->updates_timed_lock);
+ goto quit;
+ }
+ gst_adaptive_demux_wait_until (demux->realtime_clock,
+ &demux->priv->updates_timed_cond,
+ &demux->priv->updates_timed_lock, next_update);
+ g_mutex_unlock (&demux->priv->updates_timed_lock);
+
+ g_mutex_lock (&demux->priv->updates_timed_lock);
+ if (demux->priv->stop_updates_task) {
+ g_mutex_unlock (&demux->priv->updates_timed_lock);
+ goto quit;
+ }
+ g_mutex_unlock (&demux->priv->updates_timed_lock);
+
+ GST_MANIFEST_LOCK (demux);
+
+ GST_DEBUG_OBJECT (demux, "Updating playlist");
+
+ ret = gst_adaptive_demux_update_manifest (demux);
+
+ if (ret == GST_FLOW_EOS) {
+ } else if (ret != GST_FLOW_OK) {
+ /* update_failed_count is used only here, no need to protect it */
+ demux->priv->update_failed_count++;
+ if (demux->priv->update_failed_count <= DEFAULT_FAILED_COUNT) {
+ GST_WARNING_OBJECT (demux, "Could not update the playlist, flow: %s",
+ gst_flow_get_name (ret));
+ next_update = gst_adaptive_demux_get_monotonic_time (demux)
+ + klass->get_manifest_update_interval (demux) * GST_USECOND;
+ } else {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ (_("Internal data stream error.")), ("Could not update playlist"));
+ GST_DEBUG_OBJECT (demux, "Stopped updates task because of error");
+ gst_task_stop (demux->priv->updates_task);
+ GST_MANIFEST_UNLOCK (demux);
+ goto end;
+ }
+ } else {
+ GST_DEBUG_OBJECT (demux, "Updated playlist successfully");
+ demux->priv->update_failed_count = 0;
+ next_update =
+ gst_adaptive_demux_get_monotonic_time (demux) +
+ klass->get_manifest_update_interval (demux) * GST_USECOND;
+
+ /* Wake up download tasks */
+ g_mutex_lock (&demux->priv->manifest_update_lock);
+ g_cond_broadcast (&demux->priv->manifest_cond);
+ g_mutex_unlock (&demux->priv->manifest_update_lock);
+ }
+ }
+
+ GST_MANIFEST_UNLOCK (demux);
+
+ quit:
+ {
+ GST_DEBUG_OBJECT (demux, "Stop updates task request detected.");
+ }
+
+ end:
+ {
+ return;
+ }
+ }
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_stream_push_event (GstAdaptiveDemuxStream * stream,
+ GstEvent * event)
+ {
+ gboolean ret;
+ GstPad *pad;
+ GstAdaptiveDemux *demux = stream->demux;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ stream->eos = TRUE;
+ }
+
+ pad = gst_object_ref (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream));
+
+ /* Can't push events holding the manifest lock */
+ GST_MANIFEST_UNLOCK (demux);
+
+ GST_DEBUG_OBJECT (GST_ADAPTIVE_DEMUX_STREAM_PAD (stream),
+ "Pushing event %" GST_PTR_FORMAT, event);
+
+ ret = gst_pad_push_event (pad, event);
+
+ gst_object_unref (pad);
+
+ GST_MANIFEST_LOCK (demux);
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_is_live (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->is_live)
+ return klass->is_live (demux);
+ return FALSE;
+ }
+
+ /* must be called with manifest_lock taken */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_seek (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, gboolean forward, GstSeekFlags flags,
+ GstClockTime ts, GstClockTime * final_ts)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->stream_seek)
+ return klass->stream_seek (stream, forward, flags, ts, final_ts);
+ return GST_FLOW_ERROR;
+ }
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_stream_has_next_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ gboolean ret = TRUE;
+
+ if (klass->stream_has_next_fragment)
+ ret = klass->stream_has_next_fragment (stream);
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ /* Called from:
+ * the ::finish_fragment() handlers when an *actual* fragment is done
+ * */
+ GstFlowReturn
+ gst_adaptive_demux_stream_advance_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstClockTime duration)
+ {
+ GstFlowReturn ret;
+
+ if (stream->last_ret == GST_FLOW_OK) {
+ stream->last_ret =
+ gst_adaptive_demux_stream_advance_fragment_unlocked (demux, stream,
+ duration);
+ }
+ ret = stream->last_ret;
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ GstFlowReturn
+ gst_adaptive_demux_stream_advance_fragment_unlocked (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstClockTime duration)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ GstFlowReturn ret;
+
+ g_return_val_if_fail (klass->stream_advance_fragment != NULL, GST_FLOW_ERROR);
+
+ GST_LOG_OBJECT (stream->pad,
+ "timestamp %" GST_TIME_FORMAT " duration:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->fragment.timestamp), GST_TIME_ARGS (duration));
+
+ stream->download_error_count = 0;
+ g_clear_error (&stream->last_error);
+
+ /* FIXME - url has no indication of byte ranges for subsegments */
+ /* FIXME : All those time statistics are biased, since they are calculated
+ * *AFTER* the queue2, which might be blocking. They should ideally be
+ * calculated *before* queue2 in the uri_handler_probe */
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_element (GST_OBJECT_CAST (demux),
+ gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
+ "manifest-uri", G_TYPE_STRING,
+ demux->manifest_uri, "uri", G_TYPE_STRING,
+ stream->fragment.uri, "fragment-start-time",
+ GST_TYPE_CLOCK_TIME, stream->download_start_time,
+ "fragment-stop-time", GST_TYPE_CLOCK_TIME,
+ gst_util_get_timestamp (), "fragment-size", G_TYPE_UINT64,
+ stream->download_total_bytes, "fragment-download-time",
+ GST_TYPE_CLOCK_TIME, stream->last_download_time, NULL)));
+
+ /* Don't update to the end of the segment if in reverse playback */
+ GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+ if (GST_CLOCK_TIME_IS_VALID (duration) && demux->segment.rate > 0) {
+ GstClockTime offset =
+ gst_adaptive_demux_stream_get_presentation_offset (demux, stream);
+ GstClockTime period_start =
+ gst_adaptive_demux_get_period_start_time (demux);
+
+ stream->segment.position += duration;
+
+ /* Convert from position inside the stream's segment to the demuxer's
+ * segment, they are not necessarily the same */
+ if (stream->segment.position - offset + period_start >
+ demux->segment.position)
+ demux->segment.position =
+ stream->segment.position - offset + period_start;
+ }
+ GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+ /* When advancing with a non 1.0 rate on live streams, we need to check
+ * the live seeking range again to make sure we can still advance to
+ * that position */
+ if (demux->segment.rate != 1.0 && gst_adaptive_demux_is_live (demux)) {
+ if (!gst_adaptive_demux_stream_in_live_seek_range (demux, stream))
+ ret = GST_FLOW_EOS;
+ else
+ ret = klass->stream_advance_fragment (stream);
+ } else if (gst_adaptive_demux_is_live (demux)
+ || gst_adaptive_demux_stream_has_next_fragment (demux, stream)) {
+ ret = klass->stream_advance_fragment (stream);
+ } else {
+ ret = GST_FLOW_EOS;
+ }
+
+ stream->download_start_time =
+ GST_TIME_AS_USECONDS (gst_adaptive_demux_get_monotonic_time (demux));
+
+ if (ret == GST_FLOW_OK) {
+ if (gst_adaptive_demux_stream_select_bitrate (demux, stream,
+ gst_adaptive_demux_stream_update_current_bitrate (demux, stream))) {
+ stream->need_header = TRUE;
+ ret = (GstFlowReturn) GST_ADAPTIVE_DEMUX_FLOW_SWITCH;
+ }
+
+ /* the subclass might want to switch pads */
+ if (G_UNLIKELY (demux->next_streams)) {
+ GList *iter;
+ gboolean can_expose = TRUE;
+
+ gst_task_stop (stream->download_task);
+
+ ret = GST_FLOW_EOS;
+
+ for (iter = demux->streams; iter; iter = g_list_next (iter)) {
+ /* Only expose if all streams are now cancelled or finished downloading */
+ GstAdaptiveDemuxStream *other = iter->data;
+ if (other != stream) {
+ g_mutex_lock (&other->fragment_download_lock);
+ can_expose &= (other->cancelled == TRUE
+ || other->download_finished == TRUE);
+ g_mutex_unlock (&other->fragment_download_lock);
+ }
+ }
+
+ if (can_expose) {
+ GST_DEBUG_OBJECT (demux, "Subclass wants new pads "
+ "to do bitrate switching");
+ gst_adaptive_demux_prepare_streams (demux, FALSE);
+ gst_adaptive_demux_start_tasks (demux, TRUE);
+ } else {
+ GST_LOG_OBJECT (demux, "Not switching yet - ongoing downloads");
+ }
+ }
+ }
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_stream_select_bitrate (GstAdaptiveDemux *
+ demux, GstAdaptiveDemuxStream * stream, guint64 bitrate)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->stream_select_bitrate)
+ return klass->stream_select_bitrate (stream, bitrate);
+ return FALSE;
+ }
+
+ /* must be called with manifest_lock taken */
+ static GstFlowReturn
+ gst_adaptive_demux_stream_update_fragment_info (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ GstFlowReturn ret;
+
+ g_return_val_if_fail (klass->stream_update_fragment_info != NULL,
+ GST_FLOW_ERROR);
+
+ /* Make sure the sub-class will update bitrate, or else
+ * we will later */
+ stream->fragment.bitrate = 0;
+ stream->fragment.finished = FALSE;
+
+ GST_LOG_OBJECT (stream->pad, "position %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->segment.position));
+
+ ret = klass->stream_update_fragment_info (stream);
+
+ GST_LOG_OBJECT (stream->pad, "ret:%s uri:%s", gst_flow_get_name (ret),
+ stream->fragment.uri);
+ if (ret == GST_FLOW_OK) {
+ GST_LOG_OBJECT (stream->pad,
+ "timestamp %" GST_TIME_FORMAT " duration:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->fragment.timestamp),
+ GST_TIME_ARGS (stream->fragment.duration));
+ GST_LOG_OBJECT (stream->pad,
+ "range start:%" G_GINT64_FORMAT " end:%" G_GINT64_FORMAT,
+ stream->fragment.range_start, stream->fragment.range_end);
+ }
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ static gint64
+ gst_adaptive_demux_stream_get_fragment_waiting_time (GstAdaptiveDemux *
+ demux, GstAdaptiveDemuxStream * stream)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+ if (klass->stream_get_fragment_waiting_time)
+ return klass->stream_get_fragment_waiting_time (stream);
+ return 0;
+ }
+
+ /* must be called with manifest_lock taken */
+ static GstFlowReturn
+ gst_adaptive_demux_update_manifest_default (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ GstFragment *download;
+ GstBuffer *buffer;
+ GstFlowReturn ret;
+ GError *error = NULL;
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ download = gst_uri_downloader_fetch_uri (demux->downloader,
++ demux->manifest_uri, NULL, demux->user_agent, demux->cookies,
++ PLAYLIST_ADAPTIVE_RETRY, PLAYLIST_ADAPTIVE_TIMEOUT, TRUE, TRUE, TRUE,
++ &error);
++#else
+ download = gst_uri_downloader_fetch_uri (demux->downloader,
+ demux->manifest_uri, NULL, TRUE, TRUE, TRUE, &error);
++#endif
+ if (download) {
+ g_free (demux->manifest_uri);
+ g_free (demux->manifest_base_uri);
+ if (download->redirect_permanent && download->redirect_uri) {
+ demux->manifest_uri = g_strdup (download->redirect_uri);
+ demux->manifest_base_uri = NULL;
+ } else {
+ demux->manifest_uri = g_strdup (download->uri);
+ demux->manifest_base_uri = g_strdup (download->redirect_uri);
+ }
+
+ buffer = gst_fragment_get_buffer (download);
+ g_object_unref (download);
+ ret = klass->update_manifest_data (demux, buffer);
+ gst_buffer_unref (buffer);
+ /* FIXME: Should the manifest uri vars be reverted to original
+ * values if updating fails? */
+ } else {
+ GST_WARNING_OBJECT (demux, "Failed to download manifest: %s",
+ error->message);
+ ret = GST_FLOW_NOT_LINKED;
+ }
+ g_clear_error (&error);
+
+ return ret;
+ }
+
+ /* must be called with manifest_lock taken */
+ static GstFlowReturn
+ gst_adaptive_demux_update_manifest (GstAdaptiveDemux * demux)
+ {
+ GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+ GstFlowReturn ret;
+
+ ret = klass->update_manifest (demux);
+
+ if (ret == GST_FLOW_OK) {
+ GstClockTime duration;
+ /* Send an updated duration message */
+ duration = klass->get_duration (demux);
+ if (duration != GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux,
+ "Sending duration message : %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (duration));
+ gst_element_post_message (GST_ELEMENT (demux),
+ gst_message_new_duration_changed (GST_OBJECT (demux)));
+ } else {
+ GST_DEBUG_OBJECT (demux,
+ "Duration unknown, can not send the duration message");
+ }
+
+ /* If a manifest changes it's liveness or periodic updateness, we need
+ * to start/stop the manifest update task appropriately */
+ /* Keep this condition in sync with the one in
+ * gst_adaptive_demux_start_manifest_update_task()
+ */
+ if (gst_adaptive_demux_is_live (demux) &&
+ klass->requires_periodical_playlist_update (demux)) {
+ gst_adaptive_demux_start_manifest_update_task (demux);
+ } else {
+ gst_adaptive_demux_stop_manifest_update_task (demux);
+ }
+ }
+
+ return ret;
+ }
+
+ void
+ gst_adaptive_demux_stream_fragment_clear (GstAdaptiveDemuxStreamFragment * f)
+ {
+ g_free (f->uri);
+ f->uri = NULL;
+ f->range_start = 0;
+ f->range_end = -1;
+
+ g_free (f->header_uri);
+ f->header_uri = NULL;
+ f->header_range_start = 0;
+ f->header_range_end = -1;
+
+ g_free (f->index_uri);
+ f->index_uri = NULL;
+ f->index_range_start = 0;
+ f->index_range_end = -1;
+
+ f->finished = FALSE;
+ }
+
+ /* must be called with manifest_lock taken */
+ static gboolean
+ gst_adaptive_demux_has_next_period (GstAdaptiveDemux * demux)
+ {
+   GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+   gboolean has_period;
+
+   /* The vfunc is optional: subclasses without multi-period support
+    * simply leave it unset, which means "no further period". */
+   has_period =
+       demux_class->has_next_period ? demux_class->has_next_period (demux)
+       : FALSE;
+   GST_DEBUG_OBJECT (demux, "Has next period: %d", has_period);
+   return has_period;
+ }
+
+ /* must be called with manifest_lock taken */
+ static void
+ gst_adaptive_demux_advance_period (GstAdaptiveDemux * demux)
+ {
+   GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+   g_return_if_fail (demux_class->advance_period != NULL);
+
+   GST_DEBUG_OBJECT (demux, "Advancing to next period");
+   /* Let the subclass switch the manifest to the following period, then
+    * prepare the freshly created streams and restart the download tasks. */
+   demux_class->advance_period (demux);
+   gst_adaptive_demux_prepare_streams (demux, FALSE);
+   gst_adaptive_demux_start_tasks (demux, TRUE);
+ }
+
+ /**
+  * gst_adaptive_demux_get_monotonic_time:
+  * Returns: a monotonically increasing time, using the system realtime clock
+  */
+ GstClockTime
+ gst_adaptive_demux_get_monotonic_time (GstAdaptiveDemux * demux)
+ {
+   GstClockTime now;
+
+   g_return_val_if_fail (demux != NULL, GST_CLOCK_TIME_NONE);
+
+   now = gst_clock_get_time (demux->realtime_clock);
+   return now;
+ }
+
+ /**
+  * gst_adaptive_demux_get_client_now_utc:
+  * @demux: #GstAdaptiveDemux
+  * Returns: the client's estimate of UTC
+  *
+  * Used to find the client's estimate of UTC, using the system realtime clock.
+  */
+ GDateTime *
+ gst_adaptive_demux_get_client_now_utc (GstAdaptiveDemux * demux)
+ {
+ GstClockTime rtc_now;
+ GDateTime *unix_datetime;
+ GDateTime *result_datetime;
+ gint64 utc_now_in_us;
+
+ rtc_now = gst_clock_get_time (demux->realtime_clock);
+ /* clock_offset is the offset between realtime_clock and UTC in
+  * microseconds, so the sum is "now" in microseconds since the epoch. */
+ utc_now_in_us = demux->clock_offset + GST_TIME_AS_USECONDS (rtc_now);
+ /* Build the GDateTime in two steps: whole seconds first, then add the
+  * sub-second remainder — g_date_time_add() takes a GTimeSpan, which is
+  * expressed in microseconds. */
+ unix_datetime =
+ g_date_time_new_from_unix_utc (utc_now_in_us / G_TIME_SPAN_SECOND);
+ result_datetime =
+ g_date_time_add (unix_datetime, utc_now_in_us % G_TIME_SPAN_SECOND);
+ g_date_time_unref (unix_datetime);
+ return result_datetime;
+ }
+
+ /**
+  * gst_adaptive_demux_is_running
+  * @demux: #GstAdaptiveDemux
+  * Returns: whether the demuxer is processing data
+  *
+  * Returns FALSE if shutdown has started (transitioning down from
+  * PAUSED), otherwise TRUE.
+  */
+ gboolean
+ gst_adaptive_demux_is_running (GstAdaptiveDemux * demux)
+ {
+   gint running = g_atomic_int_get (&demux->running);
+
+   return running;
+ }
+
+ /* Allocate a one-shot timer record bound to the caller-owned
+  * condition/mutex pair; starts with a single reference. */
+ static GstAdaptiveDemuxTimer *
+ gst_adaptive_demux_timer_new (GCond * cond, GMutex * mutex)
+ {
+   GstAdaptiveDemuxTimer *t = g_slice_new (GstAdaptiveDemuxTimer);
+
+   t->cond = cond;
+   t->mutex = mutex;
+   t->fired = FALSE;
+   g_atomic_int_set (&t->ref_count, 1);
+   return t;
+ }
+
+ /* Take an additional reference on @self (atomic, lock-free). */
+ static GstAdaptiveDemuxTimer *
+ gst_adaptive_demux_timer_ref (GstAdaptiveDemuxTimer * self)
+ {
+   g_return_val_if_fail (self != NULL, NULL);
+
+   g_atomic_int_inc (&self->ref_count);
+   return self;
+ }
+
+ /* Drop one reference; frees the slice when the last one goes away. */
+ static void
+ gst_adaptive_demux_timer_unref (GstAdaptiveDemuxTimer * self)
+ {
+   g_return_if_fail (self != NULL);
+
+   if (g_atomic_int_dec_and_test (&self->ref_count))
+     g_slice_free (GstAdaptiveDemuxTimer, self);
+ }
+
+ /* gst_adaptive_demux_wait_until:
+ * A replacement for g_cond_wait_until that uses the clock rather
+ * than system time to control the duration of the sleep. Typically
+ * clock is actually a #GstSystemClock, in which case this function
+ * behaves exactly like g_cond_wait_until. Inside unit tests,
+ * the clock is typically a #GstTestClock, which allows tests to run
+ * in non-realtime.
+ * This function must be called with mutex held.
+ *
+ * Returns: %FALSE if the wait timed out (the clock fired), %TRUE if the
+ * cond was signalled by another thread before the deadline.
+ */
+ static gboolean
+ gst_adaptive_demux_wait_until (GstClock * clock, GCond * cond, GMutex * mutex,
+ GstClockTime end_time)
+ {
+ GstAdaptiveDemuxTimer *timer;
+ gboolean fired;
+ GstClockReturn res;
+
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (end_time))) {
+ /* for an invalid time, gst_clock_id_wait_async will try to call
+ * gst_adaptive_demux_clock_callback from the current thread.
+ * It still holds the mutex while doing that, so it will deadlock.
+ * g_cond_wait_until would return immediately with false, so we'll do the same.
+ */
+ return FALSE;
+ }
+ /* The timer is refcounted because the clock callback may outlive this
+ * frame: one ref belongs to us, one to the async wait below. */
+ timer = gst_adaptive_demux_timer_new (cond, mutex);
+ timer->clock_id = gst_clock_new_single_shot_id (clock, end_time);
+ res =
+ gst_clock_id_wait_async (timer->clock_id,
+ gst_adaptive_demux_clock_callback, gst_adaptive_demux_timer_ref (timer),
+ (GDestroyNotify) gst_adaptive_demux_timer_unref);
+ /* clock does not support asynchronously wait. Assert and return */
+ if (res == GST_CLOCK_UNSUPPORTED) {
+ gst_clock_id_unref (timer->clock_id);
+ gst_adaptive_demux_timer_unref (timer);
+ g_return_val_if_reached (TRUE);
+ }
+ /* The callback cannot have run yet: it needs mutex, which we hold. */
+ g_assert (!timer->fired);
+ /* the gst_adaptive_demux_clock_callback() will signal the
+ * cond when the clock's single shot timer fires, or the cond will be
+ * signalled by another thread that wants to cause this wait to finish
+ * early (e.g. to terminate the waiting thread).
+ * There is no need for a while loop here, because that logic is
+ * implemented by the function calling gst_adaptive_demux_wait_until() */
+ g_cond_wait (cond, mutex);
+ fired = timer->fired;
+ if (!fired)
+ gst_clock_id_unschedule (timer->clock_id);
+ gst_clock_id_unref (timer->clock_id);
+ gst_adaptive_demux_timer_unref (timer);
+ return !fired;
+ }
+
+ /* Runs on the GstClock worker thread when the single-shot id fires:
+  * marks the timer as expired and wakes the thread sleeping in
+  * gst_adaptive_demux_wait_until(). */
+ static gboolean
+ gst_adaptive_demux_clock_callback (GstClock * clock,
+     GstClockTime time, GstClockID id, gpointer user_data)
+ {
+   GstAdaptiveDemuxTimer *timer = user_data;
+
+   g_return_val_if_fail (timer != NULL, FALSE);
+
+   /* Take the waiter's mutex so 'fired' and the signal are seen
+    * atomically by the sleeping thread. */
+   g_mutex_lock (timer->mutex);
+   timer->fired = TRUE;
+   g_cond_signal (timer->cond);
+   g_mutex_unlock (timer->mutex);
+   return TRUE;
+ }
+
+ /**
+  * gst_adaptive_demux_get_qos_earliest_time:
+  *
+  * Returns: The QOS earliest time
+  *
+  * Since: 1.20
+  */
+ GstClockTime
+ gst_adaptive_demux_get_qos_earliest_time (GstAdaptiveDemux * demux)
+ {
+   GstClockTime qos_time;
+
+   /* priv->qos_earliest_time is guarded by the object lock. */
+   GST_OBJECT_LOCK (demux);
+   qos_time = demux->priv->qos_earliest_time;
+   GST_OBJECT_UNLOCK (demux);
+
+   return qos_time;
+ }
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ * Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef _GST_ADAPTIVE_DEMUX_H_
+ #define _GST_ADAPTIVE_DEMUX_H_
+
+ #include <gst/gst.h>
+ #include <gst/base/gstadapter.h>
+ #include <gst/uridownloader/gsturidownloader.h>
+ #include <gst/adaptivedemux/adaptive-demux-prelude.h>
+
+ G_BEGIN_DECLS
+
+ /* Standard GObject type-check/cast boilerplate for GstAdaptiveDemux. */
+ #define GST_TYPE_ADAPTIVE_DEMUX \
+   (gst_adaptive_demux_get_type())
+ #define GST_ADAPTIVE_DEMUX(obj) \
+   (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ADAPTIVE_DEMUX,GstAdaptiveDemux))
+ #define GST_ADAPTIVE_DEMUX_CLASS(klass) \
+   (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ADAPTIVE_DEMUX,GstAdaptiveDemuxClass))
+ #define GST_ADAPTIVE_DEMUX_GET_CLASS(obj) \
+   (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_ADAPTIVE_DEMUX,GstAdaptiveDemuxClass))
+ #define GST_IS_ADAPTIVE_DEMUX(obj) \
+   (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ADAPTIVE_DEMUX))
+ /* BUGFIX: the parameter was declared as 'obj' while the expansion used
+  * 'klass', so any expansion of this macro referenced an undeclared name
+  * (or silently bound to an unrelated 'klass' in the caller's scope). */
+ #define GST_IS_ADAPTIVE_DEMUX_CLASS(klass) \
+   (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ADAPTIVE_DEMUX))
+ #define GST_ADAPTIVE_DEMUX_CAST(obj) ((GstAdaptiveDemux *)obj)
+
+ #define GST_ADAPTIVE_DEMUX_STREAM_CAST(obj) ((GstAdaptiveDemuxStream *)obj)
+
+ /**
+ * GST_ADAPTIVE_DEMUX_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+ #define GST_ADAPTIVE_DEMUX_SINK_NAME "sink"
+
+ /**
+ * GST_ADAPTIVE_DEMUX_SINK_PAD:
+ * @obj: a #GstAdaptiveDemux
+ *
+ * Gives the pointer to the sink #GstPad object of the element.
+ */
+ #define GST_ADAPTIVE_DEMUX_SINK_PAD(obj) (((GstAdaptiveDemux *) (obj))->sinkpad)
+
+ /* TRUE when the current segment requests keyframe-only trick mode. */
+ #define GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS(obj) ((((GstAdaptiveDemux*)(obj))->segment.flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) == GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS)
+
+ #define GST_ADAPTIVE_DEMUX_STREAM_PAD(obj) (((GstAdaptiveDemuxStream *) (obj))->pad)
+
+ #define GST_ADAPTIVE_DEMUX_STREAM_NEED_HEADER(obj) (((GstAdaptiveDemuxStream *) (obj))->need_header)
+
+ /**
+ * GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME:
+ *
+ * Name of the ELEMENT type messages posted by dashdemux with statistics.
+ *
+ * Since: 1.6
+ */
+ #define GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME "adaptive-streaming-statistics"
++/* Tizen extension: name of an additional ELEMENT message.
++ * NOTE(review): presumably posted when the selected variant changes —
++ * confirm against the Tizen adaptivedemux patches. */
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++#define GST_ADAPTIVE_DEMUX_VARIANT_MESSAGE_NAME "adaptive-streaming-variant"
++#endif
+
+ /* Wrap a GError into a GST_MESSAGE_ERROR posted on the bus, then clear it. */
+ #define GST_ELEMENT_ERROR_FROM_ERROR(el, msg, err) G_STMT_START { \
+ gchar *__dbg = g_strdup_printf ("%s: %s", msg, err->message); \
+ GST_WARNING_OBJECT (el, "error: %s", __dbg); \
+ gst_element_message_full (GST_ELEMENT(el), GST_MESSAGE_ERROR, \
+ err->domain, err->code, \
+ NULL, __dbg, __FILE__, GST_FUNCTION, __LINE__); \
+ g_clear_error (&err); \
+ } G_STMT_END
+
+ /* DEPRECATED */
+ #define GST_ADAPTIVE_DEMUX_FLOW_END_OF_FRAGMENT GST_FLOW_CUSTOM_SUCCESS_1
++/* Tizen extension: download retry/timeout and variant-limit constants.
++ * NOTE(review): -1 appears to mean "unlimited"/"library default" —
++ * semantics inferred from the names, confirm against Tizen callers. */
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++#define DEFAULT_ADAPTIVE_RETRY -1
++#define DEFAULT_ADAPTIVE_TIMEOUT -1
++#define PLAYLIST_ADAPTIVE_RETRY 3
++#define PLAYLIST_ADAPTIVE_TIMEOUT 2
++#define DEFAULT_BANDWIDTH_LIMIT -1
++#define DEFAULT_RESOLUTION_LIMIT -1
++#endif
+
+ typedef struct _GstAdaptiveDemuxStreamFragment GstAdaptiveDemuxStreamFragment;
+ typedef struct _GstAdaptiveDemuxStream GstAdaptiveDemuxStream;
+ typedef struct _GstAdaptiveDemux GstAdaptiveDemux;
+ typedef struct _GstAdaptiveDemuxClass GstAdaptiveDemuxClass;
+ typedef struct _GstAdaptiveDemuxPrivate GstAdaptiveDemuxPrivate;
+
+ /* Description of a single downloadable fragment: main media URI plus
+ * optional header and index URIs, each with a byte range
+ * (range_end == -1 means "until the end of the resource"). */
+ struct _GstAdaptiveDemuxStreamFragment
+ {
+ GstClockTime timestamp;
+ GstClockTime duration;
+
+ gchar *uri;
+ gint64 range_start;
+ gint64 range_end;
+
+ /* when chunked downloading is used, may be updated by need_another_chunk() */
+ guint chunk_size;
+
+ /* when headers are needed */
+ gchar *header_uri;
+ gint64 header_range_start;
+ gint64 header_range_end;
+
+ /* when index is needed */
+ gchar *index_uri;
+ gint64 index_range_start;
+ gint64 index_range_end;
+
+ /* Nominal bitrate as provided by
+ * sub-class or calculated by base-class */
+ guint bitrate;
+
+ /* set once the fragment has been fully downloaded */
+ gboolean finished;
+ };
+
+ /* Per-stream state: one instance per exposed source pad, owning the
+ * download machinery (task, source element, queue) for that stream. */
+ struct _GstAdaptiveDemuxStream
+ {
+ /* exposed source pad and the internal pad the download source feeds */
+ GstPad *pad;
+ GstPad *internal_pad;
+
+ GstAdaptiveDemux *demux;
+
+ GstSegment segment;
+
+ /* caps/segment/tags/events to be pushed before the next buffer */
+ GstCaps *pending_caps;
+ GstEvent *pending_segment;
+ GstTagList *pending_tags;
+ gboolean need_header;
+ GList *pending_events;
+
+ GstFlowReturn last_ret;
+ GError *last_error;
+
+ GstTask *download_task;
+ GRecMutex download_lock;
+
+ gboolean restart_download;
+ gboolean discont;
+
+ gboolean downloading_first_buffer;
+ gboolean downloading_header;
+ gboolean downloading_index;
+
+ gboolean bitrate_changed;
+
+ /* download tooling */
+ GstElement *src;
+ guint last_status_code;
+ GstPad *src_srcpad;
+ GstElement *uri_handler;
+ GstElement *queue;
+ GMutex fragment_download_lock;
+ GCond fragment_download_cond;
+ gboolean download_finished; /* protected by fragment_download_lock */
+ gboolean cancelled; /* protected by fragment_download_lock */
+ gboolean replaced; /* replaced in a bitrate switch (used with cancelled) */
+ gboolean src_at_ready; /* protected by fragment_download_lock */
+ gboolean starting_fragment;
+ gboolean first_fragment_buffer;
+ gint64 download_start_time;
+ gint64 download_total_bytes;
+ guint64 current_download_rate;
+
+ /* amount of data downloaded in current fragment (pre-queue2) */
+ guint64 fragment_bytes_downloaded;
+ /* bitrate of the previous fragment (pre-queue2) */
+ guint64 last_bitrate;
+ /* latency (request to first byte) and full download time (request to EOS)
+ * of previous fragment (pre-queue2) */
+ GstClockTime last_latency;
+ GstClockTime last_download_time;
+
+ /* Average for the last fragments */
+ guint64 moving_bitrate;
+ guint moving_index;
+ guint64 *fragment_bitrates;
+
+ /* QoS data : UNUSED !!! */
+ GstClockTime qos_earliest_time;
+
+ /* description of the fragment currently being downloaded */
+ GstAdaptiveDemuxStreamFragment fragment;
+
+ guint download_error_count;
+
+ /* TODO check if used */
+ gboolean eos;
+
+ gboolean do_block; /* TRUE if stream should block on preroll */
+ };
+
+ /**
+ * GstAdaptiveDemux:
+ *
+ * The opaque #GstAdaptiveDemux data structure.
+ */
+ struct _GstAdaptiveDemux
+ {
+ /*< private >*/
+ GstBin bin;
+
+ /* atomic flag: non-zero while the demuxer is processing data,
+ * see gst_adaptive_demux_is_running() */
+ gint running;
+
+ /* size subclasses want for their stream struct, set via
+ * gst_adaptive_demux_set_stream_struct_size() */
+ gsize stream_struct_size;
+
+ /*< protected >*/
+ GstPad *sinkpad;
+
+ GstUriDownloader *downloader;
+
+ /* current, being-prepared and queued-for-next-period stream lists */
+ GList *streams;
+ GList *prepared_streams;
+ GList *next_streams;
+
+ GstSegment segment;
+
+ gchar *manifest_uri;
+ gchar *manifest_base_uri;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++/* Tizen extension: HTTP user-agent and cookies forwarded to the
++ * download sources — TODO confirm against the Tizen souphttpsrc setup */
++ gchar *user_agent;
++ gchar **cookies;
++#endif
+
+ /* Properties */
+ gfloat bitrate_limit; /* limit of the available bitrate to use */
+ guint connection_speed;
+
+ gboolean have_group_id;
+ guint group_id;
+
+ /* Realtime clock */
+ GstClock *realtime_clock;
+ gint64 clock_offset; /* offset between realtime_clock and UTC (in usec) */
+
+ /* < private > */
+ GstAdaptiveDemuxPrivate *priv;
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++/* Tizen extension: variant-selection constraints; negative values
++ * presumably mean "no limit" — verify against the Tizen callers */
++ gchar *start_bandwidth;
++ gint min_bandwidth;
++ gint max_bandwidth;
++ gint max_width;
++ gint max_height;
++#endif
+ };
+
+ /**
+ * GstAdaptiveDemuxClass:
+ *
+ */
+ struct _GstAdaptiveDemuxClass
+ {
+ /*< private >*/
+ GstBinClass bin_class;
+
+ /*< public >*/
+
+ /**
+ * process_manifest: Parse the manifest
+ * @demux: #GstAdaptiveDemux
+ * @manifest: the manifest to be parsed
+ *
+ * Parse the manifest and add the created streams using
+ * gst_adaptive_demux_stream_new()
+ *
+ * Returns: %TRUE if successful
+ */
+ gboolean (*process_manifest) (GstAdaptiveDemux * demux, GstBuffer * manifest);
+
+ /**
+ * get_manifest_update_interval:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Used during live streaming, the subclass should return the interval
+ * between successive manifest updates
+ *
+ * Returns: the update interval in microseconds
+ */
+ gint64 (*get_manifest_update_interval) (GstAdaptiveDemux * demux);
+
+ /**
+ * update_manifest:
+ * @demux: #GstAdaptiveDemux
+ *
+ * During live streaming, this will be called for the subclass to update its
+ * manifest with the new version. By default it fetches the manifest URI
+ * and passes it to GstAdaptiveDemux::update_manifest_data().
+ *
+ * Returns: #GST_FLOW_OK is all succeeded, #GST_FLOW_EOS if the stream ended
+ * or #GST_FLOW_ERROR if an error happened
+ */
+ GstFlowReturn (*update_manifest) (GstAdaptiveDemux * demux);
+
+ /**
+ * update_manifest_data:
+ * @demux: #GstAdaptiveDemux
+ * @buf: Downloaded manifest data
+ *
+ * During live streaming, this will be called for the subclass to update its
+ * manifest with the new version
+ *
+ * Returns: #GST_FLOW_OK is all succeeded, #GST_FLOW_EOS if the stream ended
+ * or #GST_FLOW_ERROR if an error happened
+ */
+ GstFlowReturn (*update_manifest_data) (GstAdaptiveDemux * demux, GstBuffer * buf);
+
+ /* is_live: whether the manifest describes a live stream.
+ * get_duration: total duration, or %GST_CLOCK_TIME_NONE when unknown. */
+ gboolean (*is_live) (GstAdaptiveDemux * demux);
+ GstClockTime (*get_duration) (GstAdaptiveDemux * demux);
+
+ /**
+ * reset:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Reset the internal state of the subclass, getting ready to restart with
+ * a new stream afterwards
+ */
+ void (*reset) (GstAdaptiveDemux * demux);
+
+ /**
+ * seek:
+ * @demux: #GstAdaptiveDemux
+ * @seek: a seek #GstEvent
+ *
+ * The demuxer should seek on all its streams to the specified position
+ * in the seek event
+ *
+ * Returns: %TRUE if successful
+ */
+ gboolean (*seek) (GstAdaptiveDemux * demux, GstEvent * seek);
+
+ /**
+ * has_next_period:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Checks if there is a next period following the current one.
+ * DASH can have multiple medias chained in its manifest, when one finishes
+ * this function is called to verify if there is a new period to be played
+ * in sequence.
+ *
+ * Returns: %TRUE if there is another period
+ */
+ gboolean (*has_next_period) (GstAdaptiveDemux * demux);
+ /**
+ * advance_period:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Advances the manifest to the next period. New streams should be created
+ * using gst_adaptive_demux_stream_new().
+ */
+ void (*advance_period) (GstAdaptiveDemux * demux);
+
+ /* stream_free: release subclass-owned data attached to the stream.
+ * stream_seek: seek the stream to target_ts, reporting the final position.
+ * stream_has_next_fragment: whether another fragment follows the current.
+ * stream_advance_fragment: move the stream to its next fragment. */
+ void (*stream_free) (GstAdaptiveDemuxStream * stream);
+ GstFlowReturn (*stream_seek) (GstAdaptiveDemuxStream * stream, gboolean forward, GstSeekFlags flags, GstClockTime target_ts, GstClockTime * final_ts);
+ gboolean (*stream_has_next_fragment) (GstAdaptiveDemuxStream * stream);
+ GstFlowReturn (*stream_advance_fragment) (GstAdaptiveDemuxStream * stream);
+
+ /**
+ * need_another_chunk:
+ * @stream: #GstAdaptiveDemuxStream
+ *
+ * If chunked downloading is used (chunk_size != 0) this is called once a
+ * chunk is finished to decide whether more has to be downloaded or not.
+ * May update chunk_size to a different value
+ */
+ gboolean (*need_another_chunk) (GstAdaptiveDemuxStream * stream);
+
+ /**
+ * stream_update_fragment_info:
+ * @stream: #GstAdaptiveDemuxStream
+ *
+ * Requests the stream to set the information about the current fragment to its
+ * current fragment struct
+ *
+ * Returns: #GST_FLOW_OK in success, #GST_FLOW_ERROR on error and #GST_FLOW_EOS
+ * if there is no fragment.
+ */
+ GstFlowReturn (*stream_update_fragment_info) (GstAdaptiveDemuxStream * stream);
+ /**
+ * stream_select_bitrate:
+ * @stream: #GstAdaptiveDemuxStream
+ * @bitrate: the bitrate to select (in bytes per second)
+ *
+ * The stream should try to select the bitrate that is the greater, but not
+ * greater than the requested bitrate. If it needs a codec change it should
+ * create the new stream using gst_adaptive_demux_stream_new(). If it only
+ * needs a caps change it should set the new caps using
+ * gst_adaptive_demux_stream_set_caps().
+ *
+ * Returns: %TRUE if the stream changed bitrate, %FALSE otherwise
+ */
+ gboolean (*stream_select_bitrate) (GstAdaptiveDemuxStream * stream, guint64 bitrate);
+ /**
+ * stream_get_fragment_waiting_time:
+ * @stream: #GstAdaptiveDemuxStream
+ *
+ * For live streams, requests how much time should be waited before starting
+ * to download the fragment. This is useful to avoid downloading a fragment that
+ * isn't available yet.
+ *
+ * Returns: The waiting time in microseconds
+ */
+ gint64 (*stream_get_fragment_waiting_time) (GstAdaptiveDemuxStream * stream);
+
+ /**
+ * start_fragment:
+ * @demux: #GstAdaptiveDemux
+ * @stream: #GstAdaptiveDemuxStream
+ *
+ * Notifies the subclass that the given stream is starting the download
+ * of a new fragment. Can be used to reset/init internal state that is
+ * needed before each fragment, like decryption engines.
+ *
+ * Returns: %TRUE if successful.
+ */
+ gboolean (*start_fragment) (GstAdaptiveDemux * demux, GstAdaptiveDemuxStream * stream);
+ /**
+ * finish_fragment:
+ * @demux: #GstAdaptiveDemux
+ * @stream: #GstAdaptiveDemuxStream
+ *
+ * Notifies the subclass that a fragment download was finished.
+ * It can be used to cleanup internal state after a fragment and
+ * also push any pending data before moving to the next fragment.
+ */
+ GstFlowReturn (*finish_fragment) (GstAdaptiveDemux * demux, GstAdaptiveDemuxStream * stream);
+ /**
+ * data_received:
+ * @demux: #GstAdaptiveDemux
+ * @stream: #GstAdaptiveDemuxStream
+ * @buffer: #GstBuffer
+ *
+ * Notifies the subclass that a fragment chunk was downloaded. The subclass
+ * can look at the data and modify/push data as desired.
+ *
+ * Returns: #GST_FLOW_OK if successful, #GST_FLOW_ERROR in case of error.
+ */
+ GstFlowReturn (*data_received) (GstAdaptiveDemux * demux, GstAdaptiveDemuxStream * stream, GstBuffer * buffer);
+
+ /**
+ * get_live_seek_range:
+ * @demux: #GstAdaptiveDemux
+ * @start: pointer to put the start position allowed to seek to
+ * @stop: pointer to put the stop position allowed to seek to
+ *
+ * Gets the allowed seek start and stop positions for the current live stream
+ *
+ * Return: %TRUE if successful
+ */
+ gboolean (*get_live_seek_range) (GstAdaptiveDemux * demux, gint64 * start, gint64 * stop);
+
+ /**
+ * get_presentation_offset:
+ * @demux: #GstAdaptiveDemux
+ * @stream: #GstAdaptiveDemuxStream
+ *
+ * Gets the delay to apply to @stream.
+ *
+ * Return: a #GstClockTime representing the (positive) time offset to apply to
+ * @stream.
+ */
+ GstClockTime (*get_presentation_offset) (GstAdaptiveDemux *demux, GstAdaptiveDemuxStream *stream);
+
+ /**
+ * get_period_start_time:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Gets the start time of the current period. Timestamps are resetting to 0
+ * after each period but we have to maintain a continuous stream and running
+ * time so need to know the start time of the current period.
+ *
+ * Return: a #GstClockTime representing the start time of the currently
+ * selected period.
+ */
+ GstClockTime (*get_period_start_time) (GstAdaptiveDemux *demux);
+
+ /**
+ * requires_periodical_playlist_update:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Some adaptive streaming protocols allow the client to download
+ * the playlist once and build up the fragment list based on the
+ * current fragment metadata. For those protocols the demuxer
+ * doesn't need to periodically refresh the playlist. This vfunc
+ * is relevant only for live playback scenarios.
+ *
+ * Return: %TRUE if the playlist needs to be refreshed periodically by the demuxer.
+ */
+ gboolean (*requires_periodical_playlist_update) (GstAdaptiveDemux * demux);
+ };
+
+ /* Public API exported to adaptive-demux subclasses (hlsdemux, dashdemux,
+ * mssdemux). See the corresponding definitions for full documentation. */
+ GST_ADAPTIVE_DEMUX_API
+ GType gst_adaptive_demux_get_type (void);
+
+ /* Must be called before streams are created if the subclass extends
+ * GstAdaptiveDemuxStream with extra fields. */
+ GST_ADAPTIVE_DEMUX_API
+ void gst_adaptive_demux_set_stream_struct_size (GstAdaptiveDemux * demux,
+ gsize struct_size);
+
+
+ GST_ADAPTIVE_DEMUX_API
+ GstAdaptiveDemuxStream *gst_adaptive_demux_stream_new (GstAdaptiveDemux * demux,
+ GstPad * pad);
+
+ GST_ADAPTIVE_DEMUX_API
+ GstAdaptiveDemuxStream *gst_adaptive_demux_find_stream_for_pad (GstAdaptiveDemux * demux,
+ GstPad * pad);
+
+ GST_ADAPTIVE_DEMUX_API
+ void gst_adaptive_demux_stream_set_caps (GstAdaptiveDemuxStream * stream,
+ GstCaps * caps);
+
+ GST_ADAPTIVE_DEMUX_API
+ void gst_adaptive_demux_stream_set_tags (GstAdaptiveDemuxStream * stream,
+ GstTagList * tags);
+
+ GST_ADAPTIVE_DEMUX_API
+ void gst_adaptive_demux_stream_fragment_clear (GstAdaptiveDemuxStreamFragment * f);
+
+ GST_ADAPTIVE_DEMUX_API
+ GstFlowReturn gst_adaptive_demux_stream_push_buffer (GstAdaptiveDemuxStream * stream, GstBuffer * buffer);
+
+ GST_ADAPTIVE_DEMUX_API
+ GstFlowReturn
+ gst_adaptive_demux_stream_advance_fragment (GstAdaptiveDemux * demux,
+ GstAdaptiveDemuxStream * stream, GstClockTime duration);
+
+ GST_ADAPTIVE_DEMUX_API
+ void gst_adaptive_demux_stream_queue_event (GstAdaptiveDemuxStream * stream,
+ GstEvent * event);
+
+ GST_ADAPTIVE_DEMUX_API
+ GstClockTime gst_adaptive_demux_get_monotonic_time (GstAdaptiveDemux * demux);
+
+ GST_ADAPTIVE_DEMUX_API
+ GDateTime *gst_adaptive_demux_get_client_now_utc (GstAdaptiveDemux * demux);
+
+ GST_ADAPTIVE_DEMUX_API
+ gboolean gst_adaptive_demux_is_running (GstAdaptiveDemux * demux);
+
+ GST_ADAPTIVE_DEMUX_API
+ GstClockTime gst_adaptive_demux_get_qos_earliest_time (GstAdaptiveDemux *demux);
+
+ G_END_DECLS
+
+ #endif
+
--- /dev/null
- int shift = VP8_BD_VALUE_SIZE - 8 - (count + 8);
+ /*
+ * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "dboolhuff.h"
+
+ /* Initialise the boolean decoder over [source, source + source_sz).
+ * Returns 0 on success, 1 when a non-empty buffer has a NULL pointer.
+ * NOTE(review): user_buffer_end is computed before the NULL check below,
+ * so source == NULL with source_sz > 0 performs NULL pointer arithmetic
+ * before the error is reported — harmless in practice, but worth noting. */
+ int
+ vp8dx_start_decode (BOOL_DECODER * br,
+ const unsigned char *source,
+ unsigned int source_sz, vp8_decrypt_cb * decrypt_cb, void *decrypt_state)
+ {
+ br->user_buffer_end = source + source_sz;
+ br->user_buffer = source;
+ br->value = 0;
+ /* count starts at -8 so the first fill primes a full value register */
+ br->count = -8;
+ br->range = 255;
+ br->decrypt_cb = decrypt_cb;
+ br->decrypt_state = decrypt_state;
+
+ if (source_sz && !source)
+ return 1;
+
+ /* Populate the buffer */
+ vp8dx_bool_decoder_fill (br);
+
+ return 0;
+ }
+
+ /* Refill the decoder's value register with as many whole bytes from the
+ * user buffer as fit; when a decrypt callback is installed, bytes are
+ * decrypted into a small stack scratch buffer first. */
+ void
+ vp8dx_bool_decoder_fill (BOOL_DECODER * br)
+ {
+ const unsigned char *bufptr = br->user_buffer;
+ VP8_BD_VALUE value = br->value;
+ int count = br->count;
- bytes_left > sizeof (decrypted) ? sizeof (decrypted) : bytes_left;
++ int shift = VP8_BD_VALUE_SIZE - CHAR_BIT - (count + CHAR_BIT);
+ size_t bytes_left = br->user_buffer_end - bufptr;
+ size_t bits_left = bytes_left * CHAR_BIT;
+ /* x > 0 means the buffer holds fewer bits than the register can take */
+ int x = (int) (shift + CHAR_BIT - bits_left);
+ int loop_end = 0;
+ unsigned char decrypted[sizeof (VP8_BD_VALUE) + 1];
+
+ if (br->decrypt_cb) {
+ size_t n =
++ MIN (sizeof (decrypted), bytes_left);
+ br->decrypt_cb (br->decrypt_state, bufptr, decrypted, (int) n);
+ bufptr = decrypted;
+ }
+
+ if (x >= 0) {
+ /* Near the end of the buffer: pretend we have plenty of bits so the
+ * caller can keep decoding; VP8_LOTS_OF_BITS marks the overrun. */
+ count += VP8_LOTS_OF_BITS;
+ loop_end = x;
+ }
+
+ if (x < 0 || bits_left) {
+ /* Shift each available byte into the high end of the value register. */
+ while (shift >= loop_end) {
+ count += CHAR_BIT;
+ value |= (VP8_BD_VALUE) * bufptr << shift;
+ ++bufptr;
+ ++br->user_buffer;
+ shift -= CHAR_BIT;
+ }
+ }
+
+ br->value = value;
+ br->count = count;
+ }
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) 2011 Andoni Morales Alastruey <ylatuya@gmail.com>
+ *
+ * gstfragment.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <glib.h>
+ #include "gstfragment.h"
+ #include "gsturidownloader.h"
+ #include "gsturidownloader_debug.h"
+
+ #define GST_CAT_DEFAULT uridownloader_debug
+ GST_DEBUG_CATEGORY (uridownloader_debug);
+
+ struct _GstUriDownloaderPrivate
+ {
+ /* Fragments fetcher */
+ GstElement *urisrc;
+ /* private bus collecting urisrc's error/warning messages */
+ GstBus *bus;
+ /* internal sink pad receiving the downloaded buffers */
+ GstPad *pad;
+ /* current (or last completed) download; NULL when none/cancelled */
+ GstFragment *download;
+ gboolean got_buffer;
+ GMutex download_lock; /* used to restrict to one download only */
+
+ /* weak ref to the element whose bus proxies context messages */
+ GWeakRef parent;
+
+ /* first error reported on the bus, if any (owned) */
+ GError *err;
+
+ /* signalled (under the object lock) on EOS, error or cancellation */
+ GCond cond;
+ gboolean cancelled;
+ };
+
+ /* GObject teardown */
+ static void gst_uri_downloader_finalize (GObject * object);
+ static void gst_uri_downloader_dispose (GObject * object);
+
+ /* Internal sink-pad handlers for data arriving from the source element */
+ static GstFlowReturn gst_uri_downloader_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ static gboolean gst_uri_downloader_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static GstBusSyncReply gst_uri_downloader_bus_handler (GstBus * bus,
+ GstMessage * message, gpointer data);
+
+ static gboolean gst_uri_downloader_ensure_src (GstUriDownloader * downloader,
+ const gchar * uri);
+ static void gst_uri_downloader_destroy_src (GstUriDownloader * downloader);
+
+ /* Accepts anything: the downloader does not care about the media type */
+ static GstStaticPadTemplate sinkpadtemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ #define _do_init \
+ { \
+ GST_DEBUG_CATEGORY_INIT (uridownloader_debug, "uridownloader", 0, "URI downloader"); \
+ }
+
+ G_DEFINE_TYPE_WITH_CODE (GstUriDownloader, gst_uri_downloader, GST_TYPE_OBJECT,
+ G_ADD_PRIVATE (GstUriDownloader)
+ _do_init);
+
+ static void
+ gst_uri_downloader_class_init (GstUriDownloaderClass * klass)
+ {
+   GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+   /* Hook GObject teardown so pads, bus and pending downloads are
+    * released properly. */
+   gobject_class->dispose = gst_uri_downloader_dispose;
+   gobject_class->finalize = gst_uri_downloader_finalize;
+ }
+
+ static void
+ gst_uri_downloader_init (GstUriDownloader * downloader)
+ {
+ downloader->priv = gst_uri_downloader_get_instance_private (downloader);
+
+ /* Initialize the sink pad. This pad will be connected to the src pad of the
+ * element created with gst_element_make_from_uri and will handle the download */
+ downloader->priv->pad =
+ gst_pad_new_from_static_template (&sinkpadtemplate, "sink");
+ gst_pad_set_chain_function (downloader->priv->pad,
+ GST_DEBUG_FUNCPTR (gst_uri_downloader_chain));
+ gst_pad_set_event_function (downloader->priv->pad,
+ GST_DEBUG_FUNCPTR (gst_uri_downloader_sink_event));
+ /* Stash ourselves on the pad so the chain/event handlers can get back
+ * to the downloader (the pad has no element parent). */
+ gst_pad_set_element_private (downloader->priv->pad, downloader);
+ gst_pad_set_active (downloader->priv->pad, TRUE);
+
+ /* Create a bus to handle error and warning message from the source element */
+ downloader->priv->bus = gst_bus_new ();
+
+ g_mutex_init (&downloader->priv->download_lock);
+ g_cond_init (&downloader->priv->cond);
+ }
+
+ static void
+ gst_uri_downloader_dispose (GObject * object)
+ {
+ GstUriDownloader *downloader = GST_URI_DOWNLOADER (object);
+
+ /* Tear down the source element first so nothing can push into the pad
+ * or post to the bus while we drop them below. */
+ gst_uri_downloader_destroy_src (downloader);
+
+ /* dispose may run more than once: each pointer is NULLed after unref */
+ if (downloader->priv->bus != NULL) {
+ gst_object_unref (downloader->priv->bus);
+ downloader->priv->bus = NULL;
+ }
+
+ if (downloader->priv->pad) {
+ gst_object_unref (downloader->priv->pad);
+ downloader->priv->pad = NULL;
+ }
+
+ if (downloader->priv->download) {
+ g_object_unref (downloader->priv->download);
+ downloader->priv->download = NULL;
+ }
+
+ g_weak_ref_clear (&downloader->priv->parent);
+
+ G_OBJECT_CLASS (gst_uri_downloader_parent_class)->dispose (object);
+ }
+
+ static void
+ gst_uri_downloader_finalize (GObject * object)
+ {
+   GstUriDownloader *self = GST_URI_DOWNLOADER (object);
+
+   /* Release the synchronisation primitives created in _init(). */
+   g_cond_clear (&self->priv->cond);
+   g_mutex_clear (&self->priv->download_lock);
+
+   G_OBJECT_CLASS (gst_uri_downloader_parent_class)->finalize (object);
+ }
+
+ GstUriDownloader *
+ gst_uri_downloader_new (void)
+ {
+   /* Sink the initial floating reference so the caller owns a real ref. */
+   GstUriDownloader *downloader =
+       g_object_new (GST_TYPE_URI_DOWNLOADER, NULL);
+
+   gst_object_ref_sink (downloader);
+   return downloader;
+ }
+
+ /**
+  * gst_uri_downloader_set_parent:
+  * @downloader: the #GstUriDownloader
+  * @parent: the parent #GstElement
+  *
+  * Sets an element as parent of this #GstUriDownloader so that context
+  * requests from the underlying source are proxied to the main pipeline
+  * and set back if a context was provided.
+  */
+ void
+ gst_uri_downloader_set_parent (GstUriDownloader * downloader,
+ GstElement * parent)
+ {
+ /* Weak ref: must not keep the parent element alive. */
+ g_weak_ref_set (&downloader->priv->parent, parent);
+ }
+
+ /* Event handler for the internal download sink pad.
+  *
+  * EOS marks the in-flight download as complete and wakes the thread
+  * blocked waiting on priv->cond; sticky "http-headers" custom events are
+  * copied into the download object so callers can inspect the response
+  * headers. priv->download is always accessed under the object lock. */
+ static gboolean
+ gst_uri_downloader_sink_event (GstPad * pad, GstObject * parent,
+     GstEvent * event)
+ {
+   gboolean ret = FALSE;
+   GstUriDownloader *downloader;
+
+   downloader = GST_URI_DOWNLOADER (gst_pad_get_element_private (pad));
+
+   switch (event->type) {
+     case GST_EVENT_EOS:{
+       GST_OBJECT_LOCK (downloader);
+       GST_DEBUG_OBJECT (downloader, "Got EOS on the fetcher pad");
+       if (downloader->priv->download != NULL) {
+         /* signal we have fetched the URI */
+         downloader->priv->download->completed = TRUE;
+         downloader->priv->download->download_stop_time =
+             gst_util_get_timestamp ();
+         /* BUGFIX: corrected "funtion" -> "function" in the debug message */
+         GST_DEBUG_OBJECT (downloader, "Signaling chain function");
+         g_cond_signal (&downloader->priv->cond);
+       }
+       GST_OBJECT_UNLOCK (downloader);
+       gst_event_unref (event);
+       break;
+     }
+     case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:{
+       const GstStructure *str;
+
+       str = gst_event_get_structure (event);
+       if (gst_structure_has_name (str, "http-headers")) {
+         GST_OBJECT_LOCK (downloader);
+         if (downloader->priv->download != NULL) {
+           /* replace any previously captured header structure */
+           if (downloader->priv->download->headers)
+             gst_structure_free (downloader->priv->download->headers);
+           downloader->priv->download->headers = gst_structure_copy (str);
+         }
+         GST_OBJECT_UNLOCK (downloader);
+       }
+     }
+       /* falls through */
+     default:
+       ret = gst_pad_event_default (pad, parent, event);
+       break;
+   }
+
+   return ret;
+ }
+
+ /* Synchronous handler for the internal source's bus, running in the
+ * source's posting thread.  Errors cancel the pending download and are
+ * stored for propagation from the fetch function; NEED_CONTEXT messages
+ * are proxied to the parent set via gst_uri_downloader_set_parent().
+ * All messages are dropped (never forwarded). */
+ static GstBusSyncReply
+ gst_uri_downloader_bus_handler (GstBus * bus,
+ GstMessage * message, gpointer data)
+ {
+ GstUriDownloader *downloader = (GstUriDownloader *) (data);
+
+ if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
+ GError *err = NULL;
+ gchar *dbg_info = NULL;
+ gchar *new_error = NULL;
+
+ gst_message_parse_error (message, &err, &dbg_info);
+ GST_WARNING_OBJECT (downloader,
+ "Received error: %s from %s, the download will be cancelled",
+ err->message, GST_OBJECT_NAME (message->src));
+ GST_DEBUG ("Debugging info: %s", (dbg_info) ? dbg_info : "none");
+
+ if (dbg_info)
+ new_error = g_strdup_printf ("%s: %s\n", err->message, dbg_info);
+ if (new_error) {
+ g_free (err->message);
+ err->message = new_error;
+ }
+
+ /* only the first error is kept; later ones are discarded */
+ if (!downloader->priv->err)
+ downloader->priv->err = err;
+ else
+ g_error_free (err);
+
+ g_free (dbg_info);
+
+ /* remove the sync handler to avoid duplicated messages */
+ gst_bus_set_sync_handler (downloader->priv->bus, NULL, NULL, NULL);
+
+ /* stop the download */
+ GST_OBJECT_LOCK (downloader);
+ if (downloader->priv->download != NULL) {
+ GST_DEBUG_OBJECT (downloader, "Stopping download");
+ g_object_unref (downloader->priv->download);
+ downloader->priv->download = NULL;
+ downloader->priv->cancelled = TRUE;
+ g_cond_signal (&downloader->priv->cond);
+ }
+ GST_OBJECT_UNLOCK (downloader);
+ } else if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_WARNING) {
+ GError *err = NULL;
+ gchar *dbg_info = NULL;
+
+ gst_message_parse_warning (message, &err, &dbg_info);
+ /* FIX: arguments were swapped w.r.t. the "%s from %s" format string
+ * (the source name was printed as the message and vice versa); order
+ * now matches the ERROR branch above */
+ GST_WARNING_OBJECT (downloader,
+ "Received warning: %s from %s",
+ err->message, GST_OBJECT_NAME (message->src));
+ GST_DEBUG ("Debugging info: %s", (dbg_info) ? dbg_info : "none");
+ g_error_free (err);
+ g_free (dbg_info);
+ } else if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_NEED_CONTEXT) {
+ GstElement *parent = g_weak_ref_get (&downloader->priv->parent);
+
+ /* post the same need-context as if it was from the parent and then
+ * get it to our internal element that requested it */
+ if (parent && GST_IS_ELEMENT (GST_MESSAGE_SRC (message))) {
+ const gchar *context_type;
+ GstContext *context;
+ GstElement *msg_src = GST_ELEMENT_CAST (GST_MESSAGE_SRC (message));
+
+ gst_message_parse_context_type (message, &context_type);
+ context = gst_element_get_context (parent, context_type);
+
+ /* No context, request one */
+ if (!context) {
+ GstMessage *need_context_msg =
+ gst_message_new_need_context (GST_OBJECT_CAST (parent),
+ context_type);
+ gst_element_post_message (parent, need_context_msg);
+ context = gst_element_get_context (parent, context_type);
+ }
+
+ if (context) {
+ gst_element_set_context (msg_src, context);
+ gst_context_unref (context);
+ }
+ }
+ if (parent)
+ gst_object_unref (parent);
+ }
+
+ gst_message_unref (message);
+ return GST_BUS_DROP;
+ }
+
+ /* Chain function for the internal sink pad: appends each received buffer
+ * to the current fragment.  Always returns GST_FLOW_OK so the source keeps
+ * pushing; cancellation is detected by priv->download being NULL. */
+ static GstFlowReturn
+ gst_uri_downloader_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+ {
+ GstUriDownloader *downloader;
+
+ downloader = GST_URI_DOWNLOADER (gst_pad_get_element_private (pad));
+
+ /* HTML errors (404, 500, etc...) are also pushed through this pad as
+ * response but the source element will also post a warning or error message
+ * in the bus, which is handled synchronously cancelling the download.
+ */
+ GST_OBJECT_LOCK (downloader);
+ if (downloader->priv->download == NULL) {
+ /* Download cancelled, quit */
+ gst_buffer_unref (buf);
+ GST_OBJECT_UNLOCK (downloader);
+ goto done;
+ }
+
+ GST_LOG_OBJECT (downloader, "The uri fetcher received a new buffer "
+ "of size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf));
+ downloader->priv->got_buffer = TRUE;
+ if (!gst_fragment_add_buffer (downloader->priv->download, buf)) {
+ GST_WARNING_OBJECT (downloader, "Could not add buffer to fragment");
+ gst_buffer_unref (buf);
+ }
+ GST_OBJECT_UNLOCK (downloader);
+
+ done:
+ {
+ return GST_FLOW_OK;
+ }
+ }
+
+ /* Clears the cancelled flag so the downloader can be used for a new fetch
+ * after gst_uri_downloader_cancel(). */
+ void
+ gst_uri_downloader_reset (GstUriDownloader * downloader)
+ {
+ g_return_if_fail (downloader != NULL);
+
+ GST_OBJECT_LOCK (downloader);
+ downloader->priv->cancelled = FALSE;
+ GST_OBJECT_UNLOCK (downloader);
+ }
+
+ /* Cancels an in-progress download (dropping the pending fragment and
+ * waking the fetch thread) or, when none is running, latches the
+ * cancelled flag so the next fetch aborts early.  The flag stays set
+ * until gst_uri_downloader_reset() is called. */
+ void
+ gst_uri_downloader_cancel (GstUriDownloader * downloader)
+ {
+ GST_OBJECT_LOCK (downloader);
+ if (downloader->priv->download != NULL) {
+ GST_DEBUG_OBJECT (downloader, "Cancelling download");
+ g_object_unref (downloader->priv->download);
+ downloader->priv->download = NULL;
+ downloader->priv->cancelled = TRUE;
+ GST_DEBUG_OBJECT (downloader, "Signaling chain funtion");
+ g_cond_signal (&downloader->priv->cond);
+ } else {
+ gboolean cancelled;
+
+ cancelled = downloader->priv->cancelled;
+ downloader->priv->cancelled = TRUE;
+ if (cancelled)
+ GST_DEBUG_OBJECT (downloader,
+ "Trying to cancel a download that was alredy cancelled");
+ }
+ GST_OBJECT_UNLOCK (downloader);
+ }
+
+ /* Requests a byte range from the source via a BYTES seek.  A range of
+ * (0, -1) means "whole resource" and sends no seek at all.  Returns TRUE
+ * when no seek was needed or the seek was accepted. */
+ static gboolean
+ gst_uri_downloader_set_range (GstUriDownloader * downloader,
+ gint64 range_start, gint64 range_end)
+ {
+ g_return_val_if_fail (range_start >= 0, FALSE);
+ g_return_val_if_fail (range_end >= -1, FALSE);
+
+ if (range_start || (range_end >= 0)) {
+ GstEvent *seek;
+
+ seek = gst_event_new_seek (1.0, GST_FORMAT_BYTES, GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, range_start, GST_SEEK_TYPE_SET, range_end);
+
+ return gst_element_send_event (downloader->priv->urisrc, seek);
+ }
+ return TRUE;
+ }
+
+ /* Makes sure priv->urisrc is a source element usable for @uri: re-uses the
+ * existing element when the protocol matches and the new URI can be set on
+ * it, otherwise destroys it and creates a fresh one.  Returns TRUE when a
+ * usable source element exists afterwards. */
+ static gboolean
+ gst_uri_downloader_ensure_src (GstUriDownloader * downloader, const gchar * uri)
+ {
+ if (downloader->priv->urisrc) {
+ gchar *old_protocol, *new_protocol;
+ gchar *old_uri;
+
+ old_uri =
+ gst_uri_handler_get_uri (GST_URI_HANDLER (downloader->priv->urisrc));
+ old_protocol = gst_uri_get_protocol (old_uri);
+ new_protocol = gst_uri_get_protocol (uri);
+
+ if (!g_str_equal (old_protocol, new_protocol)) {
+ gst_uri_downloader_destroy_src (downloader);
+ GST_DEBUG_OBJECT (downloader, "Can't re-use old source element");
+ } else {
+ GError *err = NULL;
+
+ GST_DEBUG_OBJECT (downloader, "Re-using old source element");
+ if (!gst_uri_handler_set_uri
+ (GST_URI_HANDLER (downloader->priv->urisrc), uri, &err)) {
+ GST_DEBUG_OBJECT (downloader,
+ "Failed to re-use old source element: %s", err->message);
+ g_clear_error (&err);
+ gst_uri_downloader_destroy_src (downloader);
+ }
+ }
+ g_free (old_uri);
+ g_free (old_protocol);
+ g_free (new_protocol);
+ }
+
+ if (!downloader->priv->urisrc) {
+ GST_DEBUG_OBJECT (downloader, "Creating source element for the URI:%s",
+ uri);
+ downloader->priv->urisrc =
+ gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
+ if (downloader->priv->urisrc) {
+ /* gst_element_make_from_uri returns a floating reference
+ * and we are not going to transfer the ownership, so we
+ * should take it.
+ */
+ gst_object_ref_sink (downloader->priv->urisrc);
+ }
+ }
+
+ return downloader->priv->urisrc != NULL;
+ }
+
+ /* Shuts down and releases the internal source element, if any. */
+ static void
+ gst_uri_downloader_destroy_src (GstUriDownloader * downloader)
+ {
+ if (!downloader->priv->urisrc)
+ return;
+
+ gst_element_set_state (downloader->priv->urisrc, GST_STATE_NULL);
+ gst_object_unref (downloader->priv->urisrc);
+ downloader->priv->urisrc = NULL;
+ }
+
+ /* Prepares the internal source element for @uri: configures the optional
+ * properties the element exposes (compress, keep-alive, extra-headers and
+ * — in the Tizen variant — user-agent, cookies, retries and timeout),
+ * installs the sync bus handler and links the source pad to our internal
+ * pad.  Returns FALSE on invalid URI or setup failure. */
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++static gboolean
++gst_uri_downloader_set_uri (GstUriDownloader * downloader, const gchar * uri,
++ const gchar * referer, gchar* user_agent, gchar** cookies, gint max_retry , gint timeout , gboolean compress, gboolean refresh,
++ gboolean allow_cache)
++#else
+ static gboolean
+ gst_uri_downloader_set_uri (GstUriDownloader * downloader, const gchar * uri,
+ const gchar * referer, gboolean compress,
+ gboolean refresh, gboolean allow_cache)
++#endif
+ {
+ GstPad *pad;
+ GObjectClass *gobject_class;
+
+ if (!gst_uri_is_valid (uri))
+ return FALSE;
+
+ if (!gst_uri_downloader_ensure_src (downloader, uri))
+ return FALSE;
+
+ /* properties are probed before being set since the element type depends
+ * on the URI protocol and may not expose all of them */
+ gobject_class = G_OBJECT_GET_CLASS (downloader->priv->urisrc);
+ if (g_object_class_find_property (gobject_class, "compress"))
+ g_object_set (downloader->priv->urisrc, "compress", compress, NULL);
+ if (g_object_class_find_property (gobject_class, "keep-alive"))
+ g_object_set (downloader->priv->urisrc, "keep-alive", TRUE, NULL);
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ if (user_agent && g_object_class_find_property (gobject_class, "user-agent"))
++ g_object_set (downloader->priv->urisrc, "user-agent", user_agent, NULL);
++ if (cookies && g_object_class_find_property (gobject_class, "cookies"))
++ g_object_set (downloader->priv->urisrc, "cookies", cookies, NULL);
++
++ if ((max_retry != -1)&&(g_object_class_find_property (gobject_class, "retries")))
++ g_object_set (downloader->priv->urisrc, "retries", max_retry, NULL);
++ if ((timeout != -1)&&(g_object_class_find_property (gobject_class, "timeout")))
++ g_object_set (downloader->priv->urisrc, "timeout", timeout, NULL);
++#endif
+ if (g_object_class_find_property (gobject_class, "extra-headers")) {
+ if (referer || refresh || !allow_cache) {
+ GstStructure *extra_headers = gst_structure_new_empty ("headers");
+
+ if (referer)
+ gst_structure_set (extra_headers, "Referer", G_TYPE_STRING, referer,
+ NULL);
+
+ if (!allow_cache)
+ gst_structure_set (extra_headers, "Cache-Control", G_TYPE_STRING,
+ "no-cache", NULL);
+ else if (refresh)
+ gst_structure_set (extra_headers, "Cache-Control", G_TYPE_STRING,
+ "max-age=0", NULL);
+
+ g_object_set (downloader->priv->urisrc, "extra-headers", extra_headers,
+ NULL);
+
+ gst_structure_free (extra_headers);
+ } else {
+ g_object_set (downloader->priv->urisrc, "extra-headers", NULL, NULL);
+ }
+ }
+
+ /* add a sync handler for the bus messages to detect errors in the download */
+ gst_element_set_bus (GST_ELEMENT (downloader->priv->urisrc),
+ downloader->priv->bus);
+ gst_bus_set_sync_handler (downloader->priv->bus,
+ gst_uri_downloader_bus_handler, downloader, NULL);
+
+ pad = gst_element_get_static_pad (downloader->priv->urisrc, "src");
+ if (!pad)
+ return FALSE;
+ gst_pad_link (pad, downloader->priv->pad);
+ gst_object_unref (pad);
+ return TRUE;
+ }
+
+ /* Sets the HTTP method (e.g. "HEAD") on the source element when it exposes
+ * a "method" property.  Returns FALSE when there is no source element or
+ * it does not support selecting the method. */
+ static gboolean
+ gst_uri_downloader_set_method (GstUriDownloader * downloader,
+ const gchar * method)
+ {
+ GObjectClass *gobject_class;
+
+ if (!downloader->priv->urisrc)
+ return FALSE;
+
+ gobject_class = G_OBJECT_GET_CLASS (downloader->priv->urisrc);
+ if (g_object_class_find_property (gobject_class, "method")) {
+ g_object_set (downloader->priv->urisrc, "method", method, NULL);
+ return TRUE;
+ }
+ return FALSE;
+ }
-
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++/* Convenience wrapper: download the whole resource (range 0..-1). */
++GstFragment *
++gst_uri_downloader_fetch_uri (GstUriDownloader * downloader,
++ const gchar * uri, const gchar * referer, gchar* user_agent, gchar** cookies, gint max_retry , gint timeout , gboolean compress,
++ gboolean refresh, gboolean allow_cache, GError ** err)
++{
++ return gst_uri_downloader_fetch_uri_with_range (downloader, uri,
++ referer, user_agent, cookies, max_retry, timeout, compress, refresh, allow_cache, 0, -1, err);
++}
++#else
+ /* Convenience wrapper: download the whole resource (range 0..-1). */
+ GstFragment *
+ gst_uri_downloader_fetch_uri (GstUriDownloader * downloader,
+ const gchar * uri, const gchar * referer, gboolean compress,
+ gboolean refresh, gboolean allow_cache, GError ** err)
+ {
+ return gst_uri_downloader_fetch_uri_with_range (downloader, uri,
+ referer, compress, refresh, allow_cache, 0, -1, err);
+ }
-
++#endif
+ /**
+ * gst_uri_downloader_fetch_uri_with_range:
+ * @downloader: the #GstUriDownloader
+ * @uri: the uri
+ * @range_start: the starting byte index
+ * @range_end: the final byte index, use -1 for unspecified
+ *
+ * Downloads @uri synchronously: blocks until the transfer finishes (EOS),
+ * fails (error on the internal bus) or is cancelled.  When both
+ * @range_start and @range_end are negative, a HEAD request is issued
+ * instead of a ranged GET.  On failure, NULL is returned and @err is set
+ * (from the bus error when one was captured).
+ *
+ * Returns the downloaded #GstFragment
+ */
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++GstFragment *
++gst_uri_downloader_fetch_uri_with_range (GstUriDownloader *
++ downloader, const gchar * uri, const gchar * referer, gchar* user_agent, gchar** cookies, gint max_retry , gint timeout , gboolean compress,
++ gboolean refresh, gboolean allow_cache,
++ gint64 range_start, gint64 range_end, GError ** err)
++#else
+ GstFragment *
+ gst_uri_downloader_fetch_uri_with_range (GstUriDownloader *
+ downloader, const gchar * uri, const gchar * referer, gboolean compress,
+ gboolean refresh, gboolean allow_cache,
+ gint64 range_start, gint64 range_end, GError ** err)
++#endif
+ {
+ GstStateChangeReturn ret;
+ GstFragment *download = NULL;
+
+ GST_DEBUG_OBJECT (downloader, "Fetching URI %s", uri);
+
+ /* download_lock serialises concurrent fetches; the object lock protects
+ * the download/cancelled state shared with the streaming threads */
+ g_mutex_lock (&downloader->priv->download_lock);
+ downloader->priv->err = NULL;
+ downloader->priv->got_buffer = FALSE;
+
+ GST_OBJECT_LOCK (downloader);
+ if (downloader->priv->cancelled) {
+ GST_DEBUG_OBJECT (downloader, "Cancelled, aborting fetch");
+ goto quit;
+ }
-
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++ if (!gst_uri_downloader_set_uri (downloader, uri, referer, user_agent, cookies, max_retry, timeout ,compress, refresh,
++ allow_cache)) {
++ GST_WARNING_OBJECT (downloader, "Failed to set URI");
++ goto quit;
++ }
++#else
+ if (!gst_uri_downloader_set_uri (downloader, uri, referer, compress, refresh,
+ allow_cache)) {
+ GST_WARNING_OBJECT (downloader, "Failed to set URI");
+ goto quit;
+ }
++#endif
+ gst_bus_set_flushing (downloader->priv->bus, FALSE);
+ if (downloader->priv->download)
+ g_object_unref (downloader->priv->download);
+ downloader->priv->download = gst_fragment_new ();
+ downloader->priv->download->range_start = range_start;
+ downloader->priv->download->range_end = range_end;
+ GST_OBJECT_UNLOCK (downloader);
+ ret = gst_element_set_state (downloader->priv->urisrc, GST_STATE_READY);
+ GST_OBJECT_LOCK (downloader);
+ if (ret == GST_STATE_CHANGE_FAILURE || downloader->priv->download == NULL) {
+ GST_WARNING_OBJECT (downloader, "Failed to set src to READY");
+ goto quit;
+ }
+
+ /* might have been cancelled because of failures in state change */
+ if (downloader->priv->cancelled) {
+ goto quit;
+ }
+
+ /* negative start and end select a HEAD request instead of a ranged GET */
+ if (range_start < 0 && range_end < 0) {
+ if (!gst_uri_downloader_set_method (downloader, "HEAD")) {
+ GST_WARNING_OBJECT (downloader, "Failed to set HTTP method");
+ goto quit;
+ }
+ } else {
+ if (!gst_uri_downloader_set_range (downloader, range_start, range_end)) {
+ GST_WARNING_OBJECT (downloader, "Failed to set range");
+ goto quit;
+ }
+ }
+
+ GST_OBJECT_UNLOCK (downloader);
+ ret = gst_element_set_state (downloader->priv->urisrc, GST_STATE_PLAYING);
+ GST_OBJECT_LOCK (downloader);
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ if (downloader->priv->download) {
+ g_object_unref (downloader->priv->download);
+ downloader->priv->download = NULL;
+ }
+ goto quit;
+ }
+
+ /* might have been cancelled because of failures in state change */
+ if (downloader->priv->cancelled) {
+ goto quit;
+ }
+
+ /* wait until:
+ * - the download succeed (EOS in the src pad)
+ * - the download failed (Error message on the fetcher bus)
+ * - the download was canceled
+ */
+ GST_DEBUG_OBJECT (downloader, "Waiting to fetch the URI %s", uri);
+ while (!downloader->priv->cancelled && !downloader->priv->download->completed)
+ g_cond_wait (&downloader->priv->cond, GST_OBJECT_GET_LOCK (downloader));
+
+ if (downloader->priv->cancelled) {
+ if (downloader->priv->download) {
+ g_object_unref (downloader->priv->download);
+ downloader->priv->download = NULL;
+ }
+ goto quit;
+ }
+
+ download = downloader->priv->download;
+ downloader->priv->download = NULL;
+ if (!downloader->priv->got_buffer) {
+ if (download->range_start < 0 && download->range_end < 0) {
+ /* HEAD request, so we don't expect a response */
+ } else {
+ g_object_unref (download);
+ download = NULL;
+ GST_ERROR_OBJECT (downloader, "Didn't retrieve a buffer before EOS");
+ }
+ }
+
+ if (download != NULL)
+ GST_INFO_OBJECT (downloader, "URI fetched successfully");
+ else
+ GST_INFO_OBJECT (downloader, "Error fetching URI");
+
+ quit:
+ {
+ if (downloader->priv->urisrc) {
+ GstPad *pad;
+ GstElement *urisrc;
+
+ urisrc = downloader->priv->urisrc;
+
+ GST_DEBUG_OBJECT (downloader, "Stopping source element %s",
+ GST_ELEMENT_NAME (urisrc));
+
+ /* remove the bus' sync handler */
+ gst_bus_set_sync_handler (downloader->priv->bus, NULL, NULL, NULL);
+ gst_bus_set_flushing (downloader->priv->bus, TRUE);
+
+ /* set the element state to NULL */
+ GST_OBJECT_UNLOCK (downloader);
+ if (download == NULL) {
+ gst_element_set_state (urisrc, GST_STATE_NULL);
+ } else {
+ GstQuery *query;
+
+ /* Download successful, let's query the URI */
+ query = gst_query_new_uri ();
+ if (gst_element_query (urisrc, query)) {
+ gst_query_parse_uri (query, &download->uri);
+ gst_query_parse_uri_redirection (query, &download->redirect_uri);
+ gst_query_parse_uri_redirection_permanent (query,
+ &download->redirect_permanent);
+ }
+ gst_query_unref (query);
+ gst_element_set_state (urisrc, GST_STATE_READY);
+ }
+ GST_OBJECT_LOCK (downloader);
+ gst_element_set_bus (urisrc, NULL);
+
+ /* unlink the source element from the internal pad */
+ pad = gst_pad_get_peer (downloader->priv->pad);
+ if (pad) {
+ gst_pad_unlink (pad, downloader->priv->pad);
+ gst_object_unref (pad);
+ }
+ }
+ GST_OBJECT_UNLOCK (downloader);
+
+ if (download == NULL) {
+ if (!downloader->priv->err) {
+ g_set_error (err, GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_OPEN_READ,
+ "Failed to download '%s'", uri);
+ } else {
+ g_propagate_error (err, downloader->priv->err);
+ downloader->priv->err = NULL;
+ }
+ }
+
+ downloader->priv->cancelled = FALSE;
+
+ g_mutex_unlock (&downloader->priv->download_lock);
+ return download;
+ }
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2011 Andoni Morales Alastruey <ylatuya@gmail.com>
+ *
+ * gsturidownloader.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GSTURI_DOWNLOADER_H__
+ #define __GSTURI_DOWNLOADER_H__
+
+ #ifndef GST_USE_UNSTABLE_API
+ #warning "The UriDownloaded library from gst-plugins-bad is unstable API and may change in future."
+ #warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
+ #endif
+
+ #include <glib-object.h>
+ #include <gst/gst.h>
+ #include "gstfragment.h"
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_URI_DOWNLOADER (gst_uri_downloader_get_type())
+ #define GST_URI_DOWNLOADER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_URI_DOWNLOADER,GstUriDownloader))
+ #define GST_URI_DOWNLOADER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_URI_DOWNLOADER,GstUriDownloaderClass))
+ #define GST_IS_URI_DOWNLOADER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_URI_DOWNLOADER))
+ #define GST_IS_URI_DOWNLOADER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_URI_DOWNLOADER))
+
+ typedef struct _GstUriDownloader GstUriDownloader;
+ typedef struct _GstUriDownloaderPrivate GstUriDownloaderPrivate;
+ typedef struct _GstUriDownloaderClass GstUriDownloaderClass;
+
+ /* Synchronous URI fetcher; all mutable state lives in the private struct. */
+ struct _GstUriDownloader
+ {
+ GstObject parent;
+
+ /*< private >*/
+ GstUriDownloaderPrivate *priv;
+ };
+
+ struct _GstUriDownloaderClass
+ {
+ GstObjectClass parent_class;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+ };
+
+ GST_URI_DOWNLOADER_API
+ GType gst_uri_downloader_get_type (void);
+
+ GST_URI_DOWNLOADER_API
+ GstUriDownloader * gst_uri_downloader_new (void);
+
+ GST_URI_DOWNLOADER_API
+ void gst_uri_downloader_set_parent (GstUriDownloader * downloader, GstElement * parent);
+
++#ifdef TIZEN_FEATURE_ADAPTIVE_MODIFICATION
++GST_URI_DOWNLOADER_API
++GstFragment * gst_uri_downloader_fetch_uri (GstUriDownloader * downloader, const gchar * uri, const gchar * referer, gchar* user_agent, gchar** cookies, gint max_retry , gint timeout , gboolean compress, gboolean refresh, gboolean allow_cache, GError ** err);
++
++GST_URI_DOWNLOADER_API
++GstFragment * gst_uri_downloader_fetch_uri_with_range (GstUriDownloader * downloader, const gchar * uri, const gchar * referer, gchar* user_agent, gchar** cookies, gint max_retry , gint timeout , gboolean compress, gboolean refresh, gboolean allow_cache, gint64 range_start, gint64 range_end, GError ** err);
++#else
+ GST_URI_DOWNLOADER_API
+ GstFragment * gst_uri_downloader_fetch_uri (GstUriDownloader * downloader, const gchar * uri, const gchar * referer, gboolean compress, gboolean refresh, gboolean allow_cache, GError ** err);
+
+ GST_URI_DOWNLOADER_API
+ GstFragment * gst_uri_downloader_fetch_uri_with_range (GstUriDownloader * downloader, const gchar * uri, const gchar * referer, gboolean compress, gboolean refresh, gboolean allow_cache, gint64 range_start, gint64 range_end, GError ** err);
++#endif
+
+ GST_URI_DOWNLOADER_API
+ void gst_uri_downloader_reset (GstUriDownloader *downloader);
+
+ GST_URI_DOWNLOADER_API
+ void gst_uri_downloader_cancel (GstUriDownloader *downloader);
+
+ G_END_DECLS
+ #endif /* __GSTURI_DOWNLOADER_H__ */
--- /dev/null
- registry_handle_global
+ /*
+ * GStreamer
+ * Copyright (C) 2016 Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/vulkan/wayland/gstvkdisplay_wayland.h>
+
+ #include "wayland_event_source.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_vulkan_display_wayland_debug);
+ #define GST_CAT_DEFAULT gst_vulkan_display_wayland_debug
+
+ /* the _C_ argument initialises the debug category on first type use */
+ G_DEFINE_TYPE_WITH_CODE (GstVulkanDisplayWayland, gst_vulkan_display_wayland,
+ GST_TYPE_VULKAN_DISPLAY, GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT,
+ "vulkandisplaywayland", 0, "Vulkan Wayland Display");
+ );
+
+ static void gst_vulkan_display_wayland_finalize (GObject * object);
+ static gpointer gst_vulkan_display_wayland_get_handle (GstVulkanDisplay *
+ display);
+
+ /* wl_registry listener: binds the globals we need.  All interfaces are
+ * bound at version 1 regardless of the advertised version. */
+ static void
+ registry_handle_global (void *data, struct wl_registry *registry,
+ uint32_t name, const char *interface, uint32_t version)
+ {
+ GstVulkanDisplayWayland *display = data;
+
+ GST_TRACE_OBJECT (display, "registry_handle_global with registry %p, "
+ "interface %s, version %u", registry, interface, version);
+
+ if (g_strcmp0 (interface, "wl_compositor") == 0) {
+ display->compositor =
+ wl_registry_bind (registry, name, &wl_compositor_interface, 1);
+ } else if (g_strcmp0 (interface, "wl_subcompositor") == 0) {
+ display->subcompositor =
+ wl_registry_bind (registry, name, &wl_subcompositor_interface, 1);
+ } else if (g_strcmp0 (interface, "wl_shell") == 0) {
+ display->shell = wl_registry_bind (registry, name, &wl_shell_interface, 1);
+ }
+ }
+
++/* global_remove must be implemented: a listener with a NULL member would
++ * crash inside libwayland when a global disappears */
++static void
++registry_handle_global_remove (void *data, struct wl_registry *registry,
++ uint32_t name)
++{
++ GST_LOG ("Removed global object: name(%d)", name);
++}
++
+ static const struct wl_registry_listener registry_listener = {
++ registry_handle_global,
++ registry_handle_global_remove
+ };
+
+ /* Registers our registry listener and blocks in a roundtrip so the
+ * initial set of globals is bound before the display is used. */
+ static void
+ _connect_listeners (GstVulkanDisplayWayland * display)
+ {
+ display->registry = wl_display_get_registry (display->display);
+ /* FIX: "&registry_listener" had been mangled into the mojibake
+ * "(R)istry_listener" (an HTML-entity corruption), which cannot compile */
+ wl_registry_add_listener (display->registry, &registry_listener, display);
+
+ wl_display_roundtrip (display->display);
+ }
+
+ /* GObject class setup: install the display-handle vfunc and finalizer. */
+ static void
+ gst_vulkan_display_wayland_class_init (GstVulkanDisplayWaylandClass * klass)
+ {
+ GST_VULKAN_DISPLAY_CLASS (klass)->get_handle =
+ GST_DEBUG_FUNCPTR (gst_vulkan_display_wayland_get_handle);
+
+ G_OBJECT_CLASS (klass)->finalize = gst_vulkan_display_wayland_finalize;
+ }
+
+ /* Instance init: mark the display type; the wl_display connection itself
+ * is created later in _new() / _new_with_display(). */
+ static void
+ gst_vulkan_display_wayland_init (GstVulkanDisplayWayland * display_wayland)
+ {
+ GstVulkanDisplay *display = (GstVulkanDisplay *) display_wayland;
+
+ display->type = GST_VULKAN_DISPLAY_TYPE_WAYLAND;
+ display_wayland->foreign_display = FALSE;
+ }
+
+ /* Finalizer: only disconnect the wl_display when we created it ourselves;
+ * foreign displays are owned by the application. */
+ static void
+ gst_vulkan_display_wayland_finalize (GObject * object)
+ {
+ GstVulkanDisplayWayland *display_wayland =
+ GST_VULKAN_DISPLAY_WAYLAND (object);
+
+ if (!display_wayland->foreign_display && display_wayland->display) {
+ wl_display_flush (display_wayland->display);
+ wl_display_disconnect (display_wayland->display);
+ }
+
+ G_OBJECT_CLASS (gst_vulkan_display_wayland_parent_class)->finalize (object);
+ }
+
+ /**
+ * gst_vulkan_display_wayland_new:
+ * @name: (allow-none): a display name
+ *
+ * Create a new #GstVulkanDisplayWayland from the wayland display name. See `wl_display_connect`()
+ * for details on what is a valid name.
+ *
+ * Returns: (transfer full): a new #GstVulkanDisplayWayland or %NULL
+ *
+ * Since: 1.18
+ */
+ GstVulkanDisplayWayland *
+ gst_vulkan_display_wayland_new (const gchar * name)
+ {
+ GstVulkanDisplayWayland *ret;
+
+ ret = g_object_new (GST_TYPE_VULKAN_DISPLAY_WAYLAND, NULL);
+ gst_object_ref_sink (ret);
+ ret->display = wl_display_connect (name);
+
+ if (!ret->display) {
+ GST_ERROR ("Failed to open Wayland display connection with name, \'%s\'",
+ name);
+ /* FIX: the ref_sink'ed object was leaked on connection failure */
+ gst_object_unref (ret);
+ return NULL;
+ }
+
+ /* connecting the listeners after attaching the event source will race with
+ * the source and the source may eat an event that we're waiting for and
+ * deadlock */
+ _connect_listeners (ret);
+
+ GST_VULKAN_DISPLAY (ret)->event_source =
+ wayland_event_source_new (ret->display, NULL);
+ g_source_attach (GST_VULKAN_DISPLAY (ret)->event_source,
+ GST_VULKAN_DISPLAY (ret)->main_context);
+
+ return ret;
+ }
+
+ /**
+ * gst_vulkan_display_wayland_new_with_display:
+ * @display: an existing, wayland display
+ *
+ * Creates a new display connection from a wl_display Display.  The caller
+ * keeps ownership of @display (foreign_display is set, so finalize will
+ * not disconnect it).
+ *
+ * Returns: (transfer full): a new #GstVulkanDisplayWayland
+ *
+ * Since: 1.18
+ */
+ GstVulkanDisplayWayland *
+ gst_vulkan_display_wayland_new_with_display (struct wl_display * display)
+ {
+ GstVulkanDisplayWayland *ret;
+
+ g_return_val_if_fail (display != NULL, NULL);
+
+ ret = g_object_new (GST_TYPE_VULKAN_DISPLAY_WAYLAND, NULL);
+ gst_object_ref_sink (ret);
+
+ ret->display = display;
+ ret->foreign_display = TRUE;
+
+ _connect_listeners (ret);
+
+ return ret;
+ }
+
+ /* GstVulkanDisplay vfunc: the native handle is the wl_display pointer. */
+ static gpointer
+ gst_vulkan_display_wayland_get_handle (GstVulkanDisplay * display)
+ {
+ return GST_VULKAN_DISPLAY_WAYLAND (display)->display;
+ }
+
+ /* One-shot GSource callback that performs a display roundtrip on the
+ * display's main context. */
+ static gboolean
+ _roundtrip_async (gpointer data)
+ {
+ GstVulkanDisplayWayland *display = data;
+
+ wl_display_roundtrip (display->display);
+
+ return G_SOURCE_REMOVE;
+ }
+
+ /* Schedules a wl_display roundtrip on the display's main context instead
+ * of running it in the caller's thread. */
+ void
+ gst_vulkan_display_wayland_roundtrip_async (GstVulkanDisplayWayland * display)
+ {
+ g_return_if_fail (GST_IS_VULKAN_DISPLAY_WAYLAND (display));
+
+ g_main_context_invoke (GST_VULKAN_DISPLAY (display)->main_context,
+ (GSourceFunc) _roundtrip_async, display);
+ }
--- /dev/null
- if (gst_caps_is_any (caps) || gst_caps_is_empty (caps))
+ /* GStreamer
+ *
+ * Copyright 2007-2012 Collabora Ltd
+ * @author: Olivier Crete <olivier.crete@collabora.com>
+ * Copyright 2007-2008 Nokia
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:element-autoconvert
+ * @title: autoconvert
+ *
+ * The #autoconvert element has one sink and one source pad. It will look for
+ * other elements that also have one sink and one source pad.
+ * It will then pick an element that matches the caps on both sides.
+ * If the caps change, it may change the selected element if the current one
+ * no longer matches the caps.
+ *
+ * The list of element it will look into can be specified in the
+ * #GstAutoConvert:factories property, otherwise it will look at all available
+ * elements.
+ */
+
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstautoconvert.h"
+
+ #include <string.h>
+
+ GST_DEBUG_CATEGORY (autoconvert_debug);
+ #define GST_CAT_DEFAULT (autoconvert_debug)
+
+ /* the autoconvert lock is simply the element's object lock */
+ #define GST_AUTOCONVERT_LOCK(ac) GST_OBJECT_LOCK (ac)
+ #define GST_AUTOCONVERT_UNLOCK(ac) GST_OBJECT_UNLOCK (ac)
+
+ /* elementfactory information */
+ /* external pads of the bin: accept anything, the selected child narrows
+ * the caps at negotiation time */
+ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ /* templates for the internal proxy pads linked to the selected child */
+ static GstStaticPadTemplate sink_internal_template =
+ GST_STATIC_PAD_TEMPLATE ("sink_internal",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate src_internal_template =
+ GST_STATIC_PAD_TEMPLATE ("src_internal",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ /* GstAutoConvert signals and args */
+ enum
+ {
+ /* FILL ME */
+ LAST_SIGNAL
+ };
+
+ enum
+ {
+ PROP_0,
+ PROP_FACTORIES
+ };
+
+ static void gst_auto_convert_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_auto_convert_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+ static void gst_auto_convert_dispose (GObject * object);
+
+ static GstElement *gst_auto_convert_get_subelement (GstAutoConvert *
+ autoconvert);
+ static GstPad *gst_auto_convert_get_internal_sinkpad (GstAutoConvert *
+ autoconvert);
+ static GstPad *gst_auto_convert_get_internal_srcpad (GstAutoConvert *
+ autoconvert);
+
+ static GstIterator *gst_auto_convert_iterate_internal_links (GstPad * pad,
+ GstObject * parent);
+
+ static gboolean gst_auto_convert_sink_setcaps (GstAutoConvert * autoconvert,
+ GstCaps * caps);
+ static GstCaps *gst_auto_convert_getcaps (GstAutoConvert * autoconvert,
+ GstCaps * filter, GstPadDirection dir);
+ static GstFlowReturn gst_auto_convert_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+ static GstFlowReturn gst_auto_convert_sink_chain_list (GstPad * pad,
+ GstObject * parent, GstBufferList * list);
+ static gboolean gst_auto_convert_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_auto_convert_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+ static gboolean gst_auto_convert_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_auto_convert_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+ static GstFlowReturn gst_auto_convert_internal_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+ static GstFlowReturn gst_auto_convert_internal_sink_chain_list (GstPad * pad,
+ GstObject * parent, GstBufferList * list);
+ static gboolean gst_auto_convert_internal_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_auto_convert_internal_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+ static gboolean gst_auto_convert_internal_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_auto_convert_internal_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+ static GList *gst_auto_convert_load_factories (GstAutoConvert * autoconvert);
+ static GstElement
+ * gst_auto_convert_get_or_make_element_from_factory (GstAutoConvert *
+ autoconvert, GstElementFactory * factory);
+ static gboolean gst_auto_convert_activate_element (GstAutoConvert * autoconvert,
+ GstElement * element, GstCaps * caps);
+
/* qdata keys: each child element carries pointers to its two internal pads
 * (set in gst_auto_convert_add_element()), and each internal pad carries a
 * pointer back to the owning GstAutoConvert (read back via parent_quark in
 * the internal_* pad functions below). */
static GQuark internal_srcpad_quark = 0;
static GQuark internal_sinkpad_quark = 0;
static GQuark parent_quark = 0;

G_DEFINE_TYPE (GstAutoConvert, gst_auto_convert, GST_TYPE_BIN);
GST_ELEMENT_REGISTER_DEFINE (autoconvert, "autoconvert",
    GST_RANK_NONE, GST_TYPE_AUTO_CONVERT);
+
+ static void
+ gst_auto_convert_class_init (GstAutoConvertClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (autoconvert_debug, "autoconvert", 0,
+ "Auto convert based on caps");
+
+ internal_srcpad_quark = g_quark_from_static_string ("internal_srcpad");
+ internal_sinkpad_quark = g_quark_from_static_string ("internal_sinkpad");
+ parent_quark = g_quark_from_static_string ("parent");
+
+
+ gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
+ gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Select converter based on caps", "Generic/Bin",
+ "Selects the right transform element based on the caps",
+ "Olivier Crete <olivier.crete@collabora.com>");
+
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_auto_convert_dispose);
+
+ gobject_class->set_property = gst_auto_convert_set_property;
+ gobject_class->get_property = gst_auto_convert_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_FACTORIES,
+ g_param_spec_pointer ("factories",
+ "GList of GstElementFactory",
+ "GList of GstElementFactory objects to pick from (the element takes"
+ " ownership of the list (NULL means it will go through all possible"
+ " elements), can only be set once",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ }
+
+ static void
+ gst_auto_convert_init (GstAutoConvert * autoconvert)
+ {
+ autoconvert->sinkpad =
+ gst_pad_new_from_static_template (&sinktemplate, "sink");
+ autoconvert->srcpad = gst_pad_new_from_static_template (&srctemplate, "src");
+
+ gst_pad_set_chain_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_sink_chain));
+ gst_pad_set_chain_list_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_sink_chain_list));
+ gst_pad_set_event_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_sink_event));
+ gst_pad_set_query_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_sink_query));
+ gst_pad_set_iterate_internal_links_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_iterate_internal_links));
+
+ gst_pad_set_event_function (autoconvert->srcpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_src_event));
+ gst_pad_set_query_function (autoconvert->srcpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_src_query));
+ gst_pad_set_iterate_internal_links_function (autoconvert->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_iterate_internal_links));
+
+ gst_element_add_pad (GST_ELEMENT (autoconvert), autoconvert->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (autoconvert), autoconvert->srcpad);
+ }
+
+ static void
+ gst_auto_convert_dispose (GObject * object)
+ {
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (object);
+
+ g_clear_object (&autoconvert->current_subelement);
+ g_clear_object (&autoconvert->current_internal_sinkpad);
+ g_clear_object (&autoconvert->current_internal_srcpad);
+
+ for (;;) {
+ GList *factories = g_atomic_pointer_get (&autoconvert->factories);
+
+ if (g_atomic_pointer_compare_and_exchange (&autoconvert->factories,
+ factories, NULL)) {
+ gst_plugin_feature_list_free (factories);
+ break;
+ }
+ }
+
+ G_OBJECT_CLASS (gst_auto_convert_parent_class)->dispose (object);
+ }
+
+ static void
+ gst_auto_convert_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (object);
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ case PROP_FACTORIES:
+ if (g_atomic_pointer_get (&autoconvert->factories) == NULL) {
+ GList *factories = g_value_get_pointer (value);
+ factories = g_list_copy (factories);
+ if (g_atomic_pointer_compare_and_exchange (&autoconvert->factories,
+ (GList *) NULL, factories))
+ g_list_foreach (factories, (GFunc) g_object_ref, NULL);
+ else
+ g_list_free (factories);
+ } else {
+ GST_WARNING_OBJECT (object, "Can not reset factories after they"
+ " have been set or auto-discovered");
+ }
+ break;
+ }
+ }
+
+ static void
+ gst_auto_convert_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (object);
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ case PROP_FACTORIES:
+ g_value_set_pointer (value,
+ g_atomic_pointer_get (&autoconvert->factories));
+ break;
+ }
+ }
+
+
+ static GstElement *
+ gst_auto_convert_get_element_by_type (GstAutoConvert * autoconvert, GType type)
+ {
+ GList *item;
+ GstBin *bin = GST_BIN (autoconvert);
+ GstElement *element = NULL;
+
+ g_return_val_if_fail (type != 0, NULL);
+
+ GST_OBJECT_LOCK (autoconvert);
+
+ for (item = bin->children; item; item = item->next) {
+ if (G_TYPE_CHECK_INSTANCE_TYPE (item->data, type)) {
+ element = gst_object_ref (item->data);
+ break;
+ }
+ }
+
+ GST_OBJECT_UNLOCK (autoconvert);
+
+ return element;
+ }
+
+ /**
+ * get_pad_by_direction:
+ * @element: The Element
+ * @direction: The direction
+ *
+ * Gets a #GstPad that goes in the requested direction. I will return NULL
+ * if there is no pad or if there is more than one pad in this direction
+ */
+
+ static GstPad *
+ get_pad_by_direction (GstElement * element, GstPadDirection direction)
+ {
+ GstIterator *iter = gst_element_iterate_pads (element);
+ GstPad *selected_pad = NULL;
+ gboolean done;
+ GValue item = { 0, };
+
+ if (!iter)
+ return NULL;
+
+ done = FALSE;
+ while (!done) {
+ switch (gst_iterator_next (iter, &item)) {
+ case GST_ITERATOR_OK:
+ {
+ GstPad *pad = g_value_get_object (&item);
+
+ if (gst_pad_get_direction (pad) == direction) {
+ /* We check if there is more than one pad in this direction,
+ * if there is, we return NULL so that the element is refused
+ */
+ if (selected_pad) {
+ done = TRUE;
+ gst_object_unref (selected_pad);
+ selected_pad = NULL;
+ } else {
+ selected_pad = g_object_ref (pad);
+ }
+ }
+ g_value_unset (&item);
+ }
+ break;
+ case GST_ITERATOR_RESYNC:
+ if (selected_pad) {
+ gst_object_unref (selected_pad);
+ selected_pad = NULL;
+ }
+ gst_iterator_resync (iter);
+ break;
+ case GST_ITERATOR_ERROR:
+ GST_ERROR ("Error iterating pads of element %s",
+ GST_OBJECT_NAME (element));
+ gst_object_unref (selected_pad);
+ selected_pad = NULL;
+ done = TRUE;
+ break;
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ }
+ }
+ g_value_unset (&item);
+ gst_iterator_free (iter);
+
+ if (!selected_pad)
+ GST_ERROR ("Did not find pad of direction %d in %s",
+ direction, GST_OBJECT_NAME (element));
+
+ return selected_pad;
+ }
+
+ static GstElement *
+ gst_auto_convert_get_subelement (GstAutoConvert * autoconvert)
+ {
+ GstElement *element = NULL;
+
+ GST_AUTOCONVERT_LOCK (autoconvert);
+ if (autoconvert->current_subelement)
+ element = gst_object_ref (autoconvert->current_subelement);
+ GST_AUTOCONVERT_UNLOCK (autoconvert);
+
+ return element;
+ }
+
+ static GstPad *
+ gst_auto_convert_get_internal_sinkpad (GstAutoConvert * autoconvert)
+ {
+ GstPad *pad = NULL;
+
+ GST_AUTOCONVERT_LOCK (autoconvert);
+ if (autoconvert->current_internal_sinkpad)
+ pad = gst_object_ref (autoconvert->current_internal_sinkpad);
+ GST_AUTOCONVERT_UNLOCK (autoconvert);
+
+ return pad;
+ }
+
+ static GstPad *
+ gst_auto_convert_get_internal_srcpad (GstAutoConvert * autoconvert)
+ {
+ GstPad *pad = NULL;
+
+ GST_AUTOCONVERT_LOCK (autoconvert);
+ if (autoconvert->current_internal_srcpad)
+ pad = gst_object_ref (autoconvert->current_internal_srcpad);
+ GST_AUTOCONVERT_UNLOCK (autoconvert);
+
+ return pad;
+ }
+
+ /*
+ * This function creates and adds an element to the GstAutoConvert
+ * it then creates the internal pads and links them
+ *
+ */
+
+ static GstElement *
+ gst_auto_convert_add_element (GstAutoConvert * autoconvert,
+ GstElementFactory * factory)
+ {
+ GstElement *element = NULL;
+ GstPad *internal_sinkpad = NULL;
+ GstPad *internal_srcpad = NULL;
+ GstPad *sinkpad = NULL;
+ GstPad *srcpad = NULL;
+ GstPadLinkReturn padlinkret;
+
+ GST_DEBUG_OBJECT (autoconvert, "Adding element %s to the autoconvert bin",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+
+ element = gst_element_factory_create (factory, NULL);
+ if (!element)
+ return NULL;
+
+ if (!gst_bin_add (GST_BIN (autoconvert), element)) {
+ GST_ERROR_OBJECT (autoconvert, "Could not add element %s to the bin",
+ GST_OBJECT_NAME (element));
+ gst_object_unref (element);
+ return NULL;
+ }
+
+ srcpad = get_pad_by_direction (element, GST_PAD_SRC);
+ if (!srcpad) {
+ GST_ERROR_OBJECT (autoconvert, "Could not find source in %s",
+ GST_OBJECT_NAME (element));
+ goto error;
+ }
+
+ sinkpad = get_pad_by_direction (element, GST_PAD_SINK);
+ if (!sinkpad) {
+ GST_ERROR_OBJECT (autoconvert, "Could not find sink in %s",
+ GST_OBJECT_NAME (element));
+ goto error;
+ }
+
+ internal_sinkpad =
+ gst_pad_new_from_static_template (&sink_internal_template,
+ "sink_internal");
+ internal_srcpad =
+ gst_pad_new_from_static_template (&src_internal_template, "src_internal");
+
+ if (!internal_sinkpad || !internal_srcpad) {
+ GST_ERROR_OBJECT (autoconvert, "Could not create internal pads");
+ if (internal_srcpad)
+ gst_object_unref (internal_srcpad);
+ if (internal_sinkpad)
+ gst_object_unref (internal_sinkpad);
+ goto error;
+ }
+
+ g_object_weak_ref (G_OBJECT (element), (GWeakNotify) gst_object_unref,
+ internal_sinkpad);
+ g_object_weak_ref (G_OBJECT (element), (GWeakNotify) gst_object_unref,
+ internal_srcpad);
+
+ gst_pad_set_active (internal_sinkpad, TRUE);
+ gst_pad_set_active (internal_srcpad, TRUE);
+
+ g_object_set_qdata (G_OBJECT (internal_srcpad), parent_quark, autoconvert);
+ g_object_set_qdata (G_OBJECT (internal_sinkpad), parent_quark, autoconvert);
+
+ gst_pad_set_chain_function (internal_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_chain));
+ gst_pad_set_chain_list_function (internal_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_chain_list));
+ gst_pad_set_event_function (internal_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_event));
+ gst_pad_set_query_function (internal_sinkpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_query));
+
+ gst_pad_set_event_function (internal_srcpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_event));
+ gst_pad_set_query_function (internal_srcpad,
+ GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_query));
+
+ padlinkret = gst_pad_link_full (internal_srcpad, sinkpad,
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (GST_PAD_LINK_FAILED (padlinkret)) {
+ GST_WARNING_OBJECT (autoconvert, "Could not links pad %s:%s to %s:%s"
+ " for reason %d",
+ GST_DEBUG_PAD_NAME (internal_srcpad),
+ GST_DEBUG_PAD_NAME (sinkpad), padlinkret);
+ goto error;
+ }
+
+ padlinkret = gst_pad_link_full (srcpad, internal_sinkpad,
+ GST_PAD_LINK_CHECK_NOTHING);
+ if (GST_PAD_LINK_FAILED (padlinkret)) {
+ GST_WARNING_OBJECT (autoconvert, "Could not links pad %s:%s to %s:%s"
+ " for reason %d",
+ GST_DEBUG_PAD_NAME (internal_srcpad),
+ GST_DEBUG_PAD_NAME (sinkpad), padlinkret);
+ goto error;
+ }
+
+ g_object_set_qdata (G_OBJECT (element),
+ internal_srcpad_quark, internal_srcpad);
+ g_object_set_qdata (G_OBJECT (element),
+ internal_sinkpad_quark, internal_sinkpad);
+
+ /* Iffy */
+ gst_element_sync_state_with_parent (element);
+
+ /* Increment the reference count we will return to the caller */
+ gst_object_ref (element);
+
+ /* unref sink and src pad */
+ gst_object_unref (srcpad);
+ gst_object_unref (sinkpad);
+ return element;
+
+ error:
+ gst_element_set_locked_state (element, TRUE);
+ gst_element_set_state (element, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN (autoconvert), element);
+
+ if (srcpad)
+ gst_object_unref (srcpad);
+ if (sinkpad)
+ gst_object_unref (sinkpad);
+
+ return NULL;
+ }
+
+ static GstElement *
+ gst_auto_convert_get_or_make_element_from_factory (GstAutoConvert * autoconvert,
+ GstElementFactory * factory)
+ {
+ GstElement *element = NULL;
+ GstElementFactory *loaded_factory =
+ GST_ELEMENT_FACTORY (gst_plugin_feature_load (GST_PLUGIN_FEATURE
+ (factory)));
+
+ if (!loaded_factory)
+ return NULL;
+
+ element = gst_auto_convert_get_element_by_type (autoconvert,
+ gst_element_factory_get_element_type (loaded_factory));
+
+ if (!element) {
+ element = gst_auto_convert_add_element (autoconvert, loaded_factory);
+ }
+
+ gst_object_unref (loaded_factory);
+
+ return element;
+ }
+
+ /*
+ * This function checks if there is one and only one pad template on the
+ * factory that can accept the given caps. If there is one and only one,
+ * it returns TRUE, otherwise, its FALSE
+ */
+
+ static gboolean
+ factory_can_intersect (GstAutoConvert * autoconvert,
+ GstElementFactory * factory, GstPadDirection direction, GstCaps * caps)
+ {
+ const GList *templates;
+ gint has_direction = FALSE;
+ gboolean ret = FALSE;
+
+ g_return_val_if_fail (factory != NULL, FALSE);
+ g_return_val_if_fail (caps != NULL, FALSE);
+
+ templates = gst_element_factory_get_static_pad_templates (factory);
+
+ while (templates) {
+ GstStaticPadTemplate *template = (GstStaticPadTemplate *) templates->data;
+
+ if (template->direction == direction) {
+ GstCaps *tmpl_caps = NULL;
+ gboolean intersect;
+
+ /* If there is more than one pad in this direction, we return FALSE
+ * Only transform elements (with one sink and one source pad)
+ * are accepted
+ */
+ if (has_direction) {
+ GST_DEBUG_OBJECT (autoconvert, "Factory %p"
+ " has more than one static template with dir %d",
+ template, direction);
+ return FALSE;
+ }
+ has_direction = TRUE;
+
+ tmpl_caps = gst_static_caps_get (&template->static_caps);
+ intersect = gst_caps_can_intersect (tmpl_caps, caps);
+ GST_DEBUG_OBJECT (autoconvert, "Factories %" GST_PTR_FORMAT
+ " static caps %" GST_PTR_FORMAT " and caps %" GST_PTR_FORMAT
+ " can%s intersect", factory, tmpl_caps, caps,
+ intersect ? "" : " not");
+ gst_caps_unref (tmpl_caps);
+
+ ret |= intersect;
+ }
+ templates = g_list_next (templates);
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ sticky_event_push (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (user_data);
+
+ gst_event_ref (*event);
+ gst_pad_push_event (autoconvert->current_internal_srcpad, *event);
+
+ return TRUE;
+ }
+
/* Makes @element the active child: stores it and its internal pads as
 * "current", replays the sticky events from our sinkpad into it and pushes
 * a reconfigure event out of the sinkpad.
 *
 * Ownership: consumes the caller's reference on @element when it returns
 * TRUE (it is unreffed at the end); returns FALSE without consuming the
 * reference if the element refuses @caps. */
static gboolean
gst_auto_convert_activate_element (GstAutoConvert * autoconvert,
    GstElement * element, GstCaps * caps)
{
  /* The internal pads were attached as qdata by gst_auto_convert_add_element() */
  GstPad *internal_srcpad = g_object_get_qdata (G_OBJECT (element),
      internal_srcpad_quark);
  GstPad *internal_sinkpad = g_object_get_qdata (G_OBJECT (element),
      internal_sinkpad_quark);

  if (caps) {
    /* check if the element can really accept said caps */
    if (!gst_pad_peer_query_accept_caps (internal_srcpad, caps)) {
      GST_DEBUG_OBJECT (autoconvert, "Could not set %s:%s to %"
          GST_PTR_FORMAT, GST_DEBUG_PAD_NAME (internal_srcpad), caps);
      return FALSE;
    }
  }

  /* Swap all three "current" pointers under the lock so readers always
   * see a consistent element/pad triple */
  GST_AUTOCONVERT_LOCK (autoconvert);
  gst_object_replace ((GstObject **) & autoconvert->current_subelement,
      GST_OBJECT (element));
  gst_object_replace ((GstObject **) & autoconvert->current_internal_srcpad,
      GST_OBJECT (internal_srcpad));
  gst_object_replace ((GstObject **) & autoconvert->current_internal_sinkpad,
      GST_OBJECT (internal_sinkpad));
  GST_AUTOCONVERT_UNLOCK (autoconvert);

  /* Replay sticky events (stream-start, caps, segment, ...) so the new
   * child is properly initialised before data flows */
  gst_pad_sticky_events_foreach (autoconvert->sinkpad, sticky_event_push,
      autoconvert);

  gst_pad_push_event (autoconvert->sinkpad, gst_event_new_reconfigure ());

  GST_INFO_OBJECT (autoconvert, "Selected element %s",
      GST_OBJECT_NAME (GST_OBJECT (element)));

  gst_object_unref (element);

  return TRUE;
}
+
+ static GstIterator *
+ gst_auto_convert_iterate_internal_links (GstPad * pad, GstObject * parent)
+ {
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
+ GstIterator *it = NULL;
+ GstPad *internal;
+
+ if (pad == autoconvert->sinkpad)
+ internal = gst_auto_convert_get_internal_srcpad (autoconvert);
+ else
+ internal = gst_auto_convert_get_internal_sinkpad (autoconvert);
+
+ if (internal) {
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_take_object (&val, internal);
+
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+ }
+
+ return it;
+ }
+
/*
 * If there is already an internal element, it will try to call set_caps on it
 *
 * If there isn't an internal element or if the set_caps() on the internal
 * element failed, it will try to find another element where it would succeed
 * and will change the internal element.
 */

static gboolean
gst_auto_convert_sink_setcaps (GstAutoConvert * autoconvert, GstCaps * caps)
{
  GList *elem;
  GstCaps *other_caps = NULL;
  GList *factories;
  GstCaps *current_caps;

  g_return_val_if_fail (autoconvert != NULL, FALSE);

  /* Short-circuit if the caps did not actually change.
   * NOTE(review): gst_caps_is_equal_fixed() requires fixed caps on both
   * sides -- caps from a CAPS event should be fixed, but confirm. */
  current_caps = gst_pad_get_current_caps (autoconvert->sinkpad);
  if (current_caps) {
    if (gst_caps_is_equal_fixed (caps, current_caps)) {
      gst_caps_unref (current_caps);
      return TRUE;
    }
    gst_caps_unref (current_caps);
  }

  if (autoconvert->current_subelement) {
    if (gst_pad_peer_query_accept_caps (autoconvert->current_internal_srcpad,
            caps)) {
      /* If we can set the new caps on the current element,
       * then we just get out
       */
      GST_DEBUG_OBJECT (autoconvert, "Could set %s:%s to %" GST_PTR_FORMAT,
          GST_DEBUG_PAD_NAME (autoconvert->current_internal_srcpad), caps);
      goto get_out;
    } else {
      /* If the current element doesn't work,
       * then we remove the current element before finding a new one.
       */
      GST_AUTOCONVERT_LOCK (autoconvert);
      g_clear_object (&autoconvert->current_subelement);
      g_clear_object (&autoconvert->current_internal_sinkpad);
      g_clear_object (&autoconvert->current_internal_srcpad);
      GST_AUTOCONVERT_UNLOCK (autoconvert);
    }
  }

  /* Caps accepted downstream, used to pre-filter candidate factories */
  other_caps = gst_pad_peer_query_caps (autoconvert->srcpad, NULL);

  factories = g_atomic_pointer_get (&autoconvert->factories);

  if (!factories)
    factories = gst_auto_convert_load_factories (autoconvert);

  /* Walk the (rank-sorted) factories and activate the first one that fits */
  for (elem = factories; elem; elem = g_list_next (elem)) {
    GstElementFactory *factory = GST_ELEMENT_FACTORY (elem->data);
    GstElement *element;

    /* Lets first check if according to the static pad templates on the factory
     * these caps have any chance of success
     */
    if (!factory_can_intersect (autoconvert, factory, GST_PAD_SINK, caps)) {
      GST_LOG_OBJECT (autoconvert, "Factory %s does not accept sink caps %"
          GST_PTR_FORMAT,
          gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)), caps);
      continue;
    }
    if (other_caps != NULL) {
      if (!factory_can_intersect (autoconvert, factory, GST_PAD_SRC,
              other_caps)) {
        GST_LOG_OBJECT (autoconvert,
            "Factory %s does not accept src caps %" GST_PTR_FORMAT,
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)),
            other_caps);
        continue;
      }
    }

    /* The element had a chance of success, lets make it */
    element =
        gst_auto_convert_get_or_make_element_from_factory (autoconvert,
        factory);
    if (!element)
      continue;

    /* And make it the current child; on success activate_element consumed
     * our reference, on failure we drop it and keep looking */
    if (gst_auto_convert_activate_element (autoconvert, element, caps))
      break;
    else
      gst_object_unref (element);
  }

get_out:
  if (other_caps)
    gst_caps_unref (other_caps);

  if (autoconvert->current_subelement) {
    return TRUE;
  } else {
    GST_WARNING_OBJECT (autoconvert,
        "Could not find a matching element for caps");
    return FALSE;
  }
}
+
+ /*
+ * This function filters the pad pad templates, taking only transform element
+ * (with one sink and one src pad)
+ */
+
+ static gboolean
+ gst_auto_convert_default_filter_func (GstPluginFeature * feature,
+ gpointer user_data)
+ {
+ GstElementFactory *factory = NULL;
+ const GList *static_pad_templates, *tmp;
+ GstStaticPadTemplate *src = NULL, *sink = NULL;
+
+ if (!GST_IS_ELEMENT_FACTORY (feature))
+ return FALSE;
+
+ factory = GST_ELEMENT_FACTORY (feature);
+
+ static_pad_templates = gst_element_factory_get_static_pad_templates (factory);
+
+ for (tmp = static_pad_templates; tmp; tmp = g_list_next (tmp)) {
+ GstStaticPadTemplate *template = tmp->data;
+ GstCaps *caps;
+
+ if (template->presence == GST_PAD_SOMETIMES)
+ return FALSE;
+
+ if (template->presence != GST_PAD_ALWAYS)
+ continue;
+
+ switch (template->direction) {
+ case GST_PAD_SRC:
+ if (src)
+ return FALSE;
+ src = template;
+ break;
+ case GST_PAD_SINK:
+ if (sink)
+ return FALSE;
+ sink = template;
+ break;
+ default:
+ return FALSE;
+ }
+
+ caps = gst_static_pad_template_get_caps (template);
+
++ if (gst_caps_is_any (caps) || gst_caps_is_empty (caps)) {
++ gst_caps_unref(caps);
+ return FALSE;
++ }
++ gst_caps_unref(caps);
+ }
+
+ if (!src || !sink)
+ return FALSE;
+
+ return TRUE;
+ }
+
+ /* function used to sort element features
+ * Copy-pasted from decodebin */
+ static gint
+ compare_ranks (GstPluginFeature * f1, GstPluginFeature * f2)
+ {
+ gint diff;
+ const gchar *rname1, *rname2;
+
+ diff = gst_plugin_feature_get_rank (f2) - gst_plugin_feature_get_rank (f1);
+ if (diff != 0)
+ return diff;
+
+ rname1 = gst_plugin_feature_get_name (f1);
+ rname2 = gst_plugin_feature_get_name (f2);
+
+ diff = strcmp (rname2, rname1);
+
+ return diff;
+ }
+
+ static GList *
+ gst_auto_convert_load_factories (GstAutoConvert * autoconvert)
+ {
+ GList *all_factories;
+
+ all_factories =
+ gst_registry_feature_filter (gst_registry_get (),
+ gst_auto_convert_default_filter_func, FALSE, NULL);
+
+ all_factories = g_list_sort (all_factories, (GCompareFunc) compare_ranks);
+
+ g_assert (all_factories);
+
+ if (!g_atomic_pointer_compare_and_exchange (&autoconvert->factories,
+ (GList *) NULL, all_factories)) {
+ gst_plugin_feature_list_free (all_factories);
+ }
+
+ return g_atomic_pointer_get (&autoconvert->factories);
+ }
+
+ /* In this case, we should almost always have an internal element, because
+ * set_caps() should have been called first
+ */
+
+ static GstFlowReturn
+ gst_auto_convert_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_NOT_NEGOTIATED;
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
+
+ if (autoconvert->current_internal_srcpad) {
+ ret = gst_pad_push (autoconvert->current_internal_srcpad, buffer);
+ if (ret != GST_FLOW_OK)
+ GST_DEBUG_OBJECT (autoconvert,
+ "Child element %" GST_PTR_FORMAT "returned flow %s",
+ autoconvert->current_subelement, gst_flow_get_name (ret));
+ } else {
+ GST_ERROR_OBJECT (autoconvert, "Got buffer without an negotiated element,"
+ " returning not-negotiated");
+ gst_buffer_unref (buffer);
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_auto_convert_sink_chain_list (GstPad * pad, GstObject * parent,
+ GstBufferList * list)
+ {
+ GstFlowReturn ret = GST_FLOW_NOT_NEGOTIATED;
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
+
+ if (autoconvert->current_internal_srcpad) {
+ ret = gst_pad_push_list (autoconvert->current_internal_srcpad, list);
+ if (ret != GST_FLOW_OK)
+ GST_DEBUG_OBJECT (autoconvert,
+ "Child element %" GST_PTR_FORMAT "returned flow %s",
+ autoconvert->current_subelement, gst_flow_get_name (ret));
+ } else {
+ GST_ERROR_OBJECT (autoconvert, "Got buffer without an negotiated element,"
+ " returning not-negotiated");
+ gst_buffer_list_unref (list);
+ }
+
+ return ret;
+ }
+
/* Sink event handler: CAPS events trigger (re)selection of the child via
 * sink_setcaps(); all events are then forwarded into the current child's
 * internal src pad.  With no child selected, only flush events pass
 * through to the srcpad, everything else is swallowed.
 *
 * Event ownership: the event is either pushed (consumed by the push) or
 * explicitly unreffed on every path. */
static gboolean
gst_auto_convert_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret = TRUE;
  GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
  GstPad *internal_srcpad;

  if (GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
    GstCaps *caps;

    gst_event_parse_caps (event, &caps);
    ret = gst_auto_convert_sink_setcaps (autoconvert, caps);
    if (!ret) {
      /* No element could handle these caps: drop the event */
      gst_event_unref (event);
      return ret;
    }
  }

  internal_srcpad = gst_auto_convert_get_internal_srcpad (autoconvert);
  if (internal_srcpad) {
    ret = gst_pad_push_event (internal_srcpad, event);
    gst_object_unref (internal_srcpad);
  } else {
    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_FLUSH_STOP:
      case GST_EVENT_FLUSH_START:
        /* Flushes bypass the (missing) child and go straight downstream */
        ret = gst_pad_push_event (autoconvert->srcpad, event);
        break;
      default:
        gst_event_unref (event);
        ret = TRUE;
        break;
    }
  }

  return ret;
}
+
/* TODO Properly test that this code works well for queries */
/* Sink query handler: CAPS queries are answered from the merged factory
 * caps; other queries go to the current child's sink pad.  A failed
 * ACCEPT_CAPS from the child falls through (via the label) and is
 * re-answered against the factory caps.  With no child at all, queries
 * are proxied to the srcpad peer. */
static gboolean
gst_auto_convert_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  gboolean ret = TRUE;
  GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
  GstElement *subelement;

  if (GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
    GstCaps *filter, *caps;

    gst_query_parse_caps (query, &filter);
    caps = gst_auto_convert_getcaps (autoconvert, filter, GST_PAD_SINK);
    gst_query_set_caps_result (query, caps);
    gst_caps_unref (caps);

    return TRUE;
  }

  subelement = gst_auto_convert_get_subelement (autoconvert);
  if (subelement) {
    /* NOTE(review): get_pad_by_direction() can return NULL; gst_pad_query()
     * would then hit a NULL pad -- confirm whether a child without a sink
     * pad can exist here */
    GstPad *sub_sinkpad = get_pad_by_direction (subelement, GST_PAD_SINK);

    ret = gst_pad_query (sub_sinkpad, query);

    gst_object_unref (sub_sinkpad);
    gst_object_unref (subelement);

    if (ret && GST_QUERY_TYPE (query) == GST_QUERY_ACCEPT_CAPS) {
      gboolean res;
      gst_query_parse_accept_caps_result (query, &res);

      /* The current child said no: retry against all candidate factories */
      if (!res)
        goto ignore_acceptcaps_failure;
    }
    return ret;
  }

ignore_acceptcaps_failure:

  if (GST_QUERY_TYPE (query) == GST_QUERY_ACCEPT_CAPS) {
    GstCaps *caps;
    GstCaps *accept_caps;

    gst_query_parse_accept_caps (query, &accept_caps);

    /* Accept if any factory's caps intersect the proposed caps */
    caps = gst_auto_convert_getcaps (autoconvert, accept_caps, GST_PAD_SINK);
    gst_query_set_accept_caps_result (query,
        gst_caps_can_intersect (caps, accept_caps));
    gst_caps_unref (caps);

    return TRUE;
  }

  GST_WARNING_OBJECT (autoconvert, "Got query %s while no element was"
      " selected, letting through",
      gst_query_type_get_name (GST_QUERY_TYPE (query)));
  return gst_pad_peer_query (autoconvert->srcpad, query);
}
+
/*
 * gst_auto_convert_getcaps:
 * @autoconvert: the #GstAutoConvert
 * @filter: optional filter caps from the query (may be NULL)
 * @dir: which external pad is being queried (sink or src)
 *
 * This function returns the union of the caps of all the possible element
 * factories, based on the static pad templates.
 * It also does a getcaps on the downstream (resp. upstream) peer and
 * ignores all factories whose static caps can not satisfy it.
 *
 * When peer caps are available, each surviving factory's element is
 * instantiated and queried through its internal pad; otherwise only the
 * static template caps are merged.
 *
 * Returns a new caps reference; the caller must unref it.
 */

static GstCaps *
gst_auto_convert_getcaps (GstAutoConvert * autoconvert, GstCaps * filter,
    GstPadDirection dir)
{
  GstCaps *caps = NULL, *other_caps = NULL;
  GList *elem, *factories;

  caps = gst_caps_new_empty ();

  /* Caps of the peer on the opposite side, used to pre-filter factories */
  if (dir == GST_PAD_SINK)
    other_caps = gst_pad_peer_query_caps (autoconvert->srcpad, NULL);
  else
    other_caps = gst_pad_peer_query_caps (autoconvert->sinkpad, NULL);

  GST_DEBUG_OBJECT (autoconvert,
      "Lets find all the element that can fit here with src caps %"
      GST_PTR_FORMAT, other_caps);

  /* Empty peer caps: nothing can possibly link, return the empty caps */
  if (other_caps && gst_caps_is_empty (other_caps)) {
    goto out;
  }

  factories = g_atomic_pointer_get (&autoconvert->factories);

  if (!factories)
    factories = gst_auto_convert_load_factories (autoconvert);

  for (elem = factories; elem; elem = g_list_next (elem)) {
    GstElementFactory *factory = GST_ELEMENT_FACTORY (elem->data);
    GstElement *element = NULL;
    GstCaps *element_caps;
    GstPad *internal_pad = NULL;

    if (filter) {
      if (!factory_can_intersect (autoconvert, factory, dir, filter)) {
        /* NOTE(review): this message prints other_caps although the check
         * was against filter -- looks like a copy-paste in the log call */
        GST_LOG_OBJECT (autoconvert,
            "Factory %s does not accept src caps %" GST_PTR_FORMAT,
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)),
            other_caps);
        continue;
      }
    }

    if (other_caps != NULL) {
      /* Check the opposite direction against the peer caps */
      if (!factory_can_intersect (autoconvert, factory,
              dir == GST_PAD_SINK ? GST_PAD_SRC : GST_PAD_SINK, other_caps)) {
        GST_LOG_OBJECT (autoconvert,
            "Factory %s does not accept src caps %" GST_PTR_FORMAT,
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)),
            other_caps);
        continue;
      }

      /* Instantiate (or reuse) the element and ask it through the
       * internal pad, so its own transform logic is taken into account */
      element = gst_auto_convert_get_or_make_element_from_factory (autoconvert,
          factory);
      if (element == NULL)
        continue;

      if (dir == GST_PAD_SINK)
        internal_pad = g_object_get_qdata (G_OBJECT (element),
            internal_srcpad_quark);
      else
        internal_pad = g_object_get_qdata (G_OBJECT (element),
            internal_sinkpad_quark);

      element_caps = gst_pad_peer_query_caps (internal_pad, filter);

      if (element_caps)
        caps = gst_caps_merge (caps, element_caps);

      gst_object_unref (element);

      /* Early out, any is absorbing */
      if (gst_caps_is_any (caps))
        goto out;
    } else {
      /* No peer caps: just merge the matching static template caps */
      const GList *tmp;

      for (tmp = gst_element_factory_get_static_pad_templates (factory);
          tmp; tmp = g_list_next (tmp)) {
        GstStaticPadTemplate *template = tmp->data;

        if (GST_PAD_TEMPLATE_DIRECTION (template) == dir) {
          GstCaps *static_caps = gst_static_pad_template_get_caps (template);

          if (static_caps) {
            caps = gst_caps_merge (caps, static_caps);
          }

          /* Early out, any is absorbing */
          if (gst_caps_is_any (caps))
            goto out;
        }
      }
    }
  }

  GST_DEBUG_OBJECT (autoconvert, "Returning unioned caps %" GST_PTR_FORMAT,
      caps);

out:

  if (other_caps)
    gst_caps_unref (other_caps);

  return caps;
}
+
+
+
/* Src (upstream) event handler.  RECONFIGURE is duplicated towards the
 * sinkpad so upstream renegotiates even while a child is selected; other
 * events go through the current child's internal sink pad, or are
 * forwarded directly upstream when no child exists.
 *
 * Event ownership: a ref is taken for the extra RECONFIGURE push; the
 * original event is consumed by exactly one push or unreffed. */
static gboolean
gst_auto_convert_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret = TRUE;
  GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
  GstPad *internal_sinkpad;

  if (GST_EVENT_TYPE (event) == GST_EVENT_RECONFIGURE)
    gst_pad_push_event (autoconvert->sinkpad, gst_event_ref (event));

  internal_sinkpad = gst_auto_convert_get_internal_sinkpad (autoconvert);
  if (internal_sinkpad) {
    ret = gst_pad_push_event (internal_sinkpad, event);
    gst_object_unref (internal_sinkpad);
  } else if (GST_EVENT_TYPE (event) != GST_EVENT_RECONFIGURE) {
    GST_WARNING_OBJECT (autoconvert,
        "Got upstream event while no element was selected," "forwarding.");
    ret = gst_pad_push_event (autoconvert->sinkpad, event);
  } else
    /* RECONFIGURE was already forwarded above; drop the original ref */
    gst_event_unref (event);

  return ret;
}
+
+ /* TODO Properly test that this code works well for queries */
+ static gboolean
+ gst_auto_convert_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ gboolean ret = TRUE;
+ GstAutoConvert *autoconvert = GST_AUTO_CONVERT (parent);
+ GstElement *subelement;
+
+ if (GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_auto_convert_getcaps (autoconvert, filter, GST_PAD_SRC);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+
+ return TRUE;
+ }
+
+ subelement = gst_auto_convert_get_subelement (autoconvert);
+ if (subelement) {
+ GstPad *sub_srcpad = get_pad_by_direction (subelement, GST_PAD_SRC);
+
+ ret = gst_pad_query (sub_srcpad, query);
+
+ gst_object_unref (sub_srcpad);
+ gst_object_unref (subelement);
+ } else {
+ GST_WARNING_OBJECT (autoconvert,
+ "Got upstream query of type %s while no element was selected,"
+ " forwarding.", gst_query_type_get_name (GST_QUERY_TYPE (query)));
+ ret = gst_pad_peer_query (autoconvert->sinkpad, query);
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_auto_convert_internal_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+ {
+ GstAutoConvert *autoconvert =
+ GST_AUTO_CONVERT (g_object_get_qdata (G_OBJECT (pad),
+ parent_quark));
+
+ return gst_pad_push (autoconvert->srcpad, buffer);
+ }
+
+ static GstFlowReturn
+ gst_auto_convert_internal_sink_chain_list (GstPad * pad, GstObject * parent,
+ GstBufferList * list)
+ {
+ GstAutoConvert *autoconvert =
+ GST_AUTO_CONVERT (g_object_get_qdata (G_OBJECT (pad),
+ parent_quark));
+
+ return gst_pad_push_list (autoconvert->srcpad, list);
+ }
+
+ static gboolean
+ gst_auto_convert_internal_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstAutoConvert *autoconvert =
+ GST_AUTO_CONVERT (g_object_get_qdata (G_OBJECT (pad),
+ parent_quark));
+ gboolean drop = FALSE;
+
+ GST_AUTOCONVERT_LOCK (autoconvert);
+ if (autoconvert->current_internal_sinkpad != pad) {
+ drop = TRUE;
+ }
+ GST_AUTOCONVERT_UNLOCK (autoconvert);
+
+ if (drop) {
+ gst_event_unref (event);
+ return TRUE;
+ }
+
+ return gst_pad_push_event (autoconvert->srcpad, event);
+ }
+
+ static gboolean
+ gst_auto_convert_internal_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstAutoConvert *autoconvert =
+ GST_AUTO_CONVERT (g_object_get_qdata (G_OBJECT (pad),
+ parent_quark));
+
+ if (!gst_pad_peer_query (autoconvert->srcpad, query)) {
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter;
+
+ gst_query_parse_caps (query, &filter);
+ if (filter) {
+ gst_query_set_caps_result (query, filter);
+ } else {
+ filter = gst_caps_new_any ();
+ gst_query_set_caps_result (query, filter);
+ gst_caps_unref (filter);
+ }
+ return TRUE;
+ }
+ case GST_QUERY_ACCEPT_CAPS:
+ gst_query_set_accept_caps_result (query, TRUE);
+ return TRUE;
+ default:
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_auto_convert_internal_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstAutoConvert *autoconvert =
+ GST_AUTO_CONVERT (g_object_get_qdata (G_OBJECT (pad),
+ parent_quark));
+ gboolean drop = FALSE;
+
+ GST_AUTOCONVERT_LOCK (autoconvert);
+ if (autoconvert->current_internal_srcpad != pad) {
+ drop = TRUE;
+ }
+ GST_AUTOCONVERT_UNLOCK (autoconvert);
+
+ if (drop) {
+ GST_DEBUG_OBJECT (autoconvert, "Dropping event %" GST_PTR_FORMAT, event);
+ gst_event_unref (event);
+ return TRUE;
+ }
+
+ return gst_pad_push_event (autoconvert->sinkpad, event);
+ }
+
+ static gboolean
+ gst_auto_convert_internal_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstAutoConvert *autoconvert =
+ GST_AUTO_CONVERT (g_object_get_qdata (G_OBJECT (pad),
+ parent_quark));
+
+ return gst_pad_peer_query (autoconvert->sinkpad, query);
+ }
--- /dev/null
-/*
++/*
+ * GStreamer
+ * Copyright (C) 2007 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ * March 2008
+ * Logic enhanced by William Brack <wbrack@mmm.com.hk>
+ */
+
+ /**
+ * SECTION:element-bayer2rgb
+ * @title: bayer2rgb
+ *
+ * Decodes raw camera bayer (fourcc BA81) to RGB.
+ */
+
+ /*
+ * In order to guard against my advancing maturity, some extra detailed
+ * information about the logic of the decode is included here. Much of
+ * this was inspired by a technical paper from siliconimaging.com, which
+ * in turn was based upon an article from IEEE,
+ * T. Sakamoto, C. Nakanishi and T. Hase,
+ * “Software pixel interpolation for digital still cameras suitable for
+ * a 32-bit MCU,”
+ * IEEE Trans. Consumer Electronics, vol. 44, no. 4, November 1998.
+ *
+ * The code assumes a Bayer matrix of the type produced by the fourcc
+ * BA81 (v4l2 format SBGGR8) of width w and height h which looks like:
+ * 0 1 2 3 w-2 w-1
+ *
+ * 0 B G B G ....B G
+ * 1 G R G R ....G R
+ * 2 B G B G ....B G
+ * ...............
+ * h-2 B G B G ....B G
+ * h-1 G R G R ....G R
+ *
+ * We expand this matrix, producing a separate {r, g, b} triple for each
+ * of the individual elements. The algorithm for doing this expansion is
+ * as follows.
+ *
+ * We are designing for speed of transformation, at a slight expense of code.
+ * First, we calculate the appropriate triples for the four corners, the
+ * remainder of the top and bottom rows, and the left and right columns.
+ * The reason for this is that those elements are transformed slightly
+ * differently than all of the remainder of the matrix. Finally, we transform
+ * all of the remainder.
+ *
+ * The transformation into the "appropriate triples" is based upon the
+ * "nearest neighbor" principle, with some additional complexity for the
+ * calculation of the "green" element, where an "adaptive" pairing is used.
+ *
+ * For purposes of documentation and identification, each element of the
+ * original array can be put into one of four classes:
+ * R A red element
+ * B A blue element
+ * GR A green element which is followed by a red one
+ * GB A green element which is followed by a blue one
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/base/gstbasetransform.h>
+ #include <gst/video/video.h>
+ #include <string.h>
+ #include <stdlib.h>
+
+ #ifdef HAVE_STDINT_H
+ #include <stdint.h>
+ #endif
+
+ #include "gstbayerelements.h"
+ #include "gstbayerorc.h"
+
+ #define GST_CAT_DEFAULT gst_bayer2rgb_debug
+ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+ enum
+ {
+ GST_BAYER_2_RGB_FORMAT_BGGR = 0,
+ GST_BAYER_2_RGB_FORMAT_GBRG,
+ GST_BAYER_2_RGB_FORMAT_GRBG,
+ GST_BAYER_2_RGB_FORMAT_RGGB
+ };
+
+
+ #define GST_TYPE_BAYER2RGB (gst_bayer2rgb_get_type())
+ #define GST_BAYER2RGB(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BAYER2RGB,GstBayer2RGB))
+ #define GST_IS_BAYER2RGB(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BAYER2RGB))
+ #define GST_BAYER2RGB_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_BAYER2RGB,GstBayer2RGBClass))
+ #define GST_IS_BAYER2RGB_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_BAYER2RGB))
+ #define GST_BAYER2RGB_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_BAYER2RGB,GstBayer2RGBClass))
+ typedef struct _GstBayer2RGB GstBayer2RGB;
+ typedef struct _GstBayer2RGBClass GstBayer2RGBClass;
+
+ typedef void (*GstBayer2RGBProcessFunc) (GstBayer2RGB *, guint8 *, guint);
+
+ struct _GstBayer2RGB
+ {
+ GstBaseTransform basetransform;
+
+ /* < private > */
+ GstVideoInfo info;
+ int width;
+ int height;
+ int r_off; /* offset for red */
+ int g_off; /* offset for green */
+ int b_off; /* offset for blue */
+ int format;
+ };
+
+ struct _GstBayer2RGBClass
+ {
+ GstBaseTransformClass parent;
+ };
+
+ #define SRC_CAPS \
+ GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR, RGBA, ARGB, BGRA, ABGR }")
+
+ #define SINK_CAPS "video/x-bayer,format=(string){bggr,grbg,gbrg,rggb}," \
+ "width=(int)[1,MAX],height=(int)[1,MAX],framerate=(fraction)[0/1,MAX]"
+
+ enum
+ {
+ PROP_0
+ };
+
+ GType gst_bayer2rgb_get_type (void);
+
+ #define gst_bayer2rgb_parent_class parent_class
+ G_DEFINE_TYPE (GstBayer2RGB, gst_bayer2rgb, GST_TYPE_BASE_TRANSFORM);
+ GST_ELEMENT_REGISTER_DEFINE (bayer2rgb, "bayer2rgb", GST_RANK_NONE,
+ gst_bayer2rgb_get_type ());
+
+ static void gst_bayer2rgb_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_bayer2rgb_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static gboolean gst_bayer2rgb_set_caps (GstBaseTransform * filter,
+ GstCaps * incaps, GstCaps * outcaps);
+ static GstFlowReturn gst_bayer2rgb_transform (GstBaseTransform * base,
+ GstBuffer * inbuf, GstBuffer * outbuf);
+ static void gst_bayer2rgb_reset (GstBayer2RGB * filter);
+ static GstCaps *gst_bayer2rgb_transform_caps (GstBaseTransform * base,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
+ static gboolean gst_bayer2rgb_get_unit_size (GstBaseTransform * base,
+ GstCaps * caps, gsize * size);
+
+
+ static void
+ gst_bayer2rgb_class_init (GstBayer2RGBClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
-
++ GstCaps *tmp = NULL;
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_bayer2rgb_set_property;
+ gobject_class->get_property = gst_bayer2rgb_get_property;
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Bayer to RGB decoder for cameras", "Filter/Converter/Video",
+ "Converts video/x-bayer to video/x-raw",
+ "William Brack <wbrack@mmm.com.hk>");
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_caps_from_string (SRC_CAPS)));
++ tmp = gst_caps_from_string (SRC_CAPS)));
++ gst_caps_unref (tmp);
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
- gst_caps_from_string (SINK_CAPS)));
++ tmp = gst_caps_from_string (SINK_CAPS)));
++ gst_caps_unref (tmp);
+
+ GST_BASE_TRANSFORM_CLASS (klass)->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_bayer2rgb_transform_caps);
+ GST_BASE_TRANSFORM_CLASS (klass)->get_unit_size =
+ GST_DEBUG_FUNCPTR (gst_bayer2rgb_get_unit_size);
+ GST_BASE_TRANSFORM_CLASS (klass)->set_caps =
+ GST_DEBUG_FUNCPTR (gst_bayer2rgb_set_caps);
+ GST_BASE_TRANSFORM_CLASS (klass)->transform =
+ GST_DEBUG_FUNCPTR (gst_bayer2rgb_transform);
+
+ GST_DEBUG_CATEGORY_INIT (gst_bayer2rgb_debug, "bayer2rgb", 0,
+ "bayer2rgb element");
+ }
+
+ static void
+ gst_bayer2rgb_init (GstBayer2RGB * filter)
+ {
+ gst_bayer2rgb_reset (filter);
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
+ }
+
+ /* No properties are implemented, so only a warning is produced */
+ static void
+ gst_bayer2rgb_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_bayer2rgb_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static gboolean
+ gst_bayer2rgb_set_caps (GstBaseTransform * base, GstCaps * incaps,
+ GstCaps * outcaps)
+ {
+ GstBayer2RGB *bayer2rgb = GST_BAYER2RGB (base);
+ GstStructure *structure;
+ const char *format;
+ GstVideoInfo info;
+
+ GST_DEBUG ("in caps %" GST_PTR_FORMAT " out caps %" GST_PTR_FORMAT, incaps,
+ outcaps);
+
+ structure = gst_caps_get_structure (incaps, 0);
+
+ gst_structure_get_int (structure, "width", &bayer2rgb->width);
+ gst_structure_get_int (structure, "height", &bayer2rgb->height);
+
+ format = gst_structure_get_string (structure, "format");
+ if (g_str_equal (format, "bggr")) {
+ bayer2rgb->format = GST_BAYER_2_RGB_FORMAT_BGGR;
+ } else if (g_str_equal (format, "gbrg")) {
+ bayer2rgb->format = GST_BAYER_2_RGB_FORMAT_GBRG;
+ } else if (g_str_equal (format, "grbg")) {
+ bayer2rgb->format = GST_BAYER_2_RGB_FORMAT_GRBG;
+ } else if (g_str_equal (format, "rggb")) {
+ bayer2rgb->format = GST_BAYER_2_RGB_FORMAT_RGGB;
+ } else {
+ return FALSE;
+ }
+
+ /* To cater for different RGB formats, we need to set params for later */
+ gst_video_info_from_caps (&info, outcaps);
+ bayer2rgb->r_off = GST_VIDEO_INFO_COMP_OFFSET (&info, 0);
+ bayer2rgb->g_off = GST_VIDEO_INFO_COMP_OFFSET (&info, 1);
+ bayer2rgb->b_off = GST_VIDEO_INFO_COMP_OFFSET (&info, 2);
+
+ bayer2rgb->info = info;
+
+ return TRUE;
+ }
+
+ static void
+ gst_bayer2rgb_reset (GstBayer2RGB * filter)
+ {
+ filter->width = 0;
+ filter->height = 0;
+ filter->r_off = 0;
+ filter->g_off = 0;
+ filter->b_off = 0;
+ gst_video_info_init (&filter->info);
+ }
+
+ static GstCaps *
+ gst_bayer2rgb_transform_caps (GstBaseTransform * base,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+ {
+ GstBayer2RGB *bayer2rgb;
+ GstCaps *res_caps, *tmp_caps;
+ GstStructure *structure;
+ guint i, caps_size;
+
+ bayer2rgb = GST_BAYER2RGB (base);
+
+ res_caps = gst_caps_copy (caps);
+ caps_size = gst_caps_get_size (res_caps);
+ for (i = 0; i < caps_size; i++) {
+ structure = gst_caps_get_structure (res_caps, i);
+ if (direction == GST_PAD_SINK) {
+ gst_structure_set_name (structure, "video/x-raw");
+ gst_structure_remove_field (structure, "format");
+ } else {
+ gst_structure_set_name (structure, "video/x-bayer");
+ gst_structure_remove_fields (structure, "format", "colorimetry",
+ "chroma-site", NULL);
+ }
+ }
+ if (filter) {
+ tmp_caps = res_caps;
+ res_caps =
+ gst_caps_intersect_full (filter, tmp_caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tmp_caps);
+ }
+ GST_DEBUG_OBJECT (bayer2rgb, "transformed %" GST_PTR_FORMAT " into %"
+ GST_PTR_FORMAT, caps, res_caps);
+ return res_caps;
+ }
+
+ static gboolean
+ gst_bayer2rgb_get_unit_size (GstBaseTransform * base, GstCaps * caps,
+ gsize * size)
+ {
+ GstStructure *structure;
+ int width;
+ int height;
+ const char *name;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ name = gst_structure_get_name (structure);
+ /* Our name must be either video/x-bayer or video/x-raw */
+ if (strcmp (name, "video/x-raw")) {
+ *size = GST_ROUND_UP_4 (width) * height;
+ return TRUE;
+ } else {
+ /* For output, calculate according to format (always 32 bits) */
+ *size = width * height * 4;
+ return TRUE;
+ }
+
+ }
+ GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
+ ("Incomplete caps, some required field missing"));
+ return FALSE;
+ }
+
+ static void
+ gst_bayer2rgb_split_and_upsample_horiz (guint8 * dest0, guint8 * dest1,
+ const guint8 * src, int n)
+ {
+ int i;
+
+ dest0[0] = src[0];
+ dest1[0] = src[1];
+ dest0[1] = (src[0] + src[2] + 1) >> 1;
+ dest1[1] = src[1];
+
+ #if defined(__i386__) || defined(__amd64__)
+ bayer_orc_horiz_upsample_unaligned (dest0 + 2, dest1 + 2, src + 1,
+ (n - 4) >> 1);
+ #else
+ bayer_orc_horiz_upsample (dest0 + 2, dest1 + 2, src + 2, (n - 4) >> 1);
+ #endif
+
+ for (i = n - 2; i < n; i++) {
+ if ((i & 1) == 0) {
+ dest0[i] = src[i];
+ dest1[i] = src[i - 1];
+ } else {
+ dest0[i] = src[i - 1];
+ dest1[i] = src[i];
+ }
+ }
+ }
+
+ typedef void (*process_func) (guint8 * d0, const guint8 * s0, const guint8 * s1,
+ const guint8 * s2, const guint8 * s3, const guint8 * s4, const guint8 * s5,
+ int n);
+
+ static void
+ gst_bayer2rgb_process (GstBayer2RGB * bayer2rgb, uint8_t * dest,
+ int dest_stride, uint8_t * src, int src_stride)
+ {
+ int j;
+ guint8 *tmp;
+ process_func merge[2] = { NULL, NULL };
+ int r_off, g_off, b_off;
+
+ /* We exploit some symmetry in the functions here. The base functions
+ * are all named for the BGGR arrangement. For RGGB, we swap the
+ * red offset and blue offset in the output. For GRBG, we swap the
+ * order of the merge functions. For GBRG, do both. */
+ r_off = bayer2rgb->r_off;
+ g_off = bayer2rgb->g_off;
+ b_off = bayer2rgb->b_off;
+ if (bayer2rgb->format == GST_BAYER_2_RGB_FORMAT_RGGB ||
+ bayer2rgb->format == GST_BAYER_2_RGB_FORMAT_GBRG) {
+ r_off = bayer2rgb->b_off;
+ b_off = bayer2rgb->r_off;
+ }
+
+ if (r_off == 2 && g_off == 1 && b_off == 0) {
+ merge[0] = bayer_orc_merge_bg_bgra;
+ merge[1] = bayer_orc_merge_gr_bgra;
+ } else if (r_off == 3 && g_off == 2 && b_off == 1) {
+ merge[0] = bayer_orc_merge_bg_abgr;
+ merge[1] = bayer_orc_merge_gr_abgr;
+ } else if (r_off == 1 && g_off == 2 && b_off == 3) {
+ merge[0] = bayer_orc_merge_bg_argb;
+ merge[1] = bayer_orc_merge_gr_argb;
+ } else if (r_off == 0 && g_off == 1 && b_off == 2) {
+ merge[0] = bayer_orc_merge_bg_rgba;
+ merge[1] = bayer_orc_merge_gr_rgba;
+ }
+ if (bayer2rgb->format == GST_BAYER_2_RGB_FORMAT_GRBG ||
+ bayer2rgb->format == GST_BAYER_2_RGB_FORMAT_GBRG) {
+ process_func tmp = merge[0];
+ merge[0] = merge[1];
+ merge[1] = tmp;
+ }
+
+ tmp = g_malloc (2 * 4 * bayer2rgb->width);
+ #define LINE(x) (tmp + ((x)&7) * bayer2rgb->width)
+
+ gst_bayer2rgb_split_and_upsample_horiz (LINE (3 * 2 + 0), LINE (3 * 2 + 1),
+ src + 1 * src_stride, bayer2rgb->width);
+ j = 0;
+ gst_bayer2rgb_split_and_upsample_horiz (LINE (j * 2 + 0), LINE (j * 2 + 1),
+ src + j * src_stride, bayer2rgb->width);
+
+ for (j = 0; j < bayer2rgb->height; j++) {
+ if (j < bayer2rgb->height - 1) {
+ gst_bayer2rgb_split_and_upsample_horiz (LINE ((j + 1) * 2 + 0),
+ LINE ((j + 1) * 2 + 1), src + (j + 1) * src_stride, bayer2rgb->width);
+ }
+
+ merge[j & 1] (dest + j * dest_stride,
+ LINE (j * 2 - 2), LINE (j * 2 - 1),
+ LINE (j * 2 + 0), LINE (j * 2 + 1),
+ LINE (j * 2 + 2), LINE (j * 2 + 3), bayer2rgb->width >> 1);
+ }
+
+ g_free (tmp);
+ }
+
+
+
+
+ static GstFlowReturn
+ gst_bayer2rgb_transform (GstBaseTransform * base, GstBuffer * inbuf,
+ GstBuffer * outbuf)
+ {
+ GstBayer2RGB *filter = GST_BAYER2RGB (base);
+ GstMapInfo map;
+ uint8_t *output;
+ GstVideoFrame frame;
+
+ GST_DEBUG ("transforming buffer");
+
+ if (!gst_buffer_map (inbuf, &map, GST_MAP_READ))
+ goto map_failed;
+
+ if (!gst_video_frame_map (&frame, &filter->info, outbuf, GST_MAP_WRITE)) {
+ gst_buffer_unmap (inbuf, &map);
+ goto map_failed;
+ }
+
+ output = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
+ gst_bayer2rgb_process (filter, output, frame.info.stride[0],
+ map.data, GST_ROUND_UP_4 (filter->width));
+
+ gst_video_frame_unmap (&frame);
+ gst_buffer_unmap (inbuf, &map);
+
+ return GST_FLOW_OK;
+
+ map_failed:
+ GST_WARNING_OBJECT (base, "Could not map buffer, skipping");
+ return GST_FLOW_OK;
+ }
--- /dev/null
- gst_buffer_unmap (buf, &mapinfo);
+ /* GStreamer ID3v2 tag writer
+ *
+ * Copyright (C) 2006 Christophe Fergeau <teuf@gnome.org>
+ * Copyright (C) 2006-2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009 Pioneers of the Inevitable <songbird@songbirdnest.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #include "id3tag.h"
+ #include <string.h>
+
+ #include <gst/tag/tag.h>
+
+ GST_DEBUG_CATEGORY_EXTERN (gst_id3_mux_debug);
+ #define GST_CAT_DEFAULT gst_id3_mux_debug
+
+ #define ID3V2_APIC_PICTURE_OTHER 0
+ #define ID3V2_APIC_PICTURE_FILE_ICON 1
+
+ /* ======================================================================== */
+
+ typedef GString GstByteWriter;
+
+ static inline GstByteWriter *
+ gst_byte_writer_new (guint size)
+ {
+ return (GstByteWriter *) g_string_sized_new (size);
+ }
+
+ static inline guint
+ gst_byte_writer_get_length (GstByteWriter * w)
+ {
+ return ((GString *) w)->len;
+ }
+
+ static inline void
+ gst_byte_writer_write_bytes (GstByteWriter * w, const guint8 * data, guint len)
+ {
+ g_string_append_len ((GString *) w, (const gchar *) data, len);
+ }
+
+ static inline void
+ gst_byte_writer_write_uint8 (GstByteWriter * w, guint8 val)
+ {
+ guint8 data[1];
+
+ GST_WRITE_UINT8 (data, val);
+ gst_byte_writer_write_bytes (w, data, 1);
+ }
+
+ static inline void
+ gst_byte_writer_write_uint16 (GstByteWriter * w, guint16 val)
+ {
+ guint8 data[2];
+
+ GST_WRITE_UINT16_BE (data, val);
+ gst_byte_writer_write_bytes (w, data, 2);
+ }
+
+ static inline void
+ gst_byte_writer_write_uint32 (GstByteWriter * w, guint32 val)
+ {
+ guint8 data[4];
+
+ GST_WRITE_UINT32_BE (data, val);
+ gst_byte_writer_write_bytes (w, data, 4);
+ }
+
+ static inline void
+ gst_byte_writer_write_uint32_syncsafe (GstByteWriter * w, guint32 val)
+ {
+ guint8 data[4];
+
+ data[0] = (guint8) ((val >> 21) & 0x7f);
+ data[1] = (guint8) ((val >> 14) & 0x7f);
+ data[2] = (guint8) ((val >> 7) & 0x7f);
+ data[3] = (guint8) ((val >> 0) & 0x7f);
+ gst_byte_writer_write_bytes (w, data, 4);
+ }
+
+ static void
+ gst_byte_writer_copy_bytes (GstByteWriter * w, guint8 * dest, guint offset,
+ gint size)
+ {
+ guint length;
+
+ length = gst_byte_writer_get_length (w);
+
+ if (size == -1)
+ size = length - offset;
+
+ g_warn_if_fail (length >= (offset + size));
+
+ memcpy (dest, w->str + offset, MIN (size, length - offset));
+ }
+
+ static inline void
+ gst_byte_writer_free (GstByteWriter * w)
+ {
+ g_string_free (w, TRUE);
+ }
+
+ /* ======================================================================== */
+
+ /*
+ typedef enum {
+ GST_ID3V2_FRAME_FLAG_NONE = 0,
+ GST_ID3V2_FRAME_FLAG_
+ } GstID3v2FrameMsgFlags;
+ */
+
+ typedef struct
+ {
+ gchar id[5];
+ guint32 len; /* Length encoded in the header; this is the
+ total length - header size */
+ guint16 flags;
+ GstByteWriter *writer;
+ gboolean dirty; /* TRUE if frame header needs updating */
+ } GstId3v2Frame;
+
+ typedef struct
+ {
+ GArray *frames;
+ guint major_version; /* The 3 in v2.3.0 */
+ } GstId3v2Tag;
+
+ typedef void (*GstId3v2AddTagFunc) (GstId3v2Tag * tag, const GstTagList * list,
+ const gchar * gst_tag, guint num_tags, const gchar * data);
+
+ #define ID3V2_ENCODING_ISO_8859_1 0x00
+ #define ID3V2_ENCODING_UTF16_BOM 0x01
+ #define ID3V2_ENCODING_UTF8 0x03
+
+ static gboolean id3v2_tag_init (GstId3v2Tag * tag, guint major_version);
+ static void id3v2_tag_unset (GstId3v2Tag * tag);
+
+ static void id3v2_frame_init (GstId3v2Frame * frame,
+ const gchar * frame_id, guint16 flags);
+ static void id3v2_frame_unset (GstId3v2Frame * frame);
+ static void id3v2_frame_finish (GstId3v2Tag * tag, GstId3v2Frame * frame);
+ static guint id3v2_frame_get_size (GstId3v2Tag * tag, GstId3v2Frame * frame);
+
+ static void id3v2_tag_add_text_frame (GstId3v2Tag * tag,
+ const gchar * frame_id, const gchar ** strings, int num_strings);
+ static void id3v2_tag_add_simple_text_frame (GstId3v2Tag * tag,
+ const gchar * frame_id, const gchar * string);
+
+ static gboolean
+ id3v2_tag_init (GstId3v2Tag * tag, guint major_version)
+ {
+ if (major_version != 3 && major_version != 4)
+ return FALSE;
+
+ tag->major_version = major_version;
+ tag->frames = g_array_new (TRUE, TRUE, sizeof (GstId3v2Frame));
+
+ return TRUE;
+ }
+
+ static void
+ id3v2_tag_unset (GstId3v2Tag * tag)
+ {
+ guint i;
+
+ for (i = 0; i < tag->frames->len; ++i)
+ id3v2_frame_unset (&g_array_index (tag->frames, GstId3v2Frame, i));
+
+ g_array_free (tag->frames, TRUE);
+ memset (tag, 0, sizeof (GstId3v2Tag));
+ }
+
+ #ifndef GST_ROUND_UP_1024
+ #define GST_ROUND_UP_1024(num) (((num)+1023)&~1023)
+ #endif
+
+ static GstBuffer *
+ id3v2_tag_to_buffer (GstId3v2Tag * tag)
+ {
+ GstByteWriter *w;
+ GstMapInfo info;
+ GstBuffer *buf;
+ guint8 *dest;
+ guint i, size, offset, size_frames = 0;
+
+ GST_DEBUG ("Creating buffer for ID3v2 tag containing %d frames",
+ tag->frames->len);
+
+ for (i = 0; i < tag->frames->len; ++i) {
+ GstId3v2Frame *frame = &g_array_index (tag->frames, GstId3v2Frame, i);
+
+ id3v2_frame_finish (tag, frame);
+ size_frames += id3v2_frame_get_size (tag, frame);
+ }
+
+ size = GST_ROUND_UP_1024 (10 + size_frames);
+
+ w = gst_byte_writer_new (10);
+ gst_byte_writer_write_uint8 (w, 'I');
+ gst_byte_writer_write_uint8 (w, 'D');
+ gst_byte_writer_write_uint8 (w, '3');
+ gst_byte_writer_write_uint8 (w, tag->major_version);
+ gst_byte_writer_write_uint8 (w, 0); /* micro version */
+ gst_byte_writer_write_uint8 (w, 0); /* flags */
+ gst_byte_writer_write_uint32_syncsafe (w, size - 10);
+
+ buf = gst_buffer_new_allocate (NULL, size, NULL);
+ gst_buffer_map (buf, &info, GST_MAP_WRITE);
+ dest = info.data;
+ gst_byte_writer_copy_bytes (w, dest, 0, 10);
+ offset = 10;
+
+ for (i = 0; i < tag->frames->len; ++i) {
+ GstId3v2Frame *frame = &g_array_index (tag->frames, GstId3v2Frame, i);
+
+ gst_byte_writer_copy_bytes (frame->writer, dest + offset, 0, -1);
+ offset += id3v2_frame_get_size (tag, frame);
+ }
+
+ /* Zero out any additional space in our buffer as padding. */
+ memset (dest + offset, 0, size - offset);
+
+ gst_byte_writer_free (w);
+ gst_buffer_unmap (buf, &info);
+
+ return buf;
+ }
+
+ static inline void
+ id3v2_frame_write_bytes (GstId3v2Frame * frame, const guint8 * data, guint len)
+ {
+ gst_byte_writer_write_bytes (frame->writer, data, len);
+ frame->dirty = TRUE;
+ }
+
+ static inline void
+ id3v2_frame_write_uint8 (GstId3v2Frame * frame, guint8 val)
+ {
+ gst_byte_writer_write_uint8 (frame->writer, val);
+ frame->dirty = TRUE;
+ }
+
+ static inline void
+ id3v2_frame_write_uint16 (GstId3v2Frame * frame, guint16 val)
+ {
+ gst_byte_writer_write_uint16 (frame->writer, val);
+ frame->dirty = TRUE;
+ }
+
+ static inline void
+ id3v2_frame_write_uint32 (GstId3v2Frame * frame, guint32 val)
+ {
+ gst_byte_writer_write_uint32 (frame->writer, val);
+ frame->dirty = TRUE;
+ }
+
+ static void
+ id3v2_frame_init (GstId3v2Frame * frame, const gchar * frame_id, guint16 flags)
+ {
+ g_assert (strlen (frame_id) == 4); /* we only handle 2.3.0/2.4.0 */
+ memcpy (frame->id, frame_id, 4 + 1);
+ frame->flags = flags;
+ frame->len = 0;
+ frame->writer = gst_byte_writer_new (64);
+ id3v2_frame_write_bytes (frame, (const guint8 *) frame->id, 4);
+ id3v2_frame_write_uint32 (frame, 0); /* size, set later */
+ id3v2_frame_write_uint16 (frame, frame->flags);
+ }
+
+ static void
+ id3v2_frame_finish (GstId3v2Tag * tag, GstId3v2Frame * frame)
+ {
+ if (frame->dirty) {
+ frame->len = frame->writer->len - 10;
+ GST_LOG ("[%s] %u bytes", frame->id, frame->len);
+ if (tag->major_version == 3) {
+ GST_WRITE_UINT32_BE (frame->writer->str + 4, frame->len);
+ } else {
+ /* Version 4 uses a syncsafe int here */
+ GST_WRITE_UINT8 (frame->writer->str + 4, (frame->len >> 21) & 0x7f);
+ GST_WRITE_UINT8 (frame->writer->str + 5, (frame->len >> 14) & 0x7f);
+ GST_WRITE_UINT8 (frame->writer->str + 6, (frame->len >> 7) & 0x7f);
+ GST_WRITE_UINT8 (frame->writer->str + 7, (frame->len >> 0) & 0x7f);
+ }
+ frame->dirty = FALSE;
+ }
+ }
+
+ static guint
+ id3v2_frame_get_size (GstId3v2Tag * tag, GstId3v2Frame * frame)
+ {
+ id3v2_frame_finish (tag, frame);
+ return gst_byte_writer_get_length (frame->writer);
+ }
+
+ static void
+ id3v2_frame_unset (GstId3v2Frame * frame)
+ {
+ gst_byte_writer_free (frame->writer);
+ memset (frame, 0, sizeof (GstId3v2Frame));
+ }
+
+ static gboolean
+ id3v2_string_is_ascii (const gchar * string)
+ {
+ while (*string) {
+ if (!g_ascii_isprint (*string++))
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static int
+ id3v2_tag_string_encoding (GstId3v2Tag * tag, const gchar * string)
+ {
+ int encoding;
+ if (tag->major_version == 4) {
+ /* ID3v2.4 supports UTF8, use it unconditionally as it's really the only
+ sensible encoding. */
+ encoding = ID3V2_ENCODING_UTF8;
+ } else {
+ /* If we're not writing v2.4, then check to see if it's ASCII.
+ If it is, write ISO-8859-1 (compatible with ASCII).
+ Otherwise, write UTF-16-LE with a byte order marker.
+ Note that we don't write arbitrary ISO-8859-1 as ISO-8859-1, because much
+ software misuses this - and non-ASCII might confuse it. */
+ if (id3v2_string_is_ascii (string))
+ encoding = ID3V2_ENCODING_ISO_8859_1;
+ else
+ encoding = ID3V2_ENCODING_UTF16_BOM;
+ }
+
+ return encoding;
+ }
+
+ static void
+ id3v2_frame_write_string (GstId3v2Frame * frame, int encoding,
+ const gchar * string, gboolean null_terminate)
+ {
+ int terminator_length;
+ if (encoding == ID3V2_ENCODING_UTF16_BOM) {
+ gsize utf16len;
+ const guint8 bom[] = { 0xFF, 0xFE };
+ /* This converts to little-endian UTF-16 */
+ gchar *utf16 = g_convert (string, -1, "UTF-16LE", "UTF-8",
+ NULL, &utf16len, NULL);
+ if (!utf16) {
+ GST_WARNING ("Failed to convert UTF-8 to UTF-16LE");
+ return;
+ }
+
+ /* Write the BOM */
+ id3v2_frame_write_bytes (frame, (const guint8 *) bom, 2);
+ id3v2_frame_write_bytes (frame, (const guint8 *) utf16, utf16len);
+ if (null_terminate) {
+ /* NUL terminator is 2 bytes, if present. */
+ id3v2_frame_write_uint16 (frame, 0);
+ }
+
+ g_free (utf16);
+ } else {
+ /* write NUL terminator as well if requested */
+ terminator_length = null_terminate ? 1 : 0;
+ id3v2_frame_write_bytes (frame, (const guint8 *) string,
+ strlen (string) + terminator_length);
+ }
+ }
+
+ static void
+ id3v2_tag_add_text_frame (GstId3v2Tag * tag, const gchar * frame_id,
+ const gchar ** strings_utf8, int num_strings)
+ {
+ GstId3v2Frame frame;
+ guint len, i;
+ int encoding;
+
+ if (num_strings < 1 || strings_utf8 == NULL || strings_utf8[0] == NULL) {
+ GST_LOG ("Not adding text frame, no strings");
+ return;
+ }
+
+ id3v2_frame_init (&frame, frame_id, 0);
+
+ encoding = id3v2_tag_string_encoding (tag, strings_utf8[0]);
+ id3v2_frame_write_uint8 (&frame, encoding);
+
+ GST_LOG ("Adding text frame %s with %d strings", frame_id, num_strings);
+
+ for (i = 0; i < num_strings; ++i) {
+ len = strlen (strings_utf8[i]);
+ g_return_if_fail (g_utf8_validate (strings_utf8[i], len, NULL));
+
+ id3v2_frame_write_string (&frame, encoding, strings_utf8[i],
+ i != num_strings - 1);
+
+ /* only v2.4.0 supports multiple strings per frame (according to the
+ * earlier specs tag readers should just ignore everything after the first
+ * string, but we probably shouldn't write anything there, just in case
+ * tag readers that only support the old version are not expecting
+ * more data after the first string) */
+ if (tag->major_version < 4)
+ break;
+ }
+
+ if (i < num_strings - 1) {
+ GST_WARNING ("Only wrote one of multiple string values for text frame %s "
+ "- ID3v2 supports multiple string values only since v2.4.0, but writing "
+ "v2.%u.0 tag", frame_id, tag->major_version);
+ }
+
+ g_array_append_val (tag->frames, frame);
+ }
+
+ static void
+ id3v2_tag_add_simple_text_frame (GstId3v2Tag * tag, const gchar * frame_id,
+ const gchar * string)
+ {
+ id3v2_tag_add_text_frame (tag, frame_id, (const gchar **) &string, 1);
+ }
+
+ /* ====================================================================== */
+
+ static void
+ add_text_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+ const gchar * tag, guint num_tags, const gchar * frame_id)
+ {
+ const gchar **strings;
+ guint n, i;
+
+ GST_LOG ("Adding '%s' frame", frame_id);
+
+ strings = g_new0 (const gchar *, num_tags + 1);
+ for (n = 0, i = 0; n < num_tags; ++n) {
+ if (gst_tag_list_peek_string_index (list, tag, n, &strings[i]) &&
+ strings[i] != NULL) {
+ GST_LOG ("%s: %s[%u] = '%s'", frame_id, tag, i, strings[i]);
+ ++i;
+ }
+ }
+
+ if (strings[0] != NULL) {
+ id3v2_tag_add_text_frame (id3v2tag, frame_id, strings, i);
+ } else {
+ GST_WARNING ("Empty list for tag %s, skipping", tag);
+ }
+
+ g_free ((gchar **) strings);
+ }
+
+ /* Write each GST_TAG_PRIVATE_DATA sample as a PRIV frame: a NUL-terminated
+  * owner string (taken from the sample's "ID3PrivateFrame" info structure)
+  * followed by the raw binary payload.
+  *
+  * Fixes: the GstSample was only unreffed on the success path, leaking one
+  * sample ref per entry whenever the info structure or owner string was
+  * missing; the loop index was signed while num_tags is unsigned. */
+ static void
+ add_private_data_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * frame_id)
+ {
+   guint n;
+
+   for (n = 0; n < num_tags; ++n) {
+     GstId3v2Frame frame;
+     GstSample *sample = NULL;
+     const GstStructure *structure = NULL;
+     GstBuffer *binary = NULL;
+     GstBuffer *priv_frame = NULL;
+     const gchar *owner_str = NULL;
+     guint owner_len = 0;
+     GstMapInfo mapinfo;
+
+     if (!gst_tag_list_get_sample_index (list, tag, n, &sample))
+       continue;
+
+     structure = gst_sample_get_info (sample);
+     if (structure != NULL
+         && !strcmp (gst_structure_get_name (structure), "ID3PrivateFrame")) {
+       owner_str = gst_structure_get_string (structure, "owner");
+
+       if (owner_str != NULL) {
+         /* the owner string is stored including its terminating NUL */
+         owner_len = strlen (owner_str) + 1;
+         priv_frame = gst_buffer_new_and_alloc (owner_len);
+         gst_buffer_fill (priv_frame, 0, owner_str, owner_len);
+
+         /* gst_buffer_append() takes ownership of the extra payload ref */
+         binary = gst_buffer_ref (gst_sample_get_buffer (sample));
+         priv_frame = gst_buffer_append (priv_frame, binary);
+
+         id3v2_frame_init (&frame, frame_id, 0);
+
+         if (gst_buffer_map (priv_frame, &mapinfo, GST_MAP_READ)) {
+           id3v2_frame_write_bytes (&frame, mapinfo.data, mapinfo.size);
+           g_array_append_val (id3v2tag->frames, frame);
+           gst_buffer_unmap (priv_frame, &mapinfo);
+         } else {
+           GST_WARNING ("Couldn't map priv frame tag buffer");
+           id3v2_frame_unset (&frame);
+         }
+
+         gst_buffer_unref (priv_frame);
+       }
+     } else {
+       GST_WARNING ("Couldn't find ID3PrivateFrame structure");
+     }
+
+     /* release the sample on every path, not only on success */
+     gst_sample_unref (sample);
+   }
+ }
+
+ /* Re-emit ID3v2 frames that id3demux could not parse and stored verbatim
+  * as samples under the private-id3v2-frame tag. A frame is only copied
+  * through when the caps-carried version matches the version being muxed,
+  * since the raw frame contents are opaque to us. */
+ static void
+ add_id3v2frame_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+ const gchar * tag, guint num_tags, const gchar * unused)
+ {
+ guint i;
+
+ for (i = 0; i < num_tags; ++i) {
+ GstSample *sample;
+ GstBuffer *buf;
+ GstCaps *caps;
+
+ if (!gst_tag_list_get_sample_index (list, tag, i, &sample))
+ continue;
+
+ buf = gst_sample_get_buffer (sample);
+
+ /* FIXME: should use auxiliary sample struct instead of caps for this */
+ caps = gst_sample_get_caps (sample);
+
+ if (buf && caps) {
+ GstStructure *s;
+ gint version = 0;
+
+ s = gst_caps_get_structure (caps, 0);
+ /* We can only add it if this private buffer is for the same ID3 version,
+ because we don't understand the contents at all. */
+ if (s && gst_structure_get_int (s, "version", &version) &&
+ version == id3v2tag->major_version) {
+ GstId3v2Frame frame;
+ GstMapInfo mapinfo;
+ gchar frame_id[5];
+ guint16 flags;
+ guint8 *data;
+ gint size;
+
+ if (!gst_buffer_map (buf, &mapinfo, GST_MAP_READ)) {
+ gst_sample_unref (sample);
+ continue;
+ }
+
+ size = mapinfo.size;
+ data = mapinfo.data;
+
+ /* raw frame layout: 4-byte id, 4-byte size, 2-byte flags, payload */
+ if (size >= 10) { /* header size */
+ /* We only get here if the frame version matches the muxer. Since the
+ * muxer only does v2.3 or v2.4, the frame must be one of those - and
+ * so the frame header is the same format */
+ memcpy (frame_id, data, 4);
+ frame_id[4] = 0;
+ flags = GST_READ_UINT16_BE (data + 8);
+
+ id3v2_frame_init (&frame, frame_id, flags);
+ id3v2_frame_write_bytes (&frame, data + 10, size - 10);
+
+ g_array_append_val (id3v2tag->frames, frame);
+ GST_DEBUG ("Added unparsed tag with %d bytes", size);
- /* If we don't have a valid language, match what taglib does for
+ } else {
+ GST_WARNING ("Short ID3v2 frame");
+ }
++
++ gst_buffer_unmap (buf, &mapinfo);
++
+ } else {
+ GST_WARNING ("Discarding unrecognised ID3 tag for different ID3 "
+ "version");
+ }
+ }
+ gst_sample_unref (sample);
+ }
+ }
+
+ /* Like add_text_tag(), but only meaningful for ID3v2.4; when muxing v2.3
+  * the tag cannot be represented and is dropped with a warning. */
+ static void
+ add_text_tag_v4 (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * frame_id)
+ {
+   if (id3v2tag->major_version != 4) {
+     GST_WARNING ("Cannot serialise tag '%s' in ID3v2.%d", frame_id,
+         id3v2tag->major_version);
+     return;
+   }
+
+   add_text_tag (id3v2tag, list, tag, num_tags, frame_id);
+ }
+
+ /* Write a track/disc number-and-count pair as a single "N/C" text frame
+  * (TRCK or TPOS). Both the NUMBER and the COUNT gst tags map to the same
+  * frame: when a number is present its count (if any) is folded in; a
+  * lone count is written as "0/C"; a count whose matching number exists is
+  * skipped because it was already handled with the number. */
+ static void
+ add_count_or_num_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+ const gchar * tag, guint num_tags, const gchar * frame_id)
+ {
+ static const struct
+ {
+ const gchar *gst_tag;
+ const gchar *corr_count; /* corresponding COUNT tag (if number) */
+ const gchar *corr_num; /* corresponding NUMBER tag (if count) */
+ } corr[] = {
+ {
+ GST_TAG_TRACK_NUMBER, GST_TAG_TRACK_COUNT, NULL}, {
+ GST_TAG_TRACK_COUNT, NULL, GST_TAG_TRACK_NUMBER}, {
+ GST_TAG_ALBUM_VOLUME_NUMBER, GST_TAG_ALBUM_VOLUME_COUNT, NULL}, {
+ GST_TAG_ALBUM_VOLUME_COUNT, NULL, GST_TAG_ALBUM_VOLUME_NUMBER}
+ };
+ guint idx;
+
+ /* find this tag's row; the dispatch table guarantees a match */
+ for (idx = 0; idx < G_N_ELEMENTS (corr); ++idx) {
+ if (strcmp (corr[idx].gst_tag, tag) == 0)
+ break;
+ }
+
+ g_assert (idx < G_N_ELEMENTS (corr));
+ g_assert (frame_id && strlen (frame_id) == 4);
+
+ if (corr[idx].corr_num == NULL) {
+ guint number;
+
+ /* number tag */
+ if (gst_tag_list_get_uint_index (list, tag, 0, &number)) {
+ gchar *tag_str;
+ guint count;
+
+ if (gst_tag_list_get_uint_index (list, corr[idx].corr_count, 0, &count))
+ tag_str = g_strdup_printf ("%u/%u", number, count);
+ else
+ tag_str = g_strdup_printf ("%u", number);
+
+ GST_DEBUG ("Setting %s to %s (frame_id = %s)", tag, tag_str, frame_id);
+
+ id3v2_tag_add_simple_text_frame (id3v2tag, frame_id, tag_str);
+ g_free (tag_str);
+ }
+ } else if (corr[idx].corr_count == NULL) {
+ guint count;
+
+ /* count tag */
+ if (gst_tag_list_get_uint_index (list, corr[idx].corr_num, 0, &count)) {
+ GST_DEBUG ("%s handled with %s, skipping", tag, corr[idx].corr_num);
+ } else if (gst_tag_list_get_uint_index (list, tag, 0, &count)) {
+ gchar *tag_str = g_strdup_printf ("0/%u", count);
+ GST_DEBUG ("Setting %s to %s (frame_id = %s)", tag, tag_str, frame_id);
+
+ id3v2_tag_add_simple_text_frame (id3v2tag, frame_id, tag_str);
+ g_free (tag_str);
+ }
+ }
+
+ if (num_tags > 1) {
+ GST_WARNING ("more than one %s, can only handle one", tag);
+ }
+ }
+
+ /* Write the beats-per-minute value as a TBPM text frame. ID3 stores BPM
+  * as an integer string while GStreamer tag lists carry a double, so the
+  * value is truncated towards zero.
+  *
+  * Fixes: converting a negative double to guint is undefined behaviour in
+  * C; negative BPM values are now rejected with a warning instead. */
+ static void
+ add_bpm_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * unused)
+ {
+   gdouble bpm;
+
+   GST_LOG ("Adding BPM frame");
+
+   if (gst_tag_list_get_double (list, tag, &bpm)) {
+     if (bpm >= 0.0) {
+       gchar *tag_str;
+
+       /* bpm is stored as an integer in id3 tags, but is a double in
+        * tag lists. */
+       tag_str = g_strdup_printf ("%u", (guint) bpm);
+       GST_DEBUG ("Setting %s to %s", tag, tag_str);
+       id3v2_tag_add_simple_text_frame (id3v2tag, "TBPM", tag_str);
+       g_free (tag_str);
+     } else {
+       GST_WARNING ("Ignoring negative BPM value %g", bpm);
+     }
+   }
+
+   if (num_tags > 1) {
+     GST_WARNING ("more than one %s, can only handle one", tag);
+   }
+ }
+
+ /* Write COMM frames for comment/extended-comment tags. Extended comments
+  * of the form "key[lang]=value" are split into description, language and
+  * value; plain comments get a dummy "Comment" description. The frame uses
+  * the wider of the encodings required by description and value.
+  *
+  * Fixes: a non-NULL language string shorter than 3 chars was leaked when
+  * it was replaced with the "XXX" placeholder, and entries with invalid
+  * UTF-8 leaked frame/desc/val/lang via g_return_if_fail(); such entries
+  * are now skipped with full cleanup instead of aborting the function. */
+ static void
+ add_comment_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * unused)
+ {
+   guint n;
+
+   GST_LOG ("Adding comment frames");
+   for (n = 0; n < num_tags; ++n) {
+     const gchar *s = NULL;
+     gchar *desc = NULL, *val = NULL, *lang = NULL;
+     int desclen, vallen, encoding1, encoding2, encoding;
+     GstId3v2Frame frame;
+
+     if (!gst_tag_list_peek_string_index (list, tag, n, &s) || s == NULL)
+       continue;
+
+     id3v2_frame_init (&frame, "COMM", 0);
+
+     if (strcmp (tag, GST_TAG_COMMENT) == 0 ||
+         !gst_tag_parse_extended_comment (s, &desc, &lang, &val, TRUE)) {
+       /* create dummy description fields */
+       desc = g_strdup ("Comment");
+       val = g_strdup (s);
+     }
+
+     /* If we don't have a valid language, match what taglib does for
+        unknown languages */
+     if (!lang || strlen (lang) < 3) {
+       g_free (lang);
+       lang = g_strdup ("XXX");
+     }
+
+     desclen = strlen (desc);
+     vallen = strlen (val);
+     if (!g_utf8_validate (desc, desclen, NULL)
+         || !g_utf8_validate (val, vallen, NULL)) {
+       GST_WARNING ("invalid UTF-8 in %s[%u], skipping", tag, n);
+       id3v2_frame_unset (&frame);
+       g_free (lang);
+       g_free (desc);
+       g_free (val);
+       continue;
+     }
+
+     GST_LOG ("%s[%u] = '%s' (%s|%s|%s)", tag, n, s, GST_STR_NULL (desc),
+         GST_STR_NULL (lang), GST_STR_NULL (val));
+
+     encoding1 = id3v2_tag_string_encoding (id3v2tag, desc);
+     encoding2 = id3v2_tag_string_encoding (id3v2tag, val);
+     encoding = MAX (encoding1, encoding2);
+
+     id3v2_frame_write_uint8 (&frame, encoding);
+     id3v2_frame_write_bytes (&frame, (const guint8 *) lang, 3);
+     /* write description and value */
+     id3v2_frame_write_string (&frame, encoding, desc, TRUE);
+     id3v2_frame_write_string (&frame, encoding, val, FALSE);
+
+     g_free (lang);
+     g_free (desc);
+     g_free (val);
+
+     g_array_append_val (id3v2tag->frames, frame);
+   }
+ }
+
+ /* Write image/preview-image samples as APIC frames: text encoding byte,
+  * mime type, picture-type byte, description, then the raw image bytes.
+  * Preview images get the FILE_ICON picture type; regular images take
+  * their type from the sample's GstTagImageInfo structure when present. */
+ static void
+ add_image_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+ const gchar * tag, guint num_tags, const gchar * unused)
+ {
+ guint n;
+
+ for (n = 0; n < num_tags; ++n) {
+ GstSample *sample;
+ GstBuffer *image;
+ GstCaps *caps;
+
+ GST_DEBUG ("image %u/%u", n + 1, num_tags);
+
+ if (!gst_tag_list_get_sample_index (list, tag, n, &sample))
+ continue;
+
+ image = gst_sample_get_buffer (sample);
+ caps = gst_sample_get_caps (sample);
+
+ if (image != NULL && gst_buffer_get_size (image) > 0 &&
+ caps != NULL && !gst_caps_is_empty (caps)) {
+ const gchar *mime_type;
+ GstStructure *s;
+
+ /* the caps media type doubles as the image's mime type */
+ s = gst_caps_get_structure (caps, 0);
+ mime_type = gst_structure_get_name (s);
+ if (mime_type != NULL) {
+ const gchar *desc = NULL;
+ GstId3v2Frame frame;
+ GstMapInfo mapinfo;
+ int encoding;
+ const GstStructure *info_struct;
+
+ info_struct = gst_sample_get_info (sample);
+ if (!info_struct
+ || !gst_structure_has_name (info_struct, "GstTagImageInfo"))
+ info_struct = NULL;
+
+ /* APIC frame specifies "-->" if we're providing a URL to the image
+ rather than directly embedding it */
+ if (strcmp (mime_type, "text/uri-list") == 0)
+ mime_type = "-->";
+
+ GST_DEBUG ("Attaching picture of %" G_GSIZE_FORMAT " bytes and "
+ "mime type %s", gst_buffer_get_size (image), mime_type);
+
+ id3v2_frame_init (&frame, "APIC", 0);
+
+ if (info_struct)
+ desc = gst_structure_get_string (info_struct, "image-description");
+ if (!desc)
+ desc = "";
+ encoding = id3v2_tag_string_encoding (id3v2tag, desc);
+ id3v2_frame_write_uint8 (&frame, encoding);
+
+ id3v2_frame_write_string (&frame, encoding, mime_type, TRUE);
+
+ if (strcmp (tag, GST_TAG_PREVIEW_IMAGE) == 0) {
+ id3v2_frame_write_uint8 (&frame, ID3V2_APIC_PICTURE_FILE_ICON);
+ } else {
+ int image_type = ID3V2_APIC_PICTURE_OTHER;
+
+ if (info_struct) {
+ /* GstTagImageType values 1..18 map onto APIC types offset by 2
+ (0 = OTHER, 1 = FILE_ICON are handled separately) */
+ if (gst_structure_get (info_struct, "image-type",
+ GST_TYPE_TAG_IMAGE_TYPE, &image_type, NULL)) {
+ if (image_type > 0 && image_type <= 18) {
+ image_type += 2;
+ } else {
+ image_type = ID3V2_APIC_PICTURE_OTHER;
+ }
+ } else {
+ image_type = ID3V2_APIC_PICTURE_OTHER;
+ }
+ }
+ id3v2_frame_write_uint8 (&frame, image_type);
+ }
+
+ id3v2_frame_write_string (&frame, encoding, desc, TRUE);
+
+ if (gst_buffer_map (image, &mapinfo, GST_MAP_READ)) {
+ id3v2_frame_write_bytes (&frame, mapinfo.data, mapinfo.size);
+ g_array_append_val (id3v2tag->frames, frame);
+ gst_buffer_unmap (image, &mapinfo);
+ } else {
+ GST_WARNING ("Couldn't map image tag buffer");
+ id3v2_frame_unset (&frame);
+ }
+ }
+ } else {
+ GST_WARNING ("no image or caps: %p, caps=%" GST_PTR_FORMAT, image, caps);
+ }
+ gst_sample_unref (sample);
+ }
+ }
+
+ /* Write a MusicBrainz/CDDB identifier as two TXXX frames: one with the
+  * description from the musicbrainz.org spec and one with the lower-case
+  * description used by applications in the real world. data[0] selects
+  * the row of the mb_ids table (set up in the add_funcs dispatch table).
+  *
+  * Fixes: the loop always peeked string index 0, so when a tag carried
+  * several values the first one was written num_tags times; each value is
+  * now written exactly once. */
+ static void
+ add_musicbrainz_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * data)
+ {
+   static const struct
+   {
+     const gchar gst_tag[28];
+     const gchar spec_id[28];
+     const gchar realworld_id[28];
+   } mb_ids[] = {
+     {
+     GST_TAG_MUSICBRAINZ_ARTISTID, "MusicBrainz Artist Id",
+         "musicbrainz_artistid"}, {
+     GST_TAG_MUSICBRAINZ_ALBUMID, "MusicBrainz Album Id", "musicbrainz_albumid"}, {
+     GST_TAG_MUSICBRAINZ_ALBUMARTISTID, "MusicBrainz Album Artist Id",
+         "musicbrainz_albumartistid"}, {
+     GST_TAG_MUSICBRAINZ_TRMID, "MusicBrainz TRM Id", "musicbrainz_trmid"}, {
+     GST_TAG_CDDA_MUSICBRAINZ_DISCID, "MusicBrainz DiscID",
+         "musicbrainz_discid"}, {
+     /* the following one is more or less made up, there seems to be little
+      * evidence that any popular application is actually putting this info
+      * into TXXX frames; the first one comes from a musicbrainz wiki 'proposed
+      * tags' page, the second one is analogue to the vorbis/ape/flac tag. */
+     GST_TAG_CDDA_CDDB_DISCID, "CDDB DiscID", "discid"}
+   };
+   guint i, idx;
+
+   idx = (guint8) data[0];
+   g_assert (idx < G_N_ELEMENTS (mb_ids));
+
+   for (i = 0; i < num_tags; ++i) {
+     const gchar *id_str;
+
+     if (gst_tag_list_peek_string_index (list, tag, i, &id_str) && id_str) {
+       /* add two frames, one with the ID the musicbrainz.org spec mentions
+        * and one with the ID that applications use in the real world */
+       GstId3v2Frame frame1, frame2;
+       int encoding;
+
+       GST_DEBUG ("Setting '%s' to '%s'", mb_ids[idx].spec_id, id_str);
+       encoding = id3v2_tag_string_encoding (id3v2tag, id_str);
+
+       id3v2_frame_init (&frame1, "TXXX", 0);
+       id3v2_frame_write_uint8 (&frame1, encoding);
+       id3v2_frame_write_string (&frame1, encoding, mb_ids[idx].spec_id, TRUE);
+       id3v2_frame_write_string (&frame1, encoding, id_str, FALSE);
+       g_array_append_val (id3v2tag->frames, frame1);
+
+       id3v2_frame_init (&frame2, "TXXX", 0);
+       id3v2_frame_write_uint8 (&frame2, encoding);
+       id3v2_frame_write_string (&frame2, encoding,
+           mb_ids[idx].realworld_id, TRUE);
+       id3v2_frame_write_string (&frame2, encoding, id_str, FALSE);
+       g_array_append_val (id3v2tag->frames, frame2);
+     }
+   }
+ }
+
+ /* Write the MusicBrainz track id as a UFID frame, using the
+  * musicbrainz.org URL as the owner identifier. Only the first value of
+  * the tag is used. */
+ static void
+ add_unique_file_id_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * unused)
+ {
+   const gchar *origin = "http://musicbrainz.org";
+   const gchar *id_str = NULL;
+   GstId3v2Frame frame;
+
+   if (!gst_tag_list_peek_string_index (list, tag, 0, &id_str)
+       || id_str == NULL)
+     return;
+
+   GST_LOG ("Adding %s (%s): %s", tag, origin, id_str);
+
+   id3v2_frame_init (&frame, "UFID", 0);
+   /* owner id and file id are both written with their NUL terminators */
+   id3v2_frame_write_bytes (&frame, (const guint8 *) origin,
+       strlen (origin) + 1);
+   id3v2_frame_write_bytes (&frame, (const guint8 *) id_str,
+       strlen (id_str) + 1);
+   g_array_append_val (id3v2tag->frames, frame);
+ }
+
+ /* Write date/time tags as a year-only text frame: TYER for ID3v2.3,
+  * TDRC for v2.4. Entries with an implausible year (outside 501..2099)
+  * are skipped; month/day precision is not yet serialised (see FIXME). */
+ static void
+ add_date_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+ const gchar * tag, guint num_tags, const gchar * unused)
+ {
+ guint n;
+ guint i = 0;
+ const gchar *frame_id;
+ gchar **strings;
+
+ if (id3v2tag->major_version == 3)
+ frame_id = "TYER";
+ else
+ frame_id = "TDRC";
+
+ GST_LOG ("Adding date time frame");
+
+ /* NULL-terminated array of owned year strings, freed via g_strfreev() */
+ strings = g_new0 (gchar *, num_tags + 1);
+ for (n = 0; n < num_tags; ++n) {
+ GstDateTime *dt = NULL;
+ guint year;
+ gchar *s;
+
+ if (!gst_tag_list_get_date_time_index (list, tag, n, &dt) || dt == NULL)
+ continue;
+
+ year = gst_date_time_get_year (dt);
+ if (year > 500 && year < 2100) {
+ s = g_strdup_printf ("%u", year);
+ GST_LOG ("%s[%u] = '%s'", tag, n, s);
+ strings[i] = s;
+ i++;
+ } else {
+ GST_WARNING ("invalid year %u, skipping", year);
+ }
+
+ if (gst_date_time_has_month (dt)) {
+ if (id3v2tag->major_version == 3)
+ GST_FIXME ("write TDAT and possibly also TIME frame");
+ }
+ gst_date_time_unref (dt);
+ }
+
+ if (strings[0] != NULL) {
+ id3v2_tag_add_text_frame (id3v2tag, frame_id, (const gchar **) strings, i);
+ } else {
+ GST_WARNING ("Empty list for tag %s, skipping", tag);
+ }
+
+ g_strfreev (strings);
+ }
+
+ /* Write encoder names as a TSSE frame, appending the matching
+  * GST_TAG_ENCODER_VERSION value (when present and non-zero) to each name.
+  * A lone ENCODER_VERSION tag is ignored here: it is only ever folded into
+  * the string while the ENCODER tag itself is processed. */
+ static void
+ add_encoder_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * unused)
+ {
+   gchar **vals;
+   guint n;
+   int count = 0;
+
+   /* ENCODER_VERSION is either handled with the ENCODER tag or not at all */
+   if (strcmp (tag, GST_TAG_ENCODER_VERSION) == 0)
+     return;
+
+   vals = g_new0 (gchar *, num_tags + 1);
+   for (n = 0; n < num_tags; ++n) {
+     const gchar *encoder = NULL;
+     guint version = 0;
+
+     if (!gst_tag_list_peek_string_index (list, tag, n, &encoder)
+         || encoder == NULL)
+       continue;
+
+     if (gst_tag_list_get_uint_index (list, GST_TAG_ENCODER_VERSION, n,
+             &version) && version > 0)
+       vals[count] = g_strdup_printf ("%s %u", encoder, version);
+     else
+       vals[count] = g_strdup (encoder);
+
+     GST_LOG ("encoder[%u] = '%s'", n, vals[count]);
+     ++count;
+   }
+
+   if (vals[0] != NULL)
+     id3v2_tag_add_text_frame (id3v2tag, "TSSE", (const gchar **) vals, count);
+   else
+     GST_WARNING ("Empty list for tag %s, skipping", tag);
+
+   g_strfreev (vals);
+ }
+
+ /* Write a URL link frame (@frame_id, e.g. WCOP) from the first value of a
+  * URI tag; only one URI frame of each kind may appear per tag list. */
+ static void
+ add_uri_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * frame_id)
+ {
+   const gchar *url = NULL;
+   GstId3v2Frame frame;
+
+   g_assert (frame_id != NULL);
+
+   /* URI tags are limited to one of each per taglist */
+   if (!gst_tag_list_peek_string_index (list, tag, 0, &url) || url == NULL)
+     return;
+
+   if (*url == '\0' || !gst_uri_is_valid (url)) {
+     GST_WARNING ("Tag %s does not contain a valid URI (%s)", tag, url);
+     return;
+   }
+
+   id3v2_frame_init (&frame, frame_id, 0);
+   /* URL is written including its terminating NUL */
+   id3v2_frame_write_bytes (&frame, (const guint8 *) url, strlen (url) + 1);
+   g_array_append_val (id3v2tag->frames, frame);
+ }
+
+ /* Write ReplayGain info as a relative-volume frame: RVA2 for ID3v2.4, or
+  * the experimental XRVA frame for v2.3 (whose official RVAD frame is not
+  * widely implemented). Handles the track/album gain+peak tag pairs; when
+  * both gain and peak exist the frame is emitted only while processing the
+  * peak tag so it is written exactly once.
+  *
+  * Fixes: gain_val and peak_val were used without checking the
+  * gst_tag_list_get_double() return value, so a failed lookup read an
+  * uninitialized double; both are now zero-initialized. */
+ static void
+ add_relative_volume_tag (GstId3v2Tag * id3v2tag, const GstTagList * list,
+     const gchar * tag, guint num_tags, const gchar * unused)
+ {
+   const char *gain_tag_name;
+   const char *peak_tag_name;
+   gdouble peak_val = 0.0;
+   gdouble gain_val = 0.0;
+   const char *identification;
+   guint16 peak_int;
+   gint16 gain_int;
+   guint8 peak_bits;
+   GstId3v2Frame frame;
+   const gchar *frame_id;
+
+   /* figure out tag names and the identification string to use */
+   if (strcmp (tag, GST_TAG_TRACK_PEAK) == 0 ||
+       strcmp (tag, GST_TAG_TRACK_GAIN) == 0) {
+     gain_tag_name = GST_TAG_TRACK_GAIN;
+     peak_tag_name = GST_TAG_TRACK_PEAK;
+     identification = "track";
+     GST_DEBUG ("adding track relative-volume frame");
+   } else {
+     gain_tag_name = GST_TAG_ALBUM_GAIN;
+     peak_tag_name = GST_TAG_ALBUM_PEAK;
+     identification = "album";
+
+     if (id3v2tag->major_version == 3) {
+       GST_WARNING ("Cannot store replaygain album gain data in ID3v2.3");
+       return;
+     }
+     GST_DEBUG ("adding album relative-volume frame");
+   }
+
+   /* find the value for the paired tag (gain, if this is peak, and
+    * vice versa). if both tags exist, only write the frame when
+    * we're processing the peak tag. */
+   if (strcmp (tag, GST_TAG_TRACK_PEAK) == 0 ||
+       strcmp (tag, GST_TAG_ALBUM_PEAK) == 0) {
+
+     gst_tag_list_get_double (list, tag, &peak_val);
+
+     if (gst_tag_list_get_tag_size (list, gain_tag_name) > 0) {
+       gst_tag_list_get_double (list, gain_tag_name, &gain_val);
+       GST_DEBUG ("setting volume adjustment %g", gain_val);
+       gain_int = (gint16) (gain_val * 512.0);
+     } else
+       gain_int = 0;
+
+     /* copying mutagen: always write as 16 bits for sanity. */
+     peak_int = (short) (peak_val * G_MAXSHORT);
+     peak_bits = 16;
+   } else {
+     gst_tag_list_get_double (list, tag, &gain_val);
+     GST_DEBUG ("setting volume adjustment %g", gain_val);
+
+     gain_int = (gint16) (gain_val * 512.0);
+     peak_bits = 0;
+     peak_int = 0;
+
+     if (gst_tag_list_get_tag_size (list, peak_tag_name) != 0) {
+       GST_DEBUG
+           ("both gain and peak tags exist, not adding frame this time around");
+       return;
+     }
+   }
+
+   if (id3v2tag->major_version == 4) {
+     /* 2.4: Use RVA2 tag */
+     frame_id = "RVA2";
+   } else {
+     /* 2.3: Use XRVA tag - this is experimental, but useful in the real world.
+        This version only officially supports the 'RVAD' tag, but that appears
+        to not be widely implemented in reality. */
+     frame_id = "XRVA";
+   }
+
+   id3v2_frame_init (&frame, frame_id, 0);
+   /* identification string is written with its terminating NUL */
+   id3v2_frame_write_bytes (&frame, (const guint8 *) identification,
+       strlen (identification) + 1);
+   id3v2_frame_write_uint8 (&frame, 0x01);       /* Master volume */
+   id3v2_frame_write_uint16 (&frame, gain_int);
+   id3v2_frame_write_uint8 (&frame, peak_bits);
+   if (peak_bits)
+     id3v2_frame_write_uint16 (&frame, peak_int);
+
+   g_array_append_val (id3v2tag->frames, frame);
+ }
+
+ /* id3demux produces these for frames it cannot parse */
+ #define GST_ID3_DEMUX_TAG_ID3V2_FRAME "private-id3v2-frame"
+
+ /* Dispatch table mapping each supported GStreamer tag to the writer
+  * function that serialises it, plus an optional per-tag data argument
+  * (usually the 4-character ID3v2 frame id). Consulted by
+  * foreach_add_tag() below. */
+ static const struct
+ {
+ const gchar *gst_tag;
+ const GstId3v2AddTagFunc func;
+ const gchar *data;
+ } add_funcs[] = {
+ {
+ /* Simple text tags */
+ GST_TAG_ARTIST, add_text_tag, "TPE1"}, {
+ GST_TAG_ALBUM_ARTIST, add_text_tag, "TPE2"}, {
+ GST_TAG_TITLE, add_text_tag, "TIT2"}, {
+ GST_TAG_ALBUM, add_text_tag, "TALB"}, {
+ GST_TAG_COPYRIGHT, add_text_tag, "TCOP"}, {
+ GST_TAG_COMPOSER, add_text_tag, "TCOM"}, {
+ GST_TAG_GENRE, add_text_tag, "TCON"}, {
+ GST_TAG_ENCODED_BY, add_text_tag, "TENC"}, {
+ GST_TAG_PUBLISHER, add_text_tag, "TPUB"}, {
+ GST_TAG_INTERPRETED_BY, add_text_tag, "TPE4"}, {
+ GST_TAG_MUSICAL_KEY, add_text_tag, "TKEY"}, {
+
+ /* Private frames */
+ GST_TAG_PRIVATE_DATA, add_private_data_tag, "PRIV"}, {
+ GST_ID3_DEMUX_TAG_ID3V2_FRAME, add_id3v2frame_tag, NULL}, {
+
+ /* Track and album numbers */
+ GST_TAG_TRACK_NUMBER, add_count_or_num_tag, "TRCK"}, {
+ GST_TAG_TRACK_COUNT, add_count_or_num_tag, "TRCK"}, {
+ GST_TAG_ALBUM_VOLUME_NUMBER, add_count_or_num_tag, "TPOS"}, {
+ GST_TAG_ALBUM_VOLUME_COUNT, add_count_or_num_tag, "TPOS"}, {
+
+ /* Comment tags */
+ GST_TAG_COMMENT, add_comment_tag, NULL}, {
+ GST_TAG_EXTENDED_COMMENT, add_comment_tag, NULL}, {
+
+ /* BPM tag */
+ GST_TAG_BEATS_PER_MINUTE, add_bpm_tag, NULL}, {
+
+ /* Images */
+ GST_TAG_IMAGE, add_image_tag, NULL}, {
+ GST_TAG_PREVIEW_IMAGE, add_image_tag, NULL}, {
+
+ /* Misc user-defined text tags for IDs (and UFID frame) */
+ GST_TAG_MUSICBRAINZ_ARTISTID, add_musicbrainz_tag, "\000"}, {
+ GST_TAG_MUSICBRAINZ_ALBUMID, add_musicbrainz_tag, "\001"}, {
+ GST_TAG_MUSICBRAINZ_ALBUMARTISTID, add_musicbrainz_tag, "\002"}, {
+ GST_TAG_MUSICBRAINZ_TRMID, add_musicbrainz_tag, "\003"}, {
+ GST_TAG_CDDA_MUSICBRAINZ_DISCID, add_musicbrainz_tag, "\004"}, {
+ GST_TAG_CDDA_CDDB_DISCID, add_musicbrainz_tag, "\005"}, {
+ GST_TAG_MUSICBRAINZ_TRACKID, add_unique_file_id_tag, NULL}, {
+
+ /* Info about encoder */
+ GST_TAG_ENCODER, add_encoder_tag, NULL}, {
+ GST_TAG_ENCODER_VERSION, add_encoder_tag, NULL}, {
+
+ /* URIs */
+ GST_TAG_COPYRIGHT_URI, add_uri_tag, "WCOP"}, {
+ GST_TAG_LICENSE_URI, add_uri_tag, "WCOP"}, {
+
+ /* Up to here, all the frame ids and contents have been the same between
+ versions 2.3 and 2.4. The rest of them differ... */
+ /* Date (in ID3v2.3, this is a TYER tag. In v2.4, it's a TDRC tag */
+ GST_TAG_DATE_TIME, add_date_tag, NULL}, {
+
+ /* Replaygain data (not really supported in 2.3, we use an experimental
+ tag there) */
+ GST_TAG_TRACK_PEAK, add_relative_volume_tag, NULL}, {
+ GST_TAG_TRACK_GAIN, add_relative_volume_tag, NULL}, {
+ GST_TAG_ALBUM_PEAK, add_relative_volume_tag, NULL}, {
+ GST_TAG_ALBUM_GAIN, add_relative_volume_tag, NULL}, {
+
+ /* Sortable version of various tags. These are all v2.4 ONLY */
+ GST_TAG_ARTIST_SORTNAME, add_text_tag_v4, "TSOP"}, {
+ GST_TAG_ALBUM_SORTNAME, add_text_tag_v4, "TSOA"}, {
+ GST_TAG_TITLE_SORTNAME, add_text_tag_v4, "TSOT"}
+ };
+
+ /* GstTagForeachFunc: look up @tag in the add_funcs dispatch table and run
+  * the matching writer. Fixed tags (at most one value allowed) are clamped
+  * to a single value; unknown tags are dropped with a warning. */
+ static void
+ foreach_add_tag (const GstTagList * list, const gchar * tag, gpointer userdata)
+ {
+   GstId3v2Tag *id3v2tag = (GstId3v2Tag *) userdata;
+   guint num_tags, i;
+
+   num_tags = gst_tag_list_get_tag_size (list, tag);
+
+   GST_LOG ("Processing tag %s (num=%u)", tag, num_tags);
+
+   if (num_tags > 1 && gst_tag_is_fixed (tag)) {
+     GST_WARNING ("Multiple occurrences of fixed tag '%s', ignoring some", tag);
+     num_tags = 1;
+   }
+
+   for (i = 0; i < G_N_ELEMENTS (add_funcs); ++i) {
+     if (strcmp (add_funcs[i].gst_tag, tag) == 0) {
+       add_funcs[i].func (id3v2tag, list, tag, num_tags, add_funcs[i].data);
+       return;
+     }
+   }
+
+   GST_WARNING ("Unsupported tag '%s' - not written", tag);
+ }
+
+ /* Render @taglist as a complete ID3v2 tag of the requested major version
+  * (3 or 4) and return it as a new buffer, or NULL if the version is not
+  * supported. Ownership of the returned buffer passes to the caller. */
+ GstBuffer *
+ id3_mux_render_v2_tag (GstTagMux * mux, const GstTagList * taglist, int version)
+ {
+ GstId3v2Tag tag;
+ GstBuffer *buf;
+
+ if (!id3v2_tag_init (&tag, version)) {
+ GST_WARNING_OBJECT (mux, "Unsupported version %d", version);
+ return NULL;
+ }
+
+ /* Render the tag */
+ gst_tag_list_foreach (taglist, foreach_add_tag, &tag);
+
+ #if 0
+ /* Do we want to add our own signature to the tag somewhere? */
+ {
+ gchar *tag_producer_str;
+
+ tag_producer_str = g_strdup_printf ("(GStreamer id3v2mux %s, using "
+ "taglib %u.%u)", VERSION, TAGLIB_MAJOR_VERSION, TAGLIB_MINOR_VERSION);
+ add_one_txxx_tag (id3v2tag, "tag_encoder", tag_producer_str);
+ g_free (tag_producer_str);
+ }
+ #endif
+
+ /* Create buffer with tag */
+ buf = id3v2_tag_to_buffer (&tag);
+ GST_LOG_OBJECT (mux, "tag size = %d bytes", (int) gst_buffer_get_size (buf));
+
+ id3v2_tag_unset (&tag);
+
+ return buf;
+ }
+
+ #define ID3_V1_TAG_SIZE 128
+
+ typedef void (*GstId3v1WriteFunc) (const GstTagList * list,
+ const gchar * gst_tag, guint8 * dst, int len, gboolean * wrote_tag);
+
+ /* GstId3v1WriteFunc: convert the first string value of @tag to Latin-1
+  * (unrepresentable characters become '?') and copy at most @maxlen bytes
+  * into the fixed-size ID3v1 field at @dst (the field is pre-zeroed, so no
+  * NUL terminator is needed). Sets *wrote_tag when anything was written.
+  *
+  * Fixes: the converted string was leaked when the conversion produced an
+  * empty (but non-NULL) result, and MIN() compared the unsigned gsize
+  * length against a signed int. */
+ static void
+ latin1_convert (const GstTagList * list, const gchar * tag,
+     guint8 * dst, int maxlen, gboolean * wrote_tag)
+ {
+   gchar *str;
+   gsize len;
+   gchar *latin1;
+
+   if (!gst_tag_list_get_string (list, tag, &str) || str == NULL)
+     return;
+
+   /* Convert to Latin-1 (ISO-8859-1), replacing unrepresentable characters
+      with '?' */
+   latin1 =
+       g_convert_with_fallback (str, -1, "ISO-8859-1", "UTF-8", (char *) "?",
+       NULL, &len, NULL);
+
+   if (latin1 != NULL && *latin1 != '\0') {
+     len = MIN (len, (gsize) maxlen);
+     memcpy (dst, latin1, len);
+     *wrote_tag = TRUE;
+   }
+
+   /* g_free(NULL) is a no-op, so free unconditionally on every path */
+   g_free (latin1);
+   g_free (str);
+ }
+
+ /* GstId3v1WriteFunc: write the year of the first date tag as four ASCII
+  * digits into the ID3v1 year field; implausible years are skipped. */
+ static void
+ date_v1_convert (const GstTagList * list, const gchar * tag,
+     guint8 * dst, int maxlen, gboolean * wrote_tag)
+ {
+   GstDateTime *dt;
+   guint year;
+   gchar str[5];
+
+   /* Only one date supported */
+   if (!gst_tag_list_get_date_time_index (list, tag, 0, &dt))
+     return;
+
+   year = gst_date_time_get_year (dt);
+   /* Check for plausible year */
+   if (year > 500 && year < 2100) {
+     g_snprintf (str, 5, "%.4u", year);
+     memcpy (dst, str, 4);
+     *wrote_tag = TRUE;
+   } else {
+     GST_WARNING ("invalid year %u, skipping", year);
+   }
+
+   gst_date_time_unref (dt);
+ }
+
+ /* GstId3v1WriteFunc: map the first genre string onto its numeric ID3
+  * genre code and store it in the single-byte ID3v1 genre field. Only
+  * codes 0-127 are representable; anything else is dropped. */
+ static void
+ genre_v1_convert (const GstTagList * list, const gchar * tag,
+     guint8 * dst, int maxlen, gboolean * wrote_tag)
+ {
+   const gchar *str;
+   guint i, count;
+
+   /* We only support one genre */
+   if (!gst_tag_list_peek_string_index (list, tag, 0, &str) || str == NULL)
+     return;
+
+   count = gst_tag_id3_genre_count ();
+
+   for (i = 0; i < count; i++) {
+     if (!g_str_equal (str, gst_tag_id3_genre_get (i)))
+       continue;
+
+     if (i <= 127) {
+       *dst = (guint8) i;
+       *wrote_tag = TRUE;
+     }
+     return;
+   }
+ }
+
+ /* GstId3v1WriteFunc: store the first track number in the single-byte
+  * ID3v1 track field; values above 127 are not written. */
+ static void
+ track_number_convert (const GstTagList * list, const gchar * tag,
+     guint8 * dst, int maxlen, gboolean * wrote_tag)
+ {
+   guint tracknum;
+
+   /* We only support one track number */
+   if (!gst_tag_list_get_uint_index (list, tag, 0, &tracknum))
+     return;
+
+   if (tracknum > 127)
+     return;
+
+   *dst = (guint8) tracknum;
+   *wrote_tag = TRUE;
+ }
+
+ /* FIXME: get rid of silly table */
+ /* Layout of the 128-byte ID3v1 tag: byte offset and length of each fixed
+  * field, plus the writer function that fills it in. Offsets 0-2 hold the
+  * "TAG" magic written by id3_mux_render_v1_tag(). */
+ static const struct
+ {
+ const gchar *gst_tag;
+ const gint offset;
+ const gint length;
+ const GstId3v1WriteFunc func;
+ } v1_funcs[] = {
+ {
+ GST_TAG_TITLE, 3, 30, latin1_convert}, {
+ GST_TAG_ARTIST, 33, 30, latin1_convert}, {
+ GST_TAG_ALBUM, 63, 30, latin1_convert}, {
+ GST_TAG_DATE_TIME, 93, 4, date_v1_convert}, {
+ GST_TAG_COMMENT, 97, 28, latin1_convert}, {
+ /* Note: one-byte gap here */
+ GST_TAG_TRACK_NUMBER, 126, 1, track_number_convert}, {
+ GST_TAG_GENRE, 127, 1, genre_v1_convert}
+ };
+
+ /* Render @taglist as a fixed 128-byte ID3v1 tag ("TAG" magic plus the
+  * fields listed in v1_funcs). Returns NULL when no supported tag could
+  * be written; the caller owns the returned buffer.
+  *
+  * Fixes: the return value of gst_buffer_map() was not checked before the
+  * mapped memory was written to. */
+ GstBuffer *
+ id3_mux_render_v1_tag (GstTagMux * mux, const GstTagList * taglist)
+ {
+   GstMapInfo info;
+   GstBuffer *buf;
+   guint8 *data;
+   gboolean wrote_tag = FALSE;
+   int i;
+
+   buf = gst_buffer_new_allocate (NULL, ID3_V1_TAG_SIZE, NULL);
+   if (!gst_buffer_map (buf, &info, GST_MAP_WRITE)) {
+     GST_WARNING_OBJECT (mux, "failed to map ID3v1 tag buffer");
+     gst_buffer_unref (buf);
+     return NULL;
+   }
+   data = info.data;
+   memset (data, 0, ID3_V1_TAG_SIZE);
+
+   data[0] = 'T';
+   data[1] = 'A';
+   data[2] = 'G';
+
+   /* Genre #0 stands for 'Blues', so init genre field to an invalid number */
+   data[127] = 255;
+
+   for (i = 0; i < G_N_ELEMENTS (v1_funcs); i++) {
+     v1_funcs[i].func (taglist, v1_funcs[i].gst_tag, data + v1_funcs[i].offset,
+         v1_funcs[i].length, &wrote_tag);
+   }
+
+   gst_buffer_unmap (buf, &info);
+
+   if (!wrote_tag) {
+     GST_WARNING_OBJECT (mux, "no ID3v1 tag written (no suitable tags found)");
+     gst_buffer_unref (buf);
+     return NULL;
+   }
+
+   return buf;
+ }
--- /dev/null
+ /*
+ * This library is licensed under 2 different licenses and you
+ * can choose to use it under the terms of either one of them. The
+ * two licenses are the MPL 1.1 and the LGPL.
+ *
+ * MPL:
+ *
+ * The contents of this file are subject to the Mozilla Public License
+ * Version 1.1 (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+ * License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * LGPL:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ * The Original Code is Fluendo MPEG Demuxer plugin.
+ *
+ * The Initial Developer of the Original Code is Fluendo, S.L.
+ * Portions created by Fluendo, S.L. are Copyright (C) 2005
+ * Fluendo, S.L. All Rights Reserved.
+ *
+ * Contributor(s): Wim Taymans <wim@fluendo.com>
+ * Jan Schmidt <thaytan@noraisin.net>
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+
+ #include <gst/tag/tag.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/base/gstbytereader.h>
+
+ #include "gstmpegdefs.h"
+ #include "gstmpegdemux.h"
+
+ #define BLOCK_SZ 32768
+ #define SCAN_SCR_SZ 12
+ #define SCAN_PTS_SZ 80
+
+ #define SEGMENT_THRESHOLD (300*GST_MSECOND)
+ #define VIDEO_SEGMENT_THRESHOLD (500*GST_MSECOND)
+
+ #define DURATION_SCAN_LIMIT 4 * 1024 * 1024
+
+ typedef enum
+ {
+ SCAN_SCR,
+ SCAN_DTS,
+ SCAN_PTS
+ } SCAN_MODE;
+
+ /* We clamp scr delta with 0 so negative bytes won't be possible */
+ #define GSTTIME_TO_BYTES(time) \
+ ((time != -1) ? gst_util_uint64_scale (MAX(0,(gint64) (GSTTIME_TO_MPEGTIME(time))), demux->scr_rate_n, demux->scr_rate_d) : -1)
+ #define BYTES_TO_GSTTIME(bytes) ((bytes != -1) ? MPEGTIME_TO_GSTTIME(gst_util_uint64_scale (bytes, demux->scr_rate_d, demux->scr_rate_n)) : -1)
+
+ #define ADAPTER_OFFSET_FLUSH(_bytes_) demux->adapter_offset += (_bytes_)
+
+ GST_DEBUG_CATEGORY_STATIC (gstflupsdemux_debug);
+ #define GST_CAT_DEFAULT (gstflupsdemux_debug)
+
+ GST_DEBUG_CATEGORY_EXTERN (mpegpspesfilter_debug);
+
+ /* MPEG2Demux signals and args */
+ enum
+ {
+ /* FILL ME */
+ LAST_SIGNAL
+ };
+
+ enum
+ {
+ PROP_0,
+ PROP_IGNORE_SCR,
+ /* FILL ME */
+ };
+
+ #define DEFAULT_IGNORE_SCR FALSE
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg, "
+ "mpegversion = (int) { 1, 2 }, "
+ "systemstream = (boolean) TRUE;" "video/x-cdxa")
+ );
+
+ static GstStaticPadTemplate video_template =
+ GST_STATIC_PAD_TEMPLATE ("video_%02x",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("video/mpeg, "
+ "mpegversion = (int) { 1, 2, 4 }, " "systemstream = (boolean) FALSE, "
+ "parsed = (boolean) FALSE; " "video/x-h264, "
+ "stream-format=(string)byte-stream; " "video/x-h265, "
+ "stream-format=(string)byte-stream;")
+ );
+
+ static GstStaticPadTemplate audio_template =
+ GST_STATIC_PAD_TEMPLATE ("audio_%02x",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("audio/mpeg, mpegversion = (int) 1;"
+ "audio/mpeg, mpegversion = (int) 4, stream-format = (string) { adts, loas };"
+ "audio/x-private1-lpcm; "
+ "audio/x-private1-ac3;" "audio/x-private1-dts;" "audio/ac3")
+ );
+
+ static GstStaticPadTemplate subpicture_template =
+ GST_STATIC_PAD_TEMPLATE ("subpicture_%02x",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("subpicture/x-dvd")
+ );
+
+ static GstStaticPadTemplate private_template =
+ GST_STATIC_PAD_TEMPLATE ("private_%d",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
/* GObject/GType plumbing */
static void gst_ps_demux_base_init (GstPsDemuxClass * klass);
static void gst_ps_demux_class_init (GstPsDemuxClass * klass);
static void gst_ps_demux_init (GstPsDemux * demux);
static void gst_ps_demux_finalize (GstPsDemux * demux);
static void gst_ps_demux_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_ps_demux_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_ps_demux_reset (GstPsDemux * demux);

/* Sink pad handlers and scheduling-mode activation */
static gboolean gst_ps_demux_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static GstFlowReturn gst_ps_demux_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer);
static gboolean gst_ps_demux_sink_activate (GstPad * sinkpad,
    GstObject * parent);
static gboolean gst_ps_demux_sink_activate_mode (GstPad * pad,
    GstObject * parent, GstPadMode mode, gboolean active);
static void gst_ps_demux_loop (GstPad * pad);

/* Source pad handlers (seek events and position/duration queries) */
static gboolean gst_ps_demux_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static gboolean gst_ps_demux_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query);

static GstStateChangeReturn gst_ps_demux_change_state (GstElement * element,
    GstStateChange transition);

/* SCR/timestamp scanning helpers, used by seeking (defined later) */
static inline gboolean gst_ps_demux_scan_forward_ts (GstPsDemux * demux,
    guint64 * pos, SCAN_MODE mode, guint64 * rts, gint limit);
static inline gboolean gst_ps_demux_scan_backward_ts (GstPsDemux * demux,
    guint64 * pos, SCAN_MODE mode, guint64 * rts, gint limit);

static inline void gst_ps_demux_send_gap_updates (GstPsDemux * demux,
    GstClockTime new_time);
static inline void gst_ps_demux_clear_times (GstPsDemux * demux);

static void gst_ps_demux_reset_psm (GstPsDemux * demux);
static void gst_ps_demux_flush (GstPsDemux * demux);

static GstElementClass *parent_class = NULL;

/* Local helpers to poke position/duration into a GstSegment */
static void gst_segment_set_position (GstSegment * segment, GstFormat format,
    guint64 position);
static void gst_segment_set_duration (GstSegment * segment, GstFormat format,
    guint64 duration);

/*static guint gst_ps_demux_signals[LAST_SIGNAL] = { 0 };*/
+
/* Lazily register and return the GstPsDemux GType.
 *
 * NOTE(review): classic non-atomic check-then-register pattern; presumably
 * only reached from serialized plugin init — confirm if called from
 * multiple threads. */
GType
gst_ps_demux_get_type (void)
{
  static GType ps_demux_type = 0;

  if (!ps_demux_type) {
    static const GTypeInfo ps_demux_info = {
      sizeof (GstPsDemuxClass),
      (GBaseInitFunc) gst_ps_demux_base_init,
      NULL,
      (GClassInitFunc) gst_ps_demux_class_init,
      NULL,
      NULL,
      sizeof (GstPsDemux),
      0,
      (GInstanceInitFunc) gst_ps_demux_init,
      NULL
    };

    ps_demux_type =
        g_type_register_static (GST_TYPE_ELEMENT, "GstMpegPSDemux",
        &ps_demux_info, 0);

    /* "flups" is the legacy (Fluendo) name kept for the debug category
     * variable; the category itself is named "mpegpsdemux" */
    GST_DEBUG_CATEGORY_INIT (gstflupsdemux_debug, "mpegpsdemux", 0,
        "MPEG program stream demultiplexer element");
  }

  return ps_demux_type;
}
+
/* Register the "mpegpsdemux" element; the extra code also initializes the
 * debug category used by the companion PES filter code. */
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (mpegpsdemux, "mpegpsdemux",
    GST_RANK_PRIMARY, GST_TYPE_PS_DEMUX,
    GST_DEBUG_CATEGORY_INIT (mpegpspesfilter_debug, "mpegpspesfilter", 0,
        "MPEG-PS PES filter"));
+
/* Class base-init: materialize the static pad templates once, keep them on
 * the class (stream pads are later created from them), register them with
 * the element class and set the element metadata. */
static void
gst_ps_demux_base_init (GstPsDemuxClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  klass->sink_template = gst_static_pad_template_get (&sink_template);
  klass->video_template = gst_static_pad_template_get (&video_template);
  klass->audio_template = gst_static_pad_template_get (&audio_template);
  klass->subpicture_template =
      gst_static_pad_template_get (&subpicture_template);
  klass->private_template = gst_static_pad_template_get (&private_template);

  gst_element_class_add_pad_template (element_class, klass->video_template);
  gst_element_class_add_pad_template (element_class, klass->audio_template);
  gst_element_class_add_pad_template (element_class,
      klass->subpicture_template);
  gst_element_class_add_pad_template (element_class, klass->private_template);
  gst_element_class_add_pad_template (element_class, klass->sink_template);

  gst_element_class_set_static_metadata (element_class,
      "MPEG Program Stream Demuxer", "Codec/Demuxer",
      "Demultiplexes MPEG Program Streams", "Wim Taymans <wim@fluendo.com>");
}
+
+ static void
+ gst_ps_demux_class_init (GstPsDemuxClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = (GObjectFinalizeFunc) gst_ps_demux_finalize;
+ gobject_class->set_property = gst_ps_demux_set_property;
+ gobject_class->get_property = gst_ps_demux_get_property;
+
+ gstelement_class->change_state = gst_ps_demux_change_state;
+
+ /**
+ * GstPsDemux:ignore-scr:
+ *
+ * Ignore SCR (System Clock Reference) data from MPEG-PS Pack Header.
+ * This can help with playback of some broken files.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_IGNORE_SCR,
+ g_param_spec_boolean ("ignore-scr", "Ignore SCR data for timing",
+ "Ignore SCR data for timing", DEFAULT_IGNORE_SCR,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
+ G_PARAM_STATIC_STRINGS));
+ }
+
+ static void
+ gst_ps_demux_init (GstPsDemux * demux)
+ {
+ GstPsDemuxClass *klass = GST_PS_DEMUX_GET_CLASS (demux);
+
+ demux->sinkpad = gst_pad_new_from_template (klass->sink_template, "sink");
+ gst_pad_set_event_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_ps_demux_sink_event));
+ gst_pad_set_chain_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_ps_demux_chain));
+ gst_pad_set_activate_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_ps_demux_sink_activate));
+ gst_pad_set_activatemode_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_ps_demux_sink_activate_mode));
+
+ gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+
+ demux->streams =
+ g_malloc0 (sizeof (GstPsStream *) * (GST_PS_DEMUX_MAX_STREAMS));
+ demux->streams_found =
+ g_malloc0 (sizeof (GstPsStream *) * (GST_PS_DEMUX_MAX_STREAMS));
+ demux->found_count = 0;
+
+ demux->adapter = gst_adapter_new ();
+ demux->rev_adapter = gst_adapter_new ();
+ demux->flowcombiner = gst_flow_combiner_new ();
+
+ gst_ps_demux_reset (demux);
+
+ demux->ignore_scr = DEFAULT_IGNORE_SCR;
+ }
+
+ static void
+ gst_ps_demux_finalize (GstPsDemux * demux)
+ {
+ gst_ps_demux_reset (demux);
+ g_free (demux->streams);
+ g_free (demux->streams_found);
+
+ gst_flow_combiner_free (demux->flowcombiner);
+ g_object_unref (demux->adapter);
+ g_object_unref (demux->rev_adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (G_OBJECT (demux));
+ }
+
+ static void
+ gst_ps_demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstPsDemux *demux = GST_PS_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_IGNORE_SCR:
+ demux->ignore_scr = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+ static void
+ gst_ps_demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstPsDemux *demux = GST_PS_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_IGNORE_SCR:
+ g_value_set_boolean (value, demux->ignore_scr);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
/* Return the demuxer to its pristine state: remove/free every stream and
 * pad, clear buffered data, and reset all timing state.  Called from
 * instance init, finalize and on state changes. */
static void
gst_ps_demux_reset (GstPsDemux * demux)
{
  /* Clean up the streams and pads we allocated */
  gint i;

  for (i = 0; i < GST_PS_DEMUX_MAX_STREAMS; i++) {
    GstPsStream *stream = demux->streams[i];

    if (stream != NULL) {
      if (stream->pad && GST_PAD_PARENT (stream->pad)) {
        gst_flow_combiner_remove_pad (demux->flowcombiner, stream->pad);
        gst_element_remove_pad (GST_ELEMENT_CAST (demux), stream->pad);
      } else {
        /* Pad was never added to the element (created after no-more-pads);
         * we hold the only reference */
        gst_object_unref (stream->pad);
      }

      if (stream->pending_tags)
        gst_tag_list_unref (stream->pending_tags);
      g_free (stream);
      demux->streams[i] = NULL;
    }
  }
  memset (demux->streams_found, 0,
      sizeof (GstPsStream *) * (GST_PS_DEMUX_MAX_STREAMS));
  demux->found_count = 0;

  gst_adapter_clear (demux->adapter);
  gst_adapter_clear (demux->rev_adapter);

  /* G_MAXUINT64 is the "unknown/invalid" sentinel for all of these */
  demux->adapter_offset = G_MAXUINT64;
  demux->first_scr = G_MAXUINT64;
  demux->last_scr = G_MAXUINT64;
  demux->current_scr = G_MAXUINT64;
  demux->base_time = G_MAXUINT64;
  demux->scr_rate_n = G_MAXUINT64;
  demux->scr_rate_d = G_MAXUINT64;
  demux->first_pts = G_MAXUINT64;
  demux->last_pts = G_MAXUINT64;
  demux->mux_rate = G_MAXUINT64;
  demux->next_pts = G_MAXUINT64;
  demux->next_dts = G_MAXUINT64;
  demux->need_no_more_pads = TRUE;
  gst_ps_demux_reset_psm (demux);
  gst_segment_init (&demux->sink_segment, GST_FORMAT_UNDEFINED);
  gst_segment_init (&demux->src_segment, GST_FORMAT_TIME);
  gst_ps_demux_flush (demux);
  demux->have_group_id = FALSE;
  demux->group_id = G_MAXUINT;
}
+
+ static GstPsStream *
+ gst_ps_demux_create_stream (GstPsDemux * demux, gint id, gint stream_type,
+ gint layer)
+ {
+ GstPsStream *stream;
+ GstPadTemplate *template;
+ gchar *name;
+ GstPsDemuxClass *klass = GST_PS_DEMUX_GET_CLASS (demux);
+ GstCaps *caps;
+ GstClockTime threshold = SEGMENT_THRESHOLD;
+ GstEvent *event;
+ gchar *stream_id;
+
+ name = NULL;
+ template = NULL;
+ caps = NULL;
+
+ GST_DEBUG_OBJECT (demux, "create stream id 0x%02x, type 0x%02x", id,
+ stream_type);
+
+ switch (stream_type) {
+ case ST_VIDEO_MPEG1:
+ case ST_VIDEO_MPEG2:
+ case ST_VIDEO_MPEG4:
+ case ST_GST_VIDEO_MPEG1_OR_2:
+ {
+ gint mpeg_version = 1;
+ if (stream_type == ST_VIDEO_MPEG2 ||
+ (stream_type == ST_GST_VIDEO_MPEG1_OR_2 && demux->is_mpeg2_pack)) {
+ mpeg_version = 2;
+ }
+ if (stream_type == ST_VIDEO_MPEG4) {
+ mpeg_version = 4;
+ }
+
+ template = klass->video_template;
+ name = g_strdup_printf ("video_%02x", id);
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, mpeg_version,
+ "systemstream", G_TYPE_BOOLEAN, FALSE,
+ "parsed", G_TYPE_BOOLEAN, FALSE, NULL);
+ threshold = VIDEO_SEGMENT_THRESHOLD;
+ break;
+ }
+ case ST_AUDIO_MPEG1:
+ case ST_AUDIO_MPEG2:
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ if (layer) {
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, layer, NULL);
+ } else {
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, NULL);
+ }
+ break;
+ case ST_PRIVATE_SECTIONS:
+ case ST_PRIVATE_DATA:
+ case ST_MHEG:
+ case ST_DSMCC:
+ break;
+ case ST_AUDIO_AAC_ADTS:
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "stream-format", G_TYPE_STRING, "adts", NULL);
+ break;
+ case ST_AUDIO_AAC_LOAS: // LATM/LOAS AAC syntax
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "stream-format", G_TYPE_STRING, "loas", NULL);
+ break;
+ case ST_VIDEO_H264:
+ template = klass->video_template;
+ name = g_strdup_printf ("video_%02x", id);
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "byte-stream", NULL);
+ threshold = VIDEO_SEGMENT_THRESHOLD;
+ break;
+ case ST_VIDEO_H265:
+ template = klass->video_template;
+ name = g_strdup_printf ("video_%02x", id);
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", NULL);
+ threshold = VIDEO_SEGMENT_THRESHOLD;
+ break;
+
+ case ST_PS_AUDIO_AC3:
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ caps = gst_caps_new_empty_simple ("audio/x-private1-ac3");
+ break;
+ case ST_PS_AUDIO_DTS:
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ caps = gst_caps_new_empty_simple ("audio/x-private1-dts");
+ break;
+ case ST_PS_AUDIO_LPCM:
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ caps = gst_caps_new_empty_simple ("audio/x-private1-lpcm");
+ break;
+ case ST_PS_DVD_SUBPICTURE:
+ template = klass->subpicture_template;
+ name = g_strdup_printf ("subpicture_%02x", id);
+ caps = gst_caps_new_empty_simple ("subpicture/x-dvd");
+ break;
+ case ST_GST_AUDIO_RAWA52:
+ template = klass->audio_template;
+ name = g_strdup_printf ("audio_%02x", id);
+ caps = gst_caps_new_empty_simple ("audio/ac3");
+ break;
+ default:
+ break;
+ }
+
+ if (name == NULL || template == NULL || caps == NULL) {
+ g_free (name);
+ if (caps)
+ gst_caps_unref (caps);
+ return FALSE;
+ }
+
+ stream = g_new0 (GstPsStream, 1);
+ stream->id = id;
+ stream->discont = TRUE;
+ stream->need_segment = TRUE;
+ stream->notlinked = FALSE;
+ stream->type = stream_type;
+ stream->pending_tags = NULL;
+ stream->pad = gst_pad_new_from_template (template, name);
+ stream->segment_thresh = threshold;
+ gst_pad_set_event_function (stream->pad,
+ GST_DEBUG_FUNCPTR (gst_ps_demux_src_event));
+ gst_pad_set_query_function (stream->pad,
+ GST_DEBUG_FUNCPTR (gst_ps_demux_src_query));
+ gst_pad_use_fixed_caps (stream->pad);
+
+ /* needed for set_caps to work */
+ if (!gst_pad_set_active (stream->pad, TRUE)) {
+ GST_WARNING_OBJECT (demux, "Failed to activate pad %" GST_PTR_FORMAT,
+ stream->pad);
+ }
+
+ stream_id =
+ gst_pad_create_stream_id_printf (stream->pad, GST_ELEMENT_CAST (demux),
+ "%02x", id);
+
+ event = gst_pad_get_sticky_event (demux->sinkpad, GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &demux->group_id))
+ demux->have_group_id = TRUE;
+ else
+ demux->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!demux->have_group_id) {
+ demux->have_group_id = TRUE;
+ demux->group_id = gst_util_group_id_next ();
+ }
+ event = gst_event_new_stream_start (stream_id);
+ if (demux->have_group_id)
+ gst_event_set_group_id (event, demux->group_id);
+
+ gst_pad_push_event (stream->pad, event);
+ g_free (stream_id);
+
+ gst_pad_set_caps (stream->pad, caps);
+
+ if (!stream->pending_tags)
+ stream->pending_tags = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (stream->pending_tags, NULL,
+ caps);
+
+ GST_DEBUG_OBJECT (demux, "create pad %s, caps %" GST_PTR_FORMAT, name, caps);
+ gst_caps_unref (caps);
+ g_free (name);
+
+ return stream;
+ }
+
+ static GstPsStream *
+ gst_ps_demux_get_stream (GstPsDemux * demux, gint id, gint type, gint layer)
+ {
+ GstPsStream *stream = demux->streams[id];
+
+ if (stream == NULL) {
+ if (!(stream = gst_ps_demux_create_stream (demux, id, type, layer)))
+ goto unknown_stream;
+
+ GST_DEBUG_OBJECT (demux, "adding pad for stream id 0x%02x type 0x%02x", id,
+ type);
+
+ demux->streams[id] = stream;
+ demux->streams_found[demux->found_count++] = stream;
+
+ if (demux->need_no_more_pads) {
+ gst_element_add_pad (GST_ELEMENT (demux), stream->pad);
+ gst_flow_combiner_add_pad (demux->flowcombiner, stream->pad);
+ } else {
+ /* only likely to confuse decodebin etc, so discard */
+ /* FIXME should perform full switch protocol:
+ * add a whole new set of pads, drop old and no-more-pads again */
+ GST_DEBUG_OBJECT (demux,
+ "but already signalled no-more-pads; not adding");
+ gst_object_ref_sink (stream->pad);
+ }
+ }
+ return stream;
+
+ /* ERROR */
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (demux, "unknown stream id 0x%02x type 0x%02x", id, type);
+ return NULL;
+ }
+ }
+
+ static GstPsStream *
+ gst_ps_demux_get_stream_from_pad (GstPsDemux * demux, GstPad * srcpad)
+ {
+ gint i, count;
+
+ count = demux->found_count;
+ for (i = 0; i < count; i++) {
+ GstPsStream *stream = demux->streams_found[i];
+
+ if (stream && stream->pad == srcpad)
+ return stream;
+ }
+
+ GST_DEBUG_OBJECT (srcpad, "no stream found for pad!");
+ return NULL;
+ }
+
/* Push a pending segment event and any pending tags on @stream's pad
 * before data flows.  The segment is derived from src_segment, shifted by
 * base_time when known. */
static inline void
gst_ps_demux_send_segment (GstPsDemux * demux, GstPsStream * stream,
    GstClockTime pts)
{
  /* discont */
  if (G_UNLIKELY (stream->need_segment)) {
    GstSegment segment;
    GstEvent *segment_event;

    GST_DEBUG ("PTS timestamp:%" GST_TIME_FORMAT " base_time %" GST_TIME_FORMAT
        " src_segment.start:%" GST_TIME_FORMAT " .stop:%" GST_TIME_FORMAT,
        GST_TIME_ARGS (pts), GST_TIME_ARGS (demux->base_time),
        GST_TIME_ARGS (demux->src_segment.start),
        GST_TIME_ARGS (demux->src_segment.stop));

    /* we should be in sync with downstream, so start from our segment notion,
     * which also includes proper base_time etc, tweak it a bit and send */
    gst_segment_copy_into (&demux->src_segment, &segment);
    if (GST_CLOCK_TIME_IS_VALID (demux->base_time)) {
      if (GST_CLOCK_TIME_IS_VALID (segment.start))
        segment.start += demux->base_time;
      if (GST_CLOCK_TIME_IS_VALID (segment.stop))
        segment.stop += demux->base_time;
      segment.time = segment.start - demux->base_time;
    }

    /* Reuse one seqnum for all segments of a seek; remember the first one */
    segment_event = gst_event_new_segment (&segment);
    if (demux->segment_seqnum)
      gst_event_set_seqnum (segment_event, demux->segment_seqnum);
    else
      demux->segment_seqnum = gst_event_get_seqnum (segment_event);
    GST_INFO_OBJECT (demux, "sending segment event %" GST_SEGMENT_FORMAT
        " to pad %" GST_PTR_FORMAT, &segment, stream->pad);

    gst_pad_push_event (stream->pad, segment_event);

    stream->need_segment = FALSE;
  }

  if (G_UNLIKELY (stream->pending_tags)) {
    GST_DEBUG_OBJECT (demux, "Sending pending_tags %p for pad %s:%s : %"
        GST_PTR_FORMAT, stream->pending_tags,
        GST_DEBUG_PAD_NAME (stream->pad), stream->pending_tags);
    /* the tag event takes ownership of the list */
    gst_pad_push_event (stream->pad, gst_event_new_tag (stream->pending_tags));
    stream->pending_tags = NULL;
  }
}
+
/* Timestamp @buf from the pending next_pts/next_dts, make sure a segment
 * (and tags) went out first, track position, set/clear DISCONT and push
 * the buffer on @stream's pad.  Takes ownership of @buf. */
static GstFlowReturn
gst_ps_demux_send_data (GstPsDemux * demux, GstPsStream * stream,
    GstBuffer * buf)
{
  GstFlowReturn result;
  GstClockTime pts = GST_CLOCK_TIME_NONE, dts = GST_CLOCK_TIME_NONE;
  GstClockTime ts;

  if (stream == NULL)
    goto no_stream;

  /* timestamps; G_MAXUINT64 means "no pending timestamp" */
  if (G_UNLIKELY (demux->next_pts != G_MAXUINT64))
    pts = MPEGTIME_TO_GSTTIME (demux->next_pts);
  if (G_UNLIKELY (demux->next_dts != G_MAXUINT64))
    dts = MPEGTIME_TO_GSTTIME (demux->next_dts);

  gst_ps_demux_send_segment (demux, stream, pts);

  /* OK, sent new segment now prepare the buffer for sending */
  GST_BUFFER_PTS (buf) = pts;
  GST_BUFFER_DTS (buf) = dts;

  /* If we have no DTS but a PTS that means both are the same,
   * if we have neither than we don't know the current position */
  ts = dts;
  if (ts == GST_CLOCK_TIME_NONE)
    ts = pts;

  /* update position in the segment */
  if (ts != GST_CLOCK_TIME_NONE && (stream->last_ts == GST_CLOCK_TIME_NONE
          || stream->last_ts < ts)) {
    GST_LOG_OBJECT (demux,
        "last_ts update on pad %s to time %" GST_TIME_FORMAT
        ", current scr is %" GST_TIME_FORMAT, GST_PAD_NAME (stream->pad),
        GST_TIME_ARGS (ts),
        GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->current_scr)));
    stream->last_ts = ts;
    if (demux->src_segment.position == GST_CLOCK_TIME_NONE
        || stream->last_ts > demux->src_segment.position)
      gst_segment_set_position (&demux->src_segment, GST_FORMAT_TIME,
          stream->last_ts);
  }

  /* Set the buffer discont flag, and clear discont state on the stream */
  if (stream->discont) {
    GST_DEBUG_OBJECT (demux, "discont buffer to pad %" GST_PTR_FORMAT
        " with PTS %" GST_TIME_FORMAT " DTS %" GST_TIME_FORMAT,
        stream->pad, GST_TIME_ARGS (pts), GST_TIME_ARGS (dts));
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);

    stream->discont = FALSE;
  } else {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  /* consumed; the next buffer must bring its own timestamps */
  demux->next_pts = G_MAXUINT64;
  demux->next_dts = G_MAXUINT64;

  GST_LOG_OBJECT (demux, "pushing stream id 0x%02x type 0x%02x, pts time: %"
      GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,
      stream->id, stream->type, GST_TIME_ARGS (pts), gst_buffer_get_size (buf));
  result = gst_pad_push (stream->pad, buf);
  GST_LOG_OBJECT (demux, "result: %s", gst_flow_get_name (result));

  return result;

  /* ERROR */
no_stream:
  {
    GST_DEBUG_OBJECT (demux, "no stream given");
    gst_buffer_unref (buf);
    return GST_FLOW_OK;
  }
}
+
+ static inline void
+ gst_ps_demux_mark_discont (GstPsDemux * demux, gboolean discont,
+ gboolean need_segment)
+ {
+ gint i, count = demux->found_count;
+
+ /* mark discont on all streams */
+ for (i = 0; i < count; i++) {
+ GstPsStream *stream = demux->streams_found[i];
+
+ if (G_LIKELY (stream)) {
+ stream->discont |= discont;
+ stream->need_segment |= need_segment;
+ if (need_segment)
+ demux->segment_seqnum = 0;
+ GST_DEBUG_OBJECT (demux, "marked stream as discont %d, need_segment %d",
+ stream->discont, stream->need_segment);
+ }
+ }
+ }
+
+ static gboolean
+ gst_ps_demux_send_event (GstPsDemux * demux, GstEvent * event)
+ {
+ gint i, count = demux->found_count;
+ gboolean ret = FALSE;
+
+ for (i = 0; i < count; i++) {
+ GstPsStream *stream = demux->streams_found[i];
+
+ if (stream) {
+ if (!gst_pad_push_event (stream->pad, gst_event_ref (event))) {
+ GST_DEBUG_OBJECT (stream->pad, "%s event was not handled",
+ GST_EVENT_TYPE_NAME (event));
+ } else {
+ /* If at least one push returns TRUE, then we return TRUE. */
+ GST_DEBUG_OBJECT (stream->pad, "%s event was handled",
+ GST_EVENT_TYPE_NAME (event));
+ ret = TRUE;
+ }
+ }
+ }
+
+ gst_event_unref (event);
+ return ret;
+ }
+
+ static gboolean
+ gst_ps_demux_handle_dvd_event (GstPsDemux * demux, GstEvent * event)
+ {
+ const GstStructure *structure = gst_event_get_structure (event);
+ const char *type = gst_structure_get_string (structure, "event");
+ gint i;
+ gchar cur_stream_name[32];
+ GstPsStream *temp = NULL;
+ const gchar *lang_code;
+
+ if (strcmp (type, "dvd-lang-codes") == 0) {
+ GST_DEBUG_OBJECT (demux, "Handling language codes event");
+
+ /* Create a video pad to ensure have it before emit no more pads */
+ (void) gst_ps_demux_get_stream (demux, 0xe0, ST_VIDEO_MPEG2, 0);
+
+ /* Read out the languages for audio streams and request each one that
+ * is present */
+ for (i = 0; i < MAX_DVD_AUDIO_STREAMS; i++) {
+ gint stream_format;
+ gint stream_id;
+
+ g_snprintf (cur_stream_name, 32, "audio-%d-format", i);
+ if (!gst_structure_get_int (structure, cur_stream_name, &stream_format))
+ continue;
+
+ g_snprintf (cur_stream_name, 32, "audio-%d-stream", i);
+ if (!gst_structure_get_int (structure, cur_stream_name, &stream_id))
+ continue;
+ if (stream_id < 0 || stream_id >= MAX_DVD_AUDIO_STREAMS)
+ continue;
+
+ switch (stream_format) {
+ case 0x0:
+ /* AC3 */
+ stream_id += 0x80;
+ GST_DEBUG_OBJECT (demux,
+ "Audio stream %d format %d ID 0x%02x - AC3", i,
+ stream_format, stream_id);
+ temp = gst_ps_demux_get_stream (demux, stream_id, ST_PS_AUDIO_AC3, 0);
+ break;
+ case 0x2:
+ case 0x3:
+ /* MPEG audio without and with extension stream are
+ * treated the same */
+ stream_id += 0xC0;
+ GST_DEBUG_OBJECT (demux,
+ "Audio stream %d format %d ID 0x%02x - MPEG audio", i,
+ stream_format, stream_id);
+ temp = gst_ps_demux_get_stream (demux, stream_id, ST_AUDIO_MPEG1, 0);
+ break;
+ case 0x4:
+ /* LPCM */
+ stream_id += 0xA0;
+ GST_DEBUG_OBJECT (demux,
+ "Audio stream %d format %d ID 0x%02x - DVD LPCM", i,
+ stream_format, stream_id);
+ temp =
+ gst_ps_demux_get_stream (demux, stream_id, ST_PS_AUDIO_LPCM, 0);
+ break;
+ case 0x6:
+ /* DTS */
+ stream_id += 0x88;
+ GST_DEBUG_OBJECT (demux,
+ "Audio stream %d format %d ID 0x%02x - DTS", i,
+ stream_format, stream_id);
+ temp = gst_ps_demux_get_stream (demux, stream_id, ST_PS_AUDIO_DTS, 0);
+ break;
+ case 0x7:
+ /* FIXME: What range is SDDS? */
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown audio stream format in language code event: %d",
+ stream_format);
+ temp = NULL;
+ continue;
+ }
+
+ if (temp == NULL)
+ continue;
+
+ g_snprintf (cur_stream_name, 32, "audio-%d-language", i);
+ lang_code = gst_structure_get_string (structure, cur_stream_name);
+ if (lang_code) {
+ GstTagList *list = temp->pending_tags;
+
+ if (!list)
+ list = gst_tag_list_new_empty ();
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, lang_code, NULL);
+ temp->pending_tags = list;
+ }
+ }
+
+ /* And subtitle streams */
+ for (i = 0; i < MAX_DVD_SUBPICTURE_STREAMS; i++) {
+ gint stream_id;
+
+ g_snprintf (cur_stream_name, 32, "subpicture-%d-format", i);
+ if (!gst_structure_get_int (structure, cur_stream_name, &stream_id))
+ continue;
+
+ g_snprintf (cur_stream_name, 32, "subpicture-%d-stream", i);
+ if (!gst_structure_get_int (structure, cur_stream_name, &stream_id))
+ continue;
+ if (stream_id < 0 || stream_id >= MAX_DVD_SUBPICTURE_STREAMS)
+ continue;
+
+ GST_DEBUG_OBJECT (demux, "Subpicture stream %d ID 0x%02x", i,
+ 0x20 + stream_id);
+
+ /* Retrieve the subpicture stream to force pad creation */
+ temp = gst_ps_demux_get_stream (demux, 0x20 + stream_id,
+ ST_PS_DVD_SUBPICTURE, 0);
+ if (temp == NULL)
+ continue;
+
+ g_snprintf (cur_stream_name, 32, "subpicture-%d-language", i);
+ lang_code = gst_structure_get_string (structure, cur_stream_name);
+ if (lang_code) {
+ GstTagList *list = temp->pending_tags;
+
+ if (!list)
+ list = gst_tag_list_new_empty ();
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, lang_code, NULL);
+ temp->pending_tags = list;
+ }
+ }
+
+ GST_DEBUG_OBJECT (demux, "Created all pads from Language Codes event, "
+ "signalling no-more-pads");
+
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->need_no_more_pads = FALSE;
+ } else {
+ /* forward to all pads, e.g. dvd clut event */
+ gst_event_ref (event);
+ gst_ps_demux_send_event (demux, event);
+ }
+
+ gst_event_unref (event);
+ return TRUE;
+ }
+
/* Drop all buffered/partial data and per-stream timing state.
 * Called on FLUSH_STOP and from gst_ps_demux_reset(). */
static void
gst_ps_demux_flush (GstPsDemux * demux)
{
  GST_DEBUG_OBJECT (demux, "flushing demuxer");
  gst_adapter_clear (demux->adapter);
  gst_adapter_clear (demux->rev_adapter);
  gst_pes_filter_drain (&demux->filter);
  gst_ps_demux_clear_times (demux);
  /* G_MAXUINT64 marks these as unknown until new data arrives */
  demux->adapter_offset = G_MAXUINT64;
  demux->current_scr = G_MAXUINT64;
  demux->bytes_since_scr = 0;
}
+
+ static inline void
+ gst_ps_demux_clear_times (GstPsDemux * demux)
+ {
+ gint i, count = demux->found_count;
+
+ gst_flow_combiner_reset (demux->flowcombiner);
+ /* Clear the last ts for all streams */
+ for (i = 0; i < count; i++) {
+ GstPsStream *stream = demux->streams_found[i];
+
+ if (G_LIKELY (stream)) {
+ stream->last_ts = GST_CLOCK_TIME_NONE;
+ }
+ }
+ }
+
/* Advance streams that lag behind @new_start by pushing gap events, so
 * downstream sinks keep prerolling/advancing even without data on a pad. */
static inline void
gst_ps_demux_send_gap_updates (GstPsDemux * demux, GstClockTime new_start)
{
  GstClockTime base_time, stop;
  gint i, count = demux->found_count;
  GstEvent *event = NULL;

  if (new_start == GST_CLOCK_TIME_NONE)
    return;

  /* Advance all lagging streams by sending a gap event */
  if ((base_time = demux->base_time) == GST_CLOCK_TIME_NONE)
    base_time = 0;

  stop = demux->src_segment.stop;
  if (stop != GST_CLOCK_TIME_NONE)
    stop += base_time;

  /* never advance past the configured segment stop */
  if (new_start > stop)
    return;

  /* FIXME: Handle reverse playback */
  for (i = 0; i < count; i++) {
    GstPsStream *stream = demux->streams_found[i];

    if (stream) {
      /* clamp last_ts to at least the segment start */
      if (stream->last_ts == GST_CLOCK_TIME_NONE ||
          stream->last_ts < demux->src_segment.start + base_time)
        stream->last_ts = demux->src_segment.start + base_time;

      /* only send a gap once the stream lags more than its threshold */
      if (stream->last_ts + stream->segment_thresh < new_start) {
        /* should send segment info before gap event */
        gst_ps_demux_send_segment (demux, stream, GST_CLOCK_TIME_NONE);

        GST_LOG_OBJECT (demux,
            "Sending gap update to pad %s from time %" GST_TIME_FORMAT " to %"
            GST_TIME_FORMAT, GST_PAD_NAME (stream->pad),
            GST_TIME_ARGS (stream->last_ts), GST_TIME_ARGS (new_start));
        event =
            gst_event_new_gap (stream->last_ts, new_start - stream->last_ts);
        gst_pad_push_event (stream->pad, event);
        stream->last_ts = new_start;
      }
    }
  }
}
+
+ static inline gboolean
+ have_open_streams (GstPsDemux * demux)
+ {
+ return (demux->streams_found[0] != NULL);
+ }
+
/* Sink pad event handler.  NOTE on ownership: gst_ps_demux_send_event()
 * and gst_ps_demux_handle_dvd_event() consume the event; the SEGMENT and
 * CAPS branches unref it explicitly. */
static gboolean
gst_ps_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean res = TRUE;
  GstPsDemux *demux = GST_PS_DEMUX (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      gst_ps_demux_send_event (demux, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      gst_ps_demux_send_event (demux, event);
      gst_segment_init (&demux->sink_segment, GST_FORMAT_UNDEFINED);
      gst_ps_demux_flush (demux);
      break;
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *segment;

      gst_event_parse_segment (event, &segment);
      gst_segment_copy_into (segment, &demux->sink_segment);

      GST_INFO_OBJECT (demux, "received segment %" GST_SEGMENT_FORMAT, segment);

      /* we need to emit a new segment */
      gst_ps_demux_mark_discont (demux, TRUE, TRUE);

      /* BYTES upstream segments are converted to TIME using the measured
       * SCR rate (when known); TIME segments are adopted directly */
      if (segment->format == GST_FORMAT_BYTES
          && demux->scr_rate_n != G_MAXUINT64
          && demux->scr_rate_d != G_MAXUINT64) {
        demux->src_segment.rate = segment->rate;
        demux->src_segment.applied_rate = segment->applied_rate;
        demux->src_segment.format = GST_FORMAT_TIME;
        demux->src_segment.start = BYTES_TO_GSTTIME (segment->start);
        demux->src_segment.stop = BYTES_TO_GSTTIME (segment->stop);
        demux->src_segment.time = BYTES_TO_GSTTIME (segment->time);
      } else if (segment->format == GST_FORMAT_TIME) {
        /* we expect our timeline (SCR, PTS) to match the one from upstream,
         * if not, will adjust with offset later on */
        gst_segment_copy_into (segment, &demux->src_segment);
      }

      gst_event_unref (event);

      break;
    }
    case GST_EVENT_EOS:
      GST_INFO_OBJECT (demux, "Received EOS");
      if (!gst_ps_demux_send_event (demux, event)
          && !have_open_streams (demux)) {
        GST_WARNING_OBJECT (demux, "EOS and no streams open");
        GST_ELEMENT_ERROR (demux, STREAM, FAILED,
            ("Internal data stream error."), ("No valid streams detected"));
      }
      break;
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:
    {
      const GstStructure *structure = gst_event_get_structure (event);

      if (structure != NULL
          && gst_structure_has_name (structure, "application/x-gst-dvd")) {
        res = gst_ps_demux_handle_dvd_event (demux, event);
      } else {
        gst_ps_demux_send_event (demux, event);
      }
      break;
    }
    case GST_EVENT_CAPS:
      /* sink caps carry no information for us; swallow the event */
      gst_event_unref (event);
      break;
    default:
      gst_ps_demux_send_event (demux, event);
      break;
  }

  return res;
}
+
/* Handle a seek in push mode: first try forwarding the seek upstream
 * unchanged; if that fails, convert a TIME seek to BYTES using the
 * measured SCR rate and retry.  Takes ownership of @event. */
static gboolean
gst_ps_demux_handle_seek_push (GstPsDemux * demux, GstEvent * event)
{
  gboolean res = FALSE;
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gint64 bstart, bstop;
  GstEvent *bevent;

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
      &stop_type, &stop);

  GST_DEBUG_OBJECT (demux, "seek event, rate: %f start: %" GST_TIME_FORMAT
      " stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
      GST_TIME_ARGS (stop));

  /* a direct BYTES seek on the demuxer makes no sense */
  if (format == GST_FORMAT_BYTES) {
    GST_DEBUG_OBJECT (demux, "seek not supported on format %d", format);
    goto not_supported;
  }

  GST_DEBUG_OBJECT (demux, "seek - trying directly upstream first");

  /* first try original format seek */
  (void) gst_event_ref (event);
  if ((res = gst_pad_push_event (demux->sinkpad, event)))
    goto done;

  if (format != GST_FORMAT_TIME) {
    /* From here down, we only support time based seeks */
    GST_DEBUG_OBJECT (demux, "seek not supported on format %d", format);
    goto not_supported;
  }

  /* We need to convert to byte based seek and we need a scr_rate for that. */
  if (demux->scr_rate_n == G_MAXUINT64 || demux->scr_rate_d == G_MAXUINT64) {
    GST_DEBUG_OBJECT (demux, "seek not possible, no scr_rate");
    goto not_supported;
  }

  GST_DEBUG_OBJECT (demux, "try with scr_rate interpolation");

  bstart = GSTTIME_TO_BYTES ((guint64) start);
  bstop = GSTTIME_TO_BYTES ((guint64) stop);

  GST_DEBUG_OBJECT (demux, "in bytes bstart %" G_GINT64_FORMAT " bstop %"
      G_GINT64_FORMAT, bstart, bstop);
  bevent = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, start_type,
      bstart, stop_type, bstop);

  res = gst_pad_push_event (demux->sinkpad, bevent);

done:
  gst_event_unref (event);
  return res;

not_supported:
  {
    gst_event_unref (event);

    return FALSE;
  }
}
+
+ #define MAX_RECURSION_COUNT 100
+
+ /* Binary search for requested SCR */
+ static inline guint64
+ find_offset (GstPsDemux * demux, guint64 scr,
+ guint64 min_scr, guint64 min_scr_offset,
+ guint64 max_scr, guint64 max_scr_offset, int recursion_count)
+ {
+ guint64 scr_rate_n = max_scr_offset - min_scr_offset;
+ guint64 scr_rate_d = max_scr - min_scr;
+ guint64 fscr = scr;
+ guint64 offset;
+
+ if (recursion_count > MAX_RECURSION_COUNT) {
+ return -1;
+ }
+
+ offset = min_scr_offset +
+ MIN (gst_util_uint64_scale (scr - min_scr, scr_rate_n,
+ scr_rate_d), demux->sink_segment.stop);
+
+ if (!gst_ps_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr, 0)) {
+ gst_ps_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
+ }
+
+ if (fscr == scr || fscr == min_scr || fscr == max_scr) {
+ return offset;
+ }
+
+ if (fscr < scr) {
+ return find_offset (demux, scr, fscr, offset, max_scr, max_scr_offset,
+ recursion_count + 1);
+ } else {
+ return find_offset (demux, scr, min_scr, min_scr_offset, fscr, offset,
+ recursion_count + 1);
+ }
+ }
+
/* Translate the target position in @seeksegment into a byte offset in the
 * sink (BYTES) segment by searching for the matching SCR, then store it as
 * the new sink position.  Returns FALSE when no offset could be found. */
static inline gboolean
gst_ps_demux_do_seek (GstPsDemux * demux, GstSegment * seeksegment)
{
  gboolean found;
  guint64 fscr, offset;
  guint64 scr = GSTTIME_TO_MPEGTIME (seeksegment->position + demux->base_time);

  /* In some clips the PTS values are completely unaligned with SCR values.
   * To improve the seek in that situation we apply a factor considering the
   * relationship between last PTS and last SCR */
  if (demux->last_scr > demux->last_pts)
    scr = gst_util_uint64_scale (scr, demux->last_scr, demux->last_pts);

  /* clamp the target into the observed SCR range */
  scr = MIN (demux->last_scr, scr);
  scr = MAX (demux->first_scr, scr);
  fscr = scr;

  GST_INFO_OBJECT (demux, "sink segment configured %" GST_SEGMENT_FORMAT
      ", trying to go at SCR: %" G_GUINT64_FORMAT, &demux->sink_segment, scr);

  offset =
      find_offset (demux, scr, demux->first_scr, demux->first_scr_offset,
      demux->last_scr, demux->last_scr_offset, 0);

  if (offset == (guint64) - 1) {
    return FALSE;
  }

  /* refine: step byte-wise until the found SCR brackets the target */
  found = gst_ps_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
  if (!found)
    found = gst_ps_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr, 0);

  while (found && fscr < scr) {
    offset++;
    found = gst_ps_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
  }

  while (found && fscr > scr && offset > 0) {
    offset--;
    found = gst_ps_demux_scan_backward_ts (demux, &offset, SCAN_SCR, &fscr, 0);
  }

  GST_INFO_OBJECT (demux, "doing seek at offset %" G_GUINT64_FORMAT
      " SCR: %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
      offset, fscr, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (fscr)));

  gst_segment_set_position (&demux->sink_segment, GST_FORMAT_BYTES, offset);

  return TRUE;
}
+
+ /* Handle a TIME-format seek event in pull mode: optionally flush
+ * up/downstream, take the sinkpad stream lock, apply the seek to a copy
+ * of the source segment, perform the actual byte-offset seek, then
+ * commit the new segment and restart the pulling task.
+ * Consumes @event; returns TRUE on success. */
+ static gboolean
+ gst_ps_demux_handle_seek_pull (GstPsDemux * demux, GstEvent * event)
+ {
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gdouble rate;
+ gboolean update, flush, accurate;
+ GstSegment seeksegment;
+ GstClockTime first_pts = MPEGTIME_TO_GSTTIME (demux->first_pts);
+ guint32 seek_seqnum = gst_event_get_seqnum (event);
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &start_type, &start, &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME)
+ goto wrong_format;
+
+ GST_DEBUG_OBJECT (demux, "Seek requested start %" GST_TIME_FORMAT " stop %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+ /* We need to convert to byte based seek and we need a scr_rate for that. */
+ if (demux->scr_rate_n == G_MAXUINT64 || demux->scr_rate_d == G_MAXUINT64)
+ goto no_scr_rate;
+
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+ accurate = flags & GST_SEEK_FLAG_ACCURATE;
+
+ /* keyframe = flags & GST_SEEK_FLAG_KEY_UNIT; *//* FIXME */
+
+ if (flush) {
+ GstEvent *event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (event, seek_seqnum);
+ /* Flush start up and downstream to make sure data flow and loops are
+ idle */
+ demux->flushing = TRUE;
+ gst_ps_demux_send_event (demux, event);
+ gst_pad_push_event (demux->sinkpad, gst_event_new_flush_start ());
+ } else {
+ /* Pause the pulling task */
+ gst_pad_pause_task (demux->sinkpad);
+ }
+
+ /* Take the stream lock */
+ GST_PAD_STREAM_LOCK (demux->sinkpad);
+
+ if (flush) {
+ /* Stop flushing upstream we need to pull */
+ demux->flushing = FALSE;
+ gst_pad_push_event (demux->sinkpad, gst_event_new_flush_stop (TRUE));
+ }
+
+ /* Work on a copy until we are sure the seek succeeded. */
+ memcpy (&seeksegment, &demux->src_segment, sizeof (GstSegment));
+
+ GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
+ &demux->src_segment);
+
+ /* Apply the seek to our segment */
+ if (!gst_segment_do_seek (&seeksegment, rate, format, flags,
+ start_type, start, stop_type, stop, &update))
+ goto seek_error;
+
+ GST_DEBUG_OBJECT (demux, "seek segment configured %" GST_SEGMENT_FORMAT,
+ &seeksegment);
+
+ if (flush || seeksegment.position != demux->src_segment.position) {
+ /* Do the actual seeking */
+ if (!gst_ps_demux_do_seek (demux, &seeksegment)) {
++#ifdef TIZEN_FEATURE_MPEGDEMUX_MODIFICATION
+ /* Tizen: route through seek_error so the event is unreffed and the
+ * sinkpad STREAM_LOCK (taken above) is released; the plain
+ * "return FALSE" below leaks both. */
++ goto seek_error;
++#else
+ return FALSE;
++#endif
+ }
+ }
+
+ /* check the limits */
+ if (seeksegment.rate > 0.0 && first_pts != G_MAXUINT64
+ && seeksegment.start < first_pts - demux->base_time) {
+ seeksegment.position = first_pts - demux->base_time;
+ if (!accurate)
+ seeksegment.start = seeksegment.position;
+ }
+
+ /* update the rate in our src segment */
+ demux->sink_segment.rate = rate;
+
+ GST_DEBUG_OBJECT (demux, "seek segment adjusted %" GST_SEGMENT_FORMAT,
+ &seeksegment);
+
+ if (flush) {
+ GstEvent *event = gst_event_new_flush_stop (TRUE);
+ /* Stop flushing, the sinks are at time 0 now */
+ gst_event_set_seqnum (event, seek_seqnum);
+ gst_ps_demux_send_event (demux, event);
+ }
+
+ if (flush || seeksegment.position != demux->src_segment.position) {
+ gst_ps_demux_flush (demux);
+ }
+
+ /* Ok seek succeeded, take the newly configured segment */
+ memcpy (&demux->src_segment, &seeksegment, sizeof (GstSegment));
+
+ /* Notify about the start of a new segment */
+ if (demux->src_segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT (demux),
+ gst_message_new_segment_start (GST_OBJECT (demux),
+ demux->src_segment.format, demux->src_segment.position));
+ }
+
+ /* Tell all the stream a new segment is needed */
+ gst_ps_demux_mark_discont (demux, TRUE, TRUE);
+
+ /* Update the segment_seqnum with the seek event seqnum */
+ demux->segment_seqnum = seek_seqnum;
+
+ gst_pad_start_task (demux->sinkpad,
+ (GstTaskFunction) gst_ps_demux_loop, demux->sinkpad, NULL);
+
+ GST_PAD_STREAM_UNLOCK (demux->sinkpad);
+
+ gst_event_unref (event);
+ return TRUE;
+
+ /* ERRORS */
+ wrong_format:
+ {
+ /* NOTE(review): only TIME is accepted above, yet the message claims
+ * "TIME or BYTES" — consider correcting the message upstream. */
+ GST_WARNING_OBJECT (demux, "we only support seeking in TIME or BYTES "
+ "formats");
+ gst_event_unref (event);
+ return FALSE;
+ }
+ no_scr_rate:
+ {
+ GST_WARNING_OBJECT (demux, "seek not possible, no scr_rate");
+ gst_event_unref (event);
+ return FALSE;
+ }
+ seek_error:
+ {
+ GST_WARNING_OBJECT (demux, "couldn't perform seek");
++#ifdef TIZEN_FEATURE_MPEGDEMUX_MODIFICATION
+ /* Tizen: release the stream lock taken before gst_segment_do_seek();
+ * without this, a failed seek leaves the sinkpad locked forever. */
++ GST_PAD_STREAM_UNLOCK (demux->sinkpad);
++#endif
+ gst_event_unref (event);
+ return FALSE;
+ }
+ }
+
+ /* Source pad event handler: dispatches SEEK to the pull- or push-mode
+ * seek implementation depending on demux->random_access, clears the
+ * not-linked flag on RECONFIGURE, and forwards everything else to the
+ * sink pad. Takes ownership of @event on every path. */
+ static gboolean
+ gst_ps_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ gboolean res = FALSE;
+ GstPsDemux *demux = GST_PS_DEMUX (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ if (demux->random_access) {
+ res = gst_ps_demux_handle_seek_pull (demux, event);
+ } else {
+ res = gst_ps_demux_handle_seek_push (demux, event);
+ }
+ break;
+ case GST_EVENT_RECONFIGURE:{
+ GstPsStream *stream;
+
+ /* Downstream got relinked: allow pushing to this stream again. */
+ stream = gst_ps_demux_get_stream_from_pad (demux, pad);
+ if (stream != NULL)
+ stream->notlinked = FALSE;
+
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_push_event (demux->sinkpad, event);
+ break;
+ }
+
+ return res;
+ }
+
+ /* Source pad query handler. POSITION/DURATION are answered from the
+ * source segment when possible, else delegated upstream (DURATION can
+ * also be derived from an upstream BYTES duration via the mux rate).
+ * SEEKING reports TIME seekability based on mode and SCR-rate
+ * availability; SEGMENT reports the current src_segment. */
+ static gboolean
+ gst_ps_demux_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ gboolean res = FALSE;
+ GstPsDemux *demux = GST_PS_DEMUX (parent);
+
+ GST_LOG_OBJECT (demux, "Have query of type %d on pad %" GST_PTR_FORMAT,
+ GST_QUERY_TYPE (query), pad);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ GstClockTime pos;
+ GstFormat format;
+
+ /* See if upstream can immediately answer */
+ res = gst_pad_peer_query (demux->sinkpad, query);
+ if (res)
+ break;
+
+ gst_query_parse_position (query, &format, NULL);
+
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "position not supported for format: %s",
+ gst_format_get_name (format));
+ goto not_supported;
+ }
+
+ /* Position relative to the segment start. */
+ pos = demux->src_segment.position - demux->src_segment.start;
+ GST_LOG_OBJECT (demux, "Position %" GST_TIME_FORMAT, GST_TIME_ARGS (pos));
+
+ gst_query_set_position (query, format, pos);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+ gint64 duration;
+ GstQuery *byte_query;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ /* Fast path: we already know the TIME duration. */
+ if (G_LIKELY (format == GST_FORMAT_TIME &&
+ GST_CLOCK_TIME_IS_VALID (demux->src_segment.duration))) {
+ gst_query_set_duration (query, GST_FORMAT_TIME,
+ demux->src_segment.duration);
+ res = TRUE;
+ break;
+ }
+
+ /* For any format other than bytes, see if upstream knows first */
+ if (format == GST_FORMAT_BYTES) {
+ GST_DEBUG_OBJECT (demux, "duration not supported for format: %s",
+ gst_format_get_name (format));
+ goto not_supported;
+ }
+
+ if (gst_pad_peer_query (demux->sinkpad, query)) {
+ res = TRUE;
+ break;
+ }
+
+ /* Upstream didn't know, so we can only answer TIME queries from
+ * here on */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "duration not supported for format: %s",
+ gst_format_get_name (format));
+ goto not_supported;
+ }
+
+ /* Without a mux rate we cannot convert bytes to time. */
+ if (demux->mux_rate == -1) {
+ GST_DEBUG_OBJECT (demux, "duration not possible, no mux_rate");
+ goto not_supported;
+ }
+
+ /* Ask upstream for the byte length and convert via the mux rate. */
+ byte_query = gst_query_new_duration (GST_FORMAT_BYTES);
+
+ if (!gst_pad_peer_query (demux->sinkpad, byte_query)) {
+ GST_LOG_OBJECT (demux, "query on peer pad failed");
+ gst_query_unref (byte_query);
+ goto not_supported;
+ }
+
+ gst_query_parse_duration (byte_query, &format, &duration);
+ gst_query_unref (byte_query);
+
+ GST_LOG_OBJECT (demux,
+ "query on peer pad reported bytes %" G_GUINT64_FORMAT, duration);
+
+ duration = BYTES_TO_GSTTIME ((guint64) duration);
+
+ GST_LOG_OBJECT (demux, "converted to time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (duration));
+
+ gst_query_set_duration (query, GST_FORMAT_TIME, duration);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+
+ res = TRUE;
+ if (demux->random_access) {
+ /* In pull mode we can seek in TIME format if we have the SCR */
+ if (fmt != GST_FORMAT_TIME || demux->scr_rate_n == G_MAXUINT64
+ || demux->scr_rate_d == G_MAXUINT64) {
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ } else {
+ gint64 dur = -1;
+ if (GST_CLOCK_TIME_IS_VALID (demux->src_segment.duration))
+ dur = demux->src_segment.duration;
+ gst_query_set_seeking (query, fmt, TRUE, 0, dur);
+ }
+ } else {
+ if (fmt == GST_FORMAT_BYTES) {
+ /* Seeking in BYTES format not supported at all */
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ } else {
+ GstQuery *peerquery;
+ gboolean seekable;
+
+ /* Then ask upstream */
+ res = gst_pad_peer_query (demux->sinkpad, query);
+ if (res) {
+ /* If upstream can handle seeks we're done, if it
+ * can't we still have our TIME->BYTES conversion seek
+ */
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+ if (seekable || fmt != GST_FORMAT_TIME)
+ goto beach;
+ }
+
+ /* We can seek if upstream supports BYTES seeks and we
+ * have the SCR
+ */
+ peerquery = gst_query_new_seeking (GST_FORMAT_BYTES);
+ res = gst_pad_peer_query (demux->sinkpad, peerquery);
+ if (!res || demux->scr_rate_n == G_MAXUINT64
+ || demux->scr_rate_d == G_MAXUINT64) {
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ } else {
+ gst_query_parse_seeking (peerquery, NULL, &seekable, NULL, NULL);
+ if (seekable)
+ gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, -1);
+ else
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ }
+
+ gst_query_unref (peerquery);
+ res = TRUE;
+ }
+ }
+ break;
+ }
+ case GST_QUERY_SEGMENT:{
+ GstFormat format;
+ gint64 start, stop;
+
+ format = demux->src_segment.format;
+
+ /* Report segment boundaries in stream time. */
+ start =
+ gst_segment_to_stream_time (&demux->src_segment, format,
+ demux->src_segment.start);
+ if ((stop = demux->src_segment.stop) == -1)
+ stop = demux->src_segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&demux->src_segment, format, stop);
+
+ gst_query_set_segment (query, demux->src_segment.rate, format, start,
+ stop);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ beach:
+ return res;
+ not_supported:
+ return FALSE;
+ }
+
+ /* Reset the program stream map to the default DVD-style stream-id ->
+ * stream-type assignments; every id not covered by a range below is
+ * marked unknown (-1) until a PSM packet overrides it. */
+ static void
+ gst_ps_demux_reset_psm (GstPsDemux * demux)
+ {
+ gint i;
+
+ /* Assign @type to every psm slot in the inclusive range [start..stop]. */
+ #define FILL_TYPE(start, stop, type) \
+ for (i=start; i <= stop; i++) \
+ demux->psm[i] = type;
+
+ /* Initialize all fields to -1 first */
+ FILL_TYPE (0x00, GST_PS_DEMUX_MAX_PSM - 1, -1);
+
+ FILL_TYPE (0x20, 0x3f, ST_PS_DVD_SUBPICTURE);
+
+ FILL_TYPE (0x80, 0x87, ST_PS_AUDIO_AC3);
+ FILL_TYPE (0x88, 0x9f, ST_PS_AUDIO_DTS);
+ FILL_TYPE (0xa0, 0xaf, ST_PS_AUDIO_LPCM);
+
+ FILL_TYPE (0xc0, 0xdf, ST_AUDIO_MPEG1);
+ FILL_TYPE (0xe0, 0xef, ST_GST_VIDEO_MPEG1_OR_2);
+
+ #undef FILL_TYPE
+ }
+
+ /* ISO/IEC 13818-1:
+ * pack_header() {
+ * pack_start_code 32 bslbf -+
+ * '01' 2 bslbf |
+ * system_clock_reference_base [32..30] 3 bslbf |
+ * marker_bit 1 bslbf |
+ * system_clock_reference_base [29..15] 15 bslbf |
+ * marker_bit 1 bslbf |
+ * system_clock_reference_base [14..0] 15 bslbf |
+ * marker_bit 1 bslbf | 112 bits
+ * system_clock_reference_extension 9 ubslbf |
+ * marker_bit 1 bslbf |
+ * program_mux_rate 22 ubslbf |
+ * marker_bit 1 bslbf |
+ * marker_bit 1 bslbf |
+ * reserved 5 bslbf |
+ * pack_stuffing_length 3 ubslbf -+
+ *
+ * for (i = 0; i < pack_stuffing_length; i++) {
+ * stuffing_byte '1111 1111' 8 bslbf
+ * }
+ *
+ * 112 bits = 14 bytes, as max value for pack_stuffing_length is 7, then
+ * in total it's needed 14 + 7 = 21 bytes.
+ */
+ #define PACK_START_SIZE 21
+
+ /* Parse a pack_header() at the head of the adapter (MPEG-1 or MPEG-2
+ * layout), extract the SCR and mux rate, maintain the SCR byte-rate
+ * estimate and discontinuity adjustment, and flush the consumed bytes.
+ * Returns GST_FLOW_OK, GST_FLOW_LOST_SYNC on bad marker bits, or
+ * GST_FLOW_NEED_MORE_DATA when fewer than PACK_START_SIZE bytes are
+ * buffered. */
+ static GstFlowReturn
+ gst_ps_demux_parse_pack_start (GstPsDemux * demux)
+ {
+ const guint8 *data;
+ guint length;
+ guint32 scr1, scr2;
+ guint64 scr, scr_adjusted, new_rate;
+ guint64 scr_rate_n;
+ guint64 scr_rate_d;
+ guint avail = gst_adapter_available (demux->adapter);
+
+ GST_LOG ("parsing pack start");
+
+ if (G_UNLIKELY (avail < PACK_START_SIZE))
+ goto need_more_data;
+
+ data = gst_adapter_map (demux->adapter, PACK_START_SIZE);
+
+ /* skip start code */
+ data += 4;
+
+ scr1 = GST_READ_UINT32_BE (data);
+ scr2 = GST_READ_UINT32_BE (data + 4);
+
+ /* fixed length to begin with, start code and two scr values */
+ length = 8 + 4;
+
+ /* start parsing the stream */
+ if ((*data & 0xc0) == 0x40) {
+ guint32 scr_ext;
+ guint32 next32;
+ guint8 stuffing_bytes;
+
+ GST_LOG ("Found MPEG2 stream");
+ demux->is_mpeg2_pack = TRUE;
+
+ /* mpeg2 has more data */
+ length += 2;
+
+ /* :2=01 ! scr:3 ! marker:1==1 ! scr:15 ! marker:1==1 ! scr:15 */
+
+ /* check markers */
+ if (G_UNLIKELY ((scr1 & 0xc4000400) != 0x44000400))
+ goto lost_sync;
+
+ scr = ((guint64) scr1 & 0x38000000) << 3;
+ scr |= ((guint64) scr1 & 0x03fff800) << 4;
+ scr |= ((guint64) scr1 & 0x000003ff) << 5;
+ scr |= ((guint64) scr2 & 0xf8000000) >> 27;
+
+ /* marker:1==1 ! scr_ext:9 ! marker:1==1 */
+ if (G_UNLIKELY ((scr2 & 0x04010000) != 0x04010000))
+ goto lost_sync;
+
+ scr_ext = (scr2 & 0x03fe0000) >> 17;
+ /* We keep the offset of this scr */
+ demux->cur_scr_offset = demux->adapter_offset + 12;
+
+ GST_LOG_OBJECT (demux, "SCR: 0x%08" G_GINT64_MODIFIER "x SCRE: 0x%08x",
+ scr, scr_ext);
+
+ if (scr_ext) {
+ scr = (scr * 300 + scr_ext % 300) / 300;
+ }
+ /* SCR has been converted into units of 90Khz ticks to make it comparable
+ to DTS/PTS, that also implies 1 tick rounding error */
+ data += 6;
+ /* PMR:22 ! :2==11 ! reserved:5 ! stuffing_len:3 */
+ next32 = GST_READ_UINT32_BE (data);
+ if (G_UNLIKELY ((next32 & 0x00000300) != 0x00000300))
+ goto lost_sync;
+
+ new_rate = (next32 & 0xfffffc00) >> 10;
+
+ stuffing_bytes = (next32 & 0x07);
+ GST_LOG_OBJECT (demux, "stuffing bytes: %d", stuffing_bytes);
+
+ data += 4;
+ length += stuffing_bytes;
+ /* Stuffing bytes must all be 0xff per the spec. */
+ while (stuffing_bytes--) {
+ if (*data++ != 0xff)
+ goto lost_sync;
+ }
+ } else {
+ GST_DEBUG ("Found MPEG1 stream");
+ demux->is_mpeg2_pack = FALSE;
+
+ /* check markers */
+ if (G_UNLIKELY ((scr1 & 0xf1000100) != 0x21000100))
+ goto lost_sync;
+
+ if (G_UNLIKELY ((scr2 & 0x01800001) != 0x01800001))
+ goto lost_sync;
+
+ /* :4=0010 ! scr:3 ! marker:1==1 ! scr:15 ! marker:1==1 ! scr:15 ! marker:1==1 */
+ scr = ((guint64) scr1 & 0x0e000000) << 5;
+ scr |= ((guint64) scr1 & 0x00fffe00) << 6;
+ scr |= ((guint64) scr1 & 0x000000ff) << 7;
+ scr |= ((guint64) scr2 & 0xfe000000) >> 25;
+
+ /* We keep the offset of this scr */
+ demux->cur_scr_offset = demux->adapter_offset + 8;
+
+ /* marker:1==1 ! mux_rate:22 ! marker:1==1 */
+ new_rate = (scr2 & 0x007ffffe) >> 1;
+
+ data += 8;
+ }
+
+ if (demux->ignore_scr) {
+ /* update only first/current_scr with raw scr value to start streaming
+ * after parsing 2 seconds long data with no-more-pad */
+ if (demux->first_scr == G_MAXUINT64) {
+ demux->first_scr = scr;
+ demux->first_scr_offset = demux->cur_scr_offset;
+ }
+
+ demux->current_scr = scr;
+
+ goto out;
+ }
+
+ new_rate *= MPEG_MUX_RATE_MULT;
+
+ /* scr adjusted is the new scr found + the collected adjustment */
+ scr_adjusted = scr + demux->scr_adjust;
+
+ GST_LOG_OBJECT (demux,
+ "SCR: %" G_GINT64_FORMAT " (%" G_GINT64_FORMAT "), mux_rate %"
+ G_GINT64_FORMAT ", GStreamer Time:%" GST_TIME_FORMAT,
+ scr, scr_adjusted, new_rate,
+ GST_TIME_ARGS (MPEGTIME_TO_GSTTIME ((guint64) scr)));
+
+ /* keep the first src in order to calculate delta time */
+ if (G_UNLIKELY (demux->first_scr == G_MAXUINT64)) {
+ gint64 diff;
+
+ demux->first_scr = scr;
+ demux->first_scr_offset = demux->cur_scr_offset;
+ demux->base_time = MPEGTIME_TO_GSTTIME (demux->first_scr);
+ GST_DEBUG_OBJECT (demux, "determined base_time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->base_time));
+ /* at begin consider the new_rate as the scr rate, bytes/clock ticks */
+ scr_rate_n = new_rate;
+ scr_rate_d = CLOCK_FREQ;
+ /* our SCR timeline might have offset wrt upstream timeline */
+ if (demux->sink_segment.format == GST_FORMAT_TIME) {
+ if (demux->sink_segment.start > demux->base_time)
+ diff = -(demux->sink_segment.start - demux->base_time);
+ else
+ diff = demux->base_time - demux->sink_segment.start;
+ if (diff > GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "diff of %" GST_TIME_FORMAT
+ " wrt upstream start %" GST_TIME_FORMAT "; adjusting base",
+ GST_TIME_ARGS (diff), GST_TIME_ARGS (demux->sink_segment.start));
+ demux->base_time += diff;
+ }
+ }
+ } else if (G_LIKELY (demux->first_scr_offset != demux->cur_scr_offset)) {
+ /* estimate byte rate related to the SCR */
+ scr_rate_n = demux->cur_scr_offset - demux->first_scr_offset;
+ scr_rate_d = scr_adjusted - demux->first_scr;
+ } else {
+ scr_rate_n = demux->scr_rate_n;
+ scr_rate_d = demux->scr_rate_d;
+ }
+
+ GST_LOG_OBJECT (demux, "%s mode scr: %" G_GUINT64_FORMAT " at %"
+ G_GUINT64_FORMAT ", first scr: %" G_GUINT64_FORMAT
+ " at %" G_GUINT64_FORMAT ", scr rate: %" G_GUINT64_FORMAT
+ "/%" G_GUINT64_FORMAT "(%f)",
+ ((demux->sink_segment.rate >= 0.0) ? "forward" : "backward"),
+ scr, demux->cur_scr_offset,
+ demux->first_scr, demux->first_scr_offset,
+ scr_rate_n, scr_rate_d, (float) scr_rate_n / scr_rate_d);
+
+ /* adjustment of the SCR */
+ if (G_LIKELY (demux->current_scr != G_MAXUINT64)) {
+ guint64 diff;
+ guint64 old_scr, old_mux_rate, bss, adjust = 0;
+
+ /* keep SCR of the previous packet */
+ old_scr = demux->current_scr;
+ old_mux_rate = demux->mux_rate;
+
+ /* Bytes since SCR is the amount we placed in the adapter since then
+ * (demux->bytes_since_scr) minus the amount remaining in the adapter,
+ * clamped to >= 0 */
+ bss = MAX (0, (gint) (demux->bytes_since_scr - avail));
+
+ /* estimate the new SCR using the previous one according the notes
+ on point 2.5.2.2 of the ISO/IEC 13818-1 document */
+ if (old_mux_rate != 0)
+ adjust = (bss * CLOCK_FREQ) / old_mux_rate;
+
+ if (demux->sink_segment.rate >= 0.0)
+ demux->next_scr = old_scr + adjust;
+ else
+ demux->next_scr = old_scr - adjust;
+
+ GST_LOG_OBJECT (demux,
+ "bss: %" G_GUINT64_FORMAT ", next_scr: %" G_GUINT64_FORMAT
+ ", old_scr: %" G_GUINT64_FORMAT ", scr: %" G_GUINT64_FORMAT,
+ bss, demux->next_scr, old_scr, scr_adjusted);
+
+ /* calculate the absolute difference between the last scr and
+ the new one */
+ if (G_UNLIKELY (old_scr > scr_adjusted))
+ diff = old_scr - scr_adjusted;
+ else
+ diff = scr_adjusted - old_scr;
+
+ /* if the difference is more than 1 second we need to reconfigure
+ adjustment */
+ if (G_UNLIKELY (diff > CLOCK_FREQ)) {
+ demux->scr_adjust = demux->next_scr - scr;
+ GST_LOG_OBJECT (demux, "discont found, diff: %" G_GINT64_FORMAT
+ ", adjust %" G_GINT64_FORMAT, diff, demux->scr_adjust);
+ scr_adjusted = demux->next_scr;
+ /* don't update rate estimation on disconts */
+ scr_rate_n = demux->scr_rate_n;
+ scr_rate_d = demux->scr_rate_d;
+ } else {
+ demux->next_scr = scr_adjusted;
+ }
+ }
+
+ /* update the current_scr and rate members */
+ demux->mux_rate = new_rate;
+ demux->current_scr = scr_adjusted;
+ demux->scr_rate_n = scr_rate_n;
+ demux->scr_rate_d = scr_rate_d;
+
+ /* Reset the bytes_since_scr value to count the data remaining in the
+ * adapter */
+ demux->bytes_since_scr = avail;
+
+ /* Now check for all streams if they're behind the new SCR and if
+ * they are then move them forward to the SCR position */
+ gst_ps_demux_send_gap_updates (demux,
+ MPEGTIME_TO_GSTTIME (demux->current_scr - demux->first_scr));
+
+ out:
+ gst_adapter_unmap (demux->adapter);
+ gst_adapter_flush (demux->adapter, length);
+ ADAPTER_OFFSET_FLUSH (length);
+
+ return GST_FLOW_OK;
+
+ lost_sync:
+ {
+ GST_DEBUG_OBJECT (demux, "lost sync");
+ gst_adapter_unmap (demux->adapter);
+ return GST_FLOW_LOST_SYNC;
+ }
+ need_more_data:
+ {
+ GST_DEBUG_OBJECT (demux, "need more data");
+ return GST_FLOW_NEED_MORE_DATA;
+ }
+ }
+
+ /* ISO/IEC 13818-1:
+ * system_header () {
+ * system_header_start_code 32 bslbf -+
+ * header_length 16 uimsbf |
+ * marker_bit 1 bslbf |
+ * rate_bound 22 uimsbf |
+ * marker_bit 1 bslbf |
+ * audio_bound 6 uimsbf |
+ * fixed_flag 1 bslbf |
+ * CSPS_flag 1 bslbf | 96 bits
+ * system_audio_lock_flag 1 bslbf |
+ * system_video_lock_flag 1 bslbf |
+ * marker_bit 1 bslbf |
+ * video_bound 5 uimsbf |
+ * packet_rate_restriction_flag 1 bslbf |
+ * reserved_bits 7 bslbf -+
+ * while (nextbits () = = '1') {
+ * stream_id 8 uimsbf -+
+ * '11' 2 bslbf | 24 bits
+ * P-STD_buffer_bound_scale 1 bslbf |
+ * P-STD_buffer_size_bound 13 uimsbf -+
+ * }
+ * }
+ * 96 bits = 12 bytes, 24 bits = 3 bytes.
+ */
+
+ /* Parse a system_header() at the head of the adapter: validate the
+ * marker bits, log the header fields (rate/audio/video bounds, lock
+ * flags), walk the per-stream P-STD entries, and flush the consumed
+ * bytes. The parsed values are only logged; nothing is stored on the
+ * demuxer. Returns GST_FLOW_OK, GST_FLOW_LOST_SYNC on malformed
+ * headers, or GST_FLOW_NEED_MORE_DATA. */
+ static GstFlowReturn
+ gst_ps_demux_parse_sys_head (GstPsDemux * demux)
+ {
+ guint16 length;
+ const guint8 *data;
+ #ifndef GST_DISABLE_GST_DEBUG
+ gboolean csps;
+ #endif
+
+ /* NOTE(review): this jump reaches need_more_data before any
+ * gst_adapter_map() call, yet that label calls gst_adapter_unmap() —
+ * confirm unmapping an unmapped adapter is harmless here. */
+ if (gst_adapter_available (demux->adapter) < 6)
+ goto need_more_data;
+
+ /* start code + length */
+ data = gst_adapter_map (demux->adapter, 6);
+
+ /* skip start code */
+ data += 4;
+
+ length = GST_READ_UINT16_BE (data);
+ GST_DEBUG_OBJECT (demux, "length %d", length);
+
+ /* header_length excludes start code and the length field itself */
+ length += 6;
+
+ gst_adapter_unmap (demux->adapter);
+ if (gst_adapter_available (demux->adapter) < length)
+ goto need_more_data;
+
+ data = gst_adapter_map (demux->adapter, length);
+
+ /* skip start code and length */
+ data += 6;
+
+ /* marker:1==1 ! rate_bound:22 | marker:1==1 */
+ if ((*data & 0x80) != 0x80)
+ goto marker_expected;
+
+ {
+ guint32 rate_bound;
+
+ if ((data[2] & 0x01) != 0x01)
+ goto marker_expected;
+
+ rate_bound = ((guint32) data[0] & 0x7f) << 15;
+ rate_bound |= ((guint32) data[1]) << 7;
+ rate_bound |= ((guint32) data[2] & 0xfe) >> 1;
+ rate_bound *= MPEG_MUX_RATE_MULT;
+
+ GST_DEBUG_OBJECT (demux, "rate bound %u", rate_bound);
+
+ data += 3;
+ }
+
+ /* audio_bound:6==1 ! fixed:1 | constrained:1 */
+ {
+ #ifndef GST_DISABLE_GST_DEBUG
+ guint8 audio_bound;
+ gboolean fixed;
+
+ /* max number of simultaneous audio streams active */
+ audio_bound = (data[0] & 0xfc) >> 2;
+ /* fixed or variable bitrate */
+ fixed = (data[0] & 0x02) == 0x02;
+ /* meeting constraints */
+ csps = (data[0] & 0x01) == 0x01;
+
+ GST_DEBUG_OBJECT (demux, "audio_bound %d, fixed %d, constrained %d",
+ audio_bound, fixed, csps);
+ #endif
+ data += 1;
+ }
+
+ /* audio_lock:1 | video_lock:1 | marker:1==1 | video_bound:5 */
+ {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gboolean audio_lock;
+ gboolean video_lock;
+ guint8 video_bound;
+
+ audio_lock = (data[0] & 0x80) == 0x80;
+ video_lock = (data[0] & 0x40) == 0x40;
+ #endif
+
+ if ((data[0] & 0x20) != 0x20)
+ goto marker_expected;
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* max number of simultaneous video streams active */
+ video_bound = (data[0] & 0x1f);
+
+ GST_DEBUG_OBJECT (demux, "audio_lock %d, video_lock %d, video_bound %d",
+ audio_lock, video_lock, video_bound);
+ #endif
+ data += 1;
+ }
+
+ /* packet_rate_restriction:1 | reserved:7==0x7F */
+ {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gboolean packet_rate_restriction;
+ #endif
+ if ((data[0] & 0x7f) != 0x7f)
+ goto marker_expected;
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* only valid if csps is set */
+ if (csps) {
+ packet_rate_restriction = (data[0] & 0x80) == 0x80;
+
+ GST_DEBUG_OBJECT (demux, "packet_rate_restriction %d",
+ packet_rate_restriction);
+ }
+ #endif
+ }
+ data += 1;
+
+ {
+ /* Remaining bytes are 3-byte per-stream P-STD entries (12 bytes of
+ * fixed header already consumed). */
+ gint stream_count = (length - 12) / 3;
+ gint i;
+
+ GST_DEBUG_OBJECT (demux, "number of streams: %d ", stream_count);
+
+ for (i = 0; i < stream_count; i++) {
+ guint8 stream_id;
+ #ifndef GST_DISABLE_GST_DEBUG
+ gboolean STD_buffer_bound_scale;
+ guint16 STD_buffer_size_bound;
+ guint32 buf_byte_size_bound;
+ #endif
+ stream_id = *data++;
+ if (!(stream_id & 0x80))
+ goto sys_len_error;
+
+ /* check marker bits */
+ if ((*data & 0xC0) != 0xC0)
+ goto no_placeholder_bits;
+ #ifndef GST_DISABLE_GST_DEBUG
+ STD_buffer_bound_scale = *data & 0x20;
+ STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
+ STD_buffer_size_bound |= *data++;
+
+ if (STD_buffer_bound_scale == 0) {
+ buf_byte_size_bound = STD_buffer_size_bound * 128;
+ } else {
+ buf_byte_size_bound = STD_buffer_size_bound * 1024;
+ }
+
+ GST_DEBUG_OBJECT (demux, "STD_buffer_bound_scale %d",
+ STD_buffer_bound_scale);
+ GST_DEBUG_OBJECT (demux, "STD_buffer_size_bound %d or %d bytes",
+ STD_buffer_size_bound, buf_byte_size_bound);
+ #endif
+ }
+ }
+
+ gst_adapter_unmap (demux->adapter);
+ gst_adapter_flush (demux->adapter, length);
+ ADAPTER_OFFSET_FLUSH (length);
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ marker_expected:
+ {
+ GST_DEBUG_OBJECT (demux, "expecting marker");
+ gst_adapter_unmap (demux->adapter);
+ return GST_FLOW_LOST_SYNC;
+ }
+ no_placeholder_bits:
+ {
+ GST_DEBUG_OBJECT (demux, "expecting placeholder bit values"
+ " '11' after stream id");
+ gst_adapter_unmap (demux->adapter);
+ return GST_FLOW_LOST_SYNC;
+ }
+ sys_len_error:
+ {
+ GST_DEBUG_OBJECT (demux, "error in system header length");
+ gst_adapter_unmap (demux->adapter);
+ return GST_FLOW_LOST_SYNC;
+ }
+ need_more_data:
+ {
+ GST_DEBUG_OBJECT (demux, "need more data");
+ gst_adapter_unmap (demux->adapter);
+ return GST_FLOW_NEED_MORE_DATA;
+ }
+ }
+
+ /* Parse a Program Stream Map at the head of the adapter and record the
+ * stream-id -> stream-type mapping in demux->psm[], skipping the PS
+ * info descriptors and the trailing CRC. private_stream_1 (0xbd) types
+ * are deliberately ignored (type is discovered from the stream data).
+ * Returns GST_FLOW_OK, GST_FLOW_LOST_SYNC on a malformed/oversized PSM,
+ * or GST_FLOW_NEED_MORE_DATA. */
+ static GstFlowReturn
+ gst_ps_demux_parse_psm (GstPsDemux * demux)
+ {
+ guint16 psm_length, info_length = 0, es_map_length = 0;
+ guint8 psm_version = 0;
+ GstByteReader br;
+ #ifndef GST_DISABLE_GST_DEBUG
+ gboolean applicable;
+ #endif
+
+ /* Need at least 6 bytes for start code + length */
+ if (gst_adapter_available (demux->adapter) < 6)
+ goto need_more_data;
+
+ {
+ const guint8 *data;
+
+ /* start code + length */
+ data = gst_adapter_map (demux->adapter, 6);
+ /* skip start code */
+ data += 4;
+ psm_length = GST_READ_UINT16_BE (data);
+ GST_DEBUG_OBJECT (demux, "PSM length %u", psm_length);
+
+ /* 0x3FA is the maximum program_stream_map_length allowed by the spec */
+ if (G_UNLIKELY (psm_length > 0x3FA))
+ goto psm_len_error;
+ psm_length += 6; /* Add start code + size to length */
+
+ gst_adapter_unmap (demux->adapter);
+
+ if (gst_adapter_available (demux->adapter) < psm_length)
+ goto need_more_data;
+
+ data = gst_adapter_map (demux->adapter, psm_length);
+
+ gst_byte_reader_init (&br, data, psm_length);
+ }
+
+ /* skip start code and length */
+ if (!gst_byte_reader_skip (&br, 6))
+ goto fail_invalid;
+
+ /* Read PSM applicable bit together with version */
+ if (!gst_byte_reader_get_uint8 (&br, &psm_version))
+ goto fail_invalid;
+ #ifndef GST_DISABLE_GST_DEBUG
+ applicable = (psm_version & 0x80) >> 7;
+ #endif
+ psm_version &= 0x1F;
+ GST_DEBUG_OBJECT (demux, "PSM version %u (applicable now %u)", psm_version,
+ applicable);
+
+ /* Jump over the next byte (marker bit) */
+ if (!gst_byte_reader_skip (&br, 1))
+ goto fail_invalid;
+
+ /* Read PS info length */
+ if (!gst_byte_reader_get_uint16_be (&br, &info_length))
+ goto fail_invalid;
+ GST_DEBUG_OBJECT (demux, "PS info length %u bytes", info_length);
+ /* Skip the PS info, we don't use it */
+ if (!gst_byte_reader_skip (&br, info_length))
+ goto fail_invalid;
+
+ /* Read ES map length */
+ if (!gst_byte_reader_get_uint16_be (&br, &es_map_length))
+ goto fail_invalid;
+ GST_DEBUG_OBJECT (demux, "ES map length %u bytes", es_map_length);
+
+ /* Now read the ES map */
+ {
+ GstByteReader es_map_br;
+ if (!gst_byte_reader_get_sub_reader (&br, &es_map_br, es_map_length))
+ goto fail_invalid;
+
+ /* Each entry: type(1) + id(1) + info_length(2) + descriptors. */
+ while (gst_byte_reader_get_remaining (&es_map_br) >= 4) {
+ guint8 stream_type = 0, stream_id = 0;
+ guint16 stream_info_length = 0;
+
+ if (!gst_byte_reader_get_uint8 (&es_map_br, &stream_type) ||
+ !gst_byte_reader_get_uint8 (&es_map_br, &stream_id) ||
+ !gst_byte_reader_get_uint16_be (&es_map_br, &stream_info_length))
+ break;
+
+ GST_DEBUG_OBJECT (demux,
+ "Stream type %02X with id %02X and %u bytes info", stream_type,
+ stream_id, stream_info_length);
+
+ if (G_LIKELY (stream_id != 0xbd))
+ demux->psm[stream_id] = stream_type;
+ else {
+ /* Ignore stream type for private_stream_1 and discover it looking at
+ * the stream data.
+ * Fixes demuxing some clips with lpcm that was wrongly declared as
+ * mpeg audio */
+ GST_DEBUG_OBJECT (demux, "stream type for private_stream_1 ignored");
+ }
+
+ /* FIXME: We could use the descriptors instead of skipping them */
+ if (!gst_byte_reader_skip (&es_map_br, stream_info_length))
+ break;
+ }
+ }
+ /* We ignore the 4-byte CRC at the end */
+
+ gst_adapter_unmap (demux->adapter);
+ gst_adapter_flush (demux->adapter, psm_length);
+ ADAPTER_OFFSET_FLUSH (psm_length);
+ return GST_FLOW_OK;
+
+ /* Invalid PSM contents: skip the whole (length-delimited) packet. */
+ fail_invalid:
+ GST_DEBUG_OBJECT (demux, "Failed to parse PSM. Skipping");
+ gst_adapter_unmap (demux->adapter);
+ gst_adapter_flush (demux->adapter, psm_length);
+ ADAPTER_OFFSET_FLUSH (psm_length);
+ return GST_FLOW_LOST_SYNC;
+ psm_len_error:
+ {
+ GST_DEBUG_OBJECT (demux, "error in PSM length");
+ gst_adapter_unmap (demux->adapter);
+ return GST_FLOW_LOST_SYNC;
+ }
+ need_more_data:
+ {
+ GST_DEBUG_OBJECT (demux, "need more data");
+ return GST_FLOW_NEED_MORE_DATA;
+ }
+ }
+
+ /* PES filter resync callback: intentionally empty — resynchronisation
+ * requires no extra work in this demuxer, but the filter API requires
+ * a callback. */
+ static void
+ gst_ps_demux_resync_cb (GstPESFilter * filter, GstPsDemux * demux)
+ {
+ }
+
+ /* PES filter data callback, invoked for each PES payload buffer.
+  *
+  * On the first buffer of a packet (@first) this resolves the stream
+  * id and stream type (including private-stream-1 sub-id remapping and
+  * the VDR raw-A52 special case), captures PTS/DTS from the filter into
+  * demux->next_pts/next_dts (with scr_adjust applied), and selects
+  * demux->current_stream. All buffers are then forwarded to the current
+  * stream unless it is unknown or known to be not-linked.
+  *
+  * Takes ownership of @buffer (always unmapped and unreffed on exit). */
+ static GstFlowReturn
+ gst_ps_demux_data_cb (GstPESFilter * filter, gboolean first,
+     GstBuffer * buffer, GstPsDemux * demux)
+ {
+   GstBuffer *out_buf;
+   GstFlowReturn ret = GST_FLOW_OK;
+   gint stream_type;
+   guint32 start_code;
+   guint8 id;
+   GstMapInfo map;
+   gsize datalen;
+   guint offset = 0;             /* payload bytes to strip before pushing */
+   gst_buffer_map (buffer, &map, GST_MAP_READ);
+   datalen = map.size;
+   start_code = filter->start_code;
+   id = filter->id;
+   if (first) {
+     gint layer = 0;
+     /* find the stream type */
+     stream_type = demux->psm[id];
+     if (stream_type == -1) {
+       /* no stream type, if PS1, get the new id */
+       if (start_code == ID_PRIVATE_STREAM_1 && datalen >= 2) {
+         /* VDR writes A52 streams without any header bytes
+          * (see ftp://ftp.mplayerhq.hu/MPlayer/samples/MPEG-VOB/vdr-AC3) */
+         if (datalen >= 4) {
+           guint hdr = GST_READ_UINT32_BE (map.data);
+           if (G_UNLIKELY ((hdr & 0xffff0000) == AC3_SYNC_WORD)) {
+             id = 0x80;
+             stream_type = demux->psm[id] = ST_GST_AUDIO_RAWA52;
+             GST_DEBUG_OBJECT (demux, "Found VDR raw A52 stream");
+           }
+         }
+
+         if (G_LIKELY (stream_type == -1)) {
+           /* new id is in the first byte */
+           id = map.data[offset++];
+           datalen--;
+           /* and remap */
+           stream_type = demux->psm[id];
+           /* Now, if it's a subpicture stream - no more, otherwise
+            * take the first byte too, since it's the frame count in audio
+            * streams and our backwards compat convention is to strip it off */
+           if (stream_type != ST_PS_DVD_SUBPICTURE) {
+             /* Number of audio frames in this packet */
+ #ifndef GST_DISABLE_GST_DEBUG
+             guint8 nframes;
+             nframes = map.data[offset];
+             GST_LOG_OBJECT (demux, "private type 0x%02x, %d frames", id,
+                 nframes);
+ #endif
+             offset++;
+             datalen--;
+           } else {
+             GST_LOG_OBJECT (demux, "private type 0x%02x, stream type %d",
+                 id, stream_type);
+           }
+         }
+       }
+       if (stream_type == -1)
+         goto unknown_stream_type;
+     } else if (stream_type == ST_AUDIO_MPEG1 || stream_type == ST_AUDIO_MPEG2) {
+       /* Peek at the MPEG audio frame header to extract the layer, which
+        * is needed to pick the right caps for the source pad. */
+       if (datalen >= 2) {
+         guint hdr = GST_READ_UINT16_BE (map.data);
+         if ((hdr & 0xfff0) == 0xfff0) {
+           switch (hdr & 0x06) {
+             case 0x6:
+               layer = 1;
+               break;
+             case 0x4:
+               layer = 2;
+               break;
+             case 0x2:
+               layer = 3;
+               break;
+             default:
+               GST_WARNING_OBJECT (demux, "unknown mpeg audio layer");
+           }
+         }
+       }
+     }
+
+     /* Propagate timestamps from the PES filter, shifted by the current
+      * SCR adjustment; G_MAXUINT64 means "no timestamp". */
+     if (filter->pts != -1) {
+       demux->next_pts = filter->pts + demux->scr_adjust;
+       GST_LOG_OBJECT (demux, "stream 0x%02x PTS = orig %" G_GUINT64_FORMAT
+           " (%" G_GUINT64_FORMAT ")", id, filter->pts, demux->next_pts);
+     } else
+       demux->next_pts = G_MAXUINT64;
+     if (filter->dts != -1) {
+       demux->next_dts = filter->dts + demux->scr_adjust;
+       GST_LOG_OBJECT (demux, "stream 0x%02x DTS = orig %" G_GUINT64_FORMAT
+           " (%" G_GUINT64_FORMAT ")", id, filter->dts, demux->next_dts);
+     } else {
+       /* no DTS: assume it equals the PTS */
+       demux->next_dts = demux->next_pts;
+     }
+
+     demux->current_stream =
+         gst_ps_demux_get_stream (demux, id, stream_type, layer);
+   }
+
+   if (G_UNLIKELY (demux->current_stream == NULL)) {
+     GST_DEBUG_OBJECT (demux, "Dropping buffer for unknown stream id 0x%02x",
+         id);
+     goto done;
+   }
+
+   /* After 2 seconds of bitstream emit no more pads */
+   if (demux->need_no_more_pads
+       && (demux->current_scr - demux->first_scr) > 2 * CLOCK_FREQ) {
+     GST_DEBUG_OBJECT (demux, "no more pads, notifying");
+     gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+     demux->need_no_more_pads = FALSE;
+   }
+
+   /* If the stream is not-linked, don't bother creating a sub-buffer
+    * to send to it, unless we're processing a discont (which resets
+    * the not-linked status and tries again */
+   if (demux->current_stream->discont) {
+     GST_DEBUG_OBJECT (demux, "stream is discont");
+     demux->current_stream->notlinked = FALSE;
+   }
+
+   if (demux->current_stream->notlinked == FALSE) {
+     /* strip the private-stream header bytes accounted in 'offset' */
+     out_buf =
+         gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, datalen);
+     ret = gst_ps_demux_send_data (demux, demux->current_stream, out_buf);
+     if (ret == GST_FLOW_NOT_LINKED) {
+       demux->current_stream->notlinked = TRUE;
+     }
+   }
+
+ done:
+   gst_buffer_unmap (buffer, &map);
+   gst_buffer_unref (buffer);
+   return ret;
+   /* ERRORS */
+ unknown_stream_type:
+   {
+     /* not fatal: simply drop this packet and carry on */
+     GST_DEBUG_OBJECT (demux, "unknown stream type %02x", id);
+     ret = GST_FLOW_OK;
+     goto done;
+   }
+ }
+
+ /* Scan the adapter for the next MPEG start code (0x000001xx) and store
+  * it in demux->last_sync_code. Skipped bytes are flushed in forward
+  * playback; in reverse playback (when @save is TRUE) they are pushed to
+  * rev_adapter so they can be prepended to the previous buffer in stream
+  * order. Returns TRUE when a sync code was found. */
+ static gboolean
+ gst_ps_demux_resync (GstPsDemux * demux, gboolean save)
+ {
+   const guint8 *data;
+   gint avail;
+   guint32 code;
+   gint offset;
+   gboolean found;
+   avail = gst_adapter_available (demux->adapter);
+   if (G_UNLIKELY (avail < 4))
+     goto need_data;
+   /* Common case, read 4 bytes and check it */
+   data = gst_adapter_map (demux->adapter, 4);
+   /* read current code */
+   code = GST_READ_UINT32_BE (data);
+   /* The common case is that the sync code is at 0 bytes offset */
+   if (G_LIKELY ((code & 0xffffff00) == 0x100L)) {
+     GST_LOG_OBJECT (demux, "Found resync code %08x after 0 bytes", code);
+     demux->last_sync_code = code;
+     gst_adapter_unmap (demux->adapter);
+     return TRUE;
+   }
+
+   /* Otherwise, we are starting at byte 4 and we need to search
+      the sync code in all available data in the adapter */
+   offset = 4;
+   if (offset >= avail)
+     goto need_data;             /* Not enough data to find sync */
+   data = gst_adapter_map (demux->adapter, avail);
+   do {
+     /* slide a 32-bit window one byte at a time */
+     code = (code << 8) | data[offset++];
+     found = (code & 0xffffff00) == 0x100L;
+   } while (offset < avail && !found);
+   gst_adapter_unmap (demux->adapter);
+   if (!save || demux->sink_segment.rate >= 0.0) {
+     GST_LOG_OBJECT (demux, "flushing %d bytes", offset - 4);
+     /* forward playback, we can discard and flush the skipped bytes */
+     gst_adapter_flush (demux->adapter, offset - 4);
+     ADAPTER_OFFSET_FLUSH (offset - 4);
+   } else {
+     if (found) {
+       GST_LOG_OBJECT (demux, "reverse saving %d bytes", offset - 4);
+       /* reverse playback, we keep the flushed bytes and we will append them to
+        * the next buffer in the chain function, which is the previous buffer in
+        * the stream. */
+       gst_adapter_push (demux->rev_adapter,
+           gst_adapter_take_buffer (demux->adapter, offset - 4));
+     } else {
+       GST_LOG_OBJECT (demux, "reverse saving %d bytes", avail);
+       /* nothing found, keep all bytes */
+       gst_adapter_push (demux->rev_adapter,
+           gst_adapter_take_buffer (demux->adapter, avail));
+     }
+   }
+
+   if (found) {
+     GST_LOG_OBJECT (demux, "Found resync code %08x after %d bytes",
+         code, offset - 4);
+     demux->last_sync_code = code;
+   } else {
+     GST_LOG_OBJECT (demux, "No resync after skipping %d", offset);
+   }
+
+   return found;
+ need_data:
+   {
+     GST_LOG_OBJECT (demux, "we need more data for resync %d", avail);
+     return FALSE;
+   }
+ }
+
+ /* Return TRUE when the low byte of @sync is a PES stream id:
+  * 0xbc..0xbf (PSM / private / padding streams), 0xc0..0xdf (MPEG audio)
+  * or 0xe0..0xef (MPEG video). */
+ static inline gboolean
+ gst_ps_demux_is_pes_sync (guint32 sync)
+ {
+   if ((sync & 0xfc) == 0xbc)
+     return TRUE;
+   if ((sync & 0xe0) == 0xc0)
+     return TRUE;
+   return (sync & 0xf0) == 0xe0;
+ }
+
+ /* Try to parse one timestamp out of a pack starting at @data (bounded by
+  * @end). Depending on @mode this extracts the SCR from the pack header
+  * (SCAN_SCR), or walks past an optional system header into the first PES
+  * packet and extracts its PTS or DTS (SCAN_PTS / SCAN_DTS). The result,
+  * in 90 kHz units, is stored in *@rts. Returns TRUE on success; any
+  * malformed marker bit or insufficient data bails out with FALSE. */
+ static inline gboolean
+ gst_ps_demux_scan_ts (GstPsDemux * demux, const guint8 * data,
+     SCAN_MODE mode, guint64 * rts, const guint8 * end)
+ {
+   gboolean ret = FALSE;
+   guint32 scr1, scr2;
+   guint64 scr;
+   guint64 pts, dts;
+   guint32 code;
+   guint16 len;
+   /* read the 4 bytes for the sync code */
+   code = GST_READ_UINT32_BE (data);
+   if (G_LIKELY (code != ID_PS_PACK_START_CODE))
+     goto beach;
+   if (data + 12 > end)
+     goto beach;
+   /* skip start code */
+   data += 4;
+   scr1 = GST_READ_UINT32_BE (data);
+   scr2 = GST_READ_UINT32_BE (data + 4);
+   /* start parsing the stream */
+   if ((*data & 0xc0) == 0x40) {
+     /* MPEG-2 PACK header */
+     guint32 scr_ext;
+     guint32 next32;
+     guint8 stuffing_bytes;
+     /* :2=01 ! scr:3 ! marker:1==1 ! scr:15 ! marker:1==1 ! scr:15 */
+     /* check markers */
+     if ((scr1 & 0xc4000400) != 0x44000400)
+       goto beach;
+     /* reassemble the 33-bit SCR base from its three bit fields */
+     scr = ((guint64) scr1 & 0x38000000) << 3;
+     scr |= ((guint64) scr1 & 0x03fff800) << 4;
+     scr |= ((guint64) scr1 & 0x000003ff) << 5;
+     scr |= ((guint64) scr2 & 0xf8000000) >> 27;
+     /* marker:1==1 ! scr_ext:9 ! marker:1==1 */
+     if ((scr2 & 0x04010000) != 0x04010000)
+       goto beach;
+     scr_ext = (scr2 & 0x03fe0000) >> 17;
+     if (scr_ext) {
+       scr = (scr * 300 + scr_ext % 300) / 300;
+     }
+     /* SCR has been converted into units of 90Khz ticks to make it comparable
+        to DTS/PTS, that also implies 1 tick rounding error */
+     data += 6;
+
+     if (data + 4 > end)
+       goto beach;
+     /* PMR:22 ! :2==11 ! reserved:5 ! stuffing_len:3 */
+     next32 = GST_READ_UINT32_BE (data);
+     if ((next32 & 0x00000300) != 0x00000300)
+       goto beach;
+     stuffing_bytes = (next32 & 0x07);
+     data += 4;
+     if (data + stuffing_bytes > end)
+       goto beach;
+     /* stuffing bytes must all be 0xff per the spec */
+     while (stuffing_bytes--) {
+       if (*data++ != 0xff)
+         goto beach;
+     }
+   } else {
+     /* MPEG-1 pack header */
+     /* check markers */
+     if ((scr1 & 0xf1000100) != 0x21000100)
+       goto beach;
+     if ((scr2 & 0x01800001) != 0x01800001)
+       goto beach;
+     /* :4=0010 ! scr:3 ! marker:1==1 ! scr:15 ! marker:1==1 ! scr:15 ! marker:1==1 */
+     scr = ((guint64) scr1 & 0x0e000000) << 5;
+     scr |= ((guint64) scr1 & 0x00fffe00) << 6;
+     scr |= ((guint64) scr1 & 0x000000ff) << 7;
+     scr |= ((guint64) scr2 & 0xfe000000) >> 25;
+     data += 8;
+   }
+
+   if (mode == SCAN_SCR) {
+     *rts = scr;
+     ret = TRUE;
+     goto beach;
+   }
+
+   /* Possible optional System header here */
+   if (data + 8 > end)
+     goto beach;
+
+   code = GST_READ_UINT32_BE (data);
+   len = GST_READ_UINT16_BE (data + 4);
+   if (code == ID_PS_SYSTEM_HEADER_START_CODE) {
+     /* Found a system header, skip it */
+     /* Check for sufficient data - system header, plus enough
+      * left over for the PES packet header */
+     if (data + 6 + len + 6 > end)
+       return FALSE;
+     data += len + 6;
+     /* read the 4 bytes for the PES sync code */
+     code = GST_READ_UINT32_BE (data);
+     len = GST_READ_UINT16_BE (data + 4);
+   }
+
+   /* Check we have enough data left for reading the PES packet */
+   if (data + 6 + len > end)
+     return FALSE;
+   if (!gst_ps_demux_is_pes_sync (code))
+     goto beach;
+   /* these stream ids carry no PTS/DTS in their header */
+   switch (code) {
+     case ID_PS_PROGRAM_STREAM_MAP:
+     case ID_PRIVATE_STREAM_2:
+     case ID_ECM_STREAM:
+     case ID_EMM_STREAM:
+     case ID_PROGRAM_STREAM_DIRECTORY:
+     case ID_DSMCC_STREAM:
+     case ID_ITU_TREC_H222_TYPE_E_STREAM:
+     case ID_PADDING_STREAM:
+       goto beach;
+     default:
+       break;
+   }
+
+   /* skip sync code and size */
+   data += 6;
+   pts = dts = -1;
+   /* stuffing bits, first two bits are '10' for mpeg2 pes so this code is
+    * not triggered. */
+   while (TRUE) {
+     if (*data != 0xff)
+       break;
+     data++;
+   }
+
+   /* STD buffer size, never for mpeg2 */
+   if ((*data & 0xc0) == 0x40)
+     data += 2;
+   /* PTS but no DTS, never for mpeg2 */
+   if ((*data & 0xf0) == 0x20) {
+     READ_TS (data, pts, beach);
+   }
+   /* PTS and DTS, never for mpeg2 */
+   else if ((*data & 0xf0) == 0x30) {
+     READ_TS (data, pts, beach);
+     READ_TS (data, dts, beach);
+   } else if ((*data & 0xc0) == 0x80) {
+     /* mpeg2 case */
+     guchar flags;
+     /* 2: '10'
+      * 2: PES_scrambling_control
+      * 1: PES_priority
+      * 1: data_alignment_indicator
+      * 1: copyright
+      * 1: original_or_copy
+      */
+     flags = *data++;
+     if ((flags & 0xc0) != 0x80)
+       goto beach;
+     /* 2: PTS_DTS_flags
+      * 1: ESCR_flag
+      * 1: ES_rate_flag
+      * 1: DSM_trick_mode_flag
+      * 1: additional_copy_info_flag
+      * 1: PES_CRC_flag
+      * 1: PES_extension_flag
+      */
+     flags = *data++;
+     /* 8: PES_header_data_length */
+     data++;
+     /* only DTS: this is invalid */
+     if ((flags & 0xc0) == 0x40)
+       goto beach;
+     /* check for PTS */
+     if ((flags & 0x80)) {
+       READ_TS (data, pts, beach);
+     }
+     /* check for DTS */
+     if ((flags & 0x40)) {
+       READ_TS (data, dts, beach);
+     }
+   }
+
+   if (mode == SCAN_DTS && dts != (guint64) - 1) {
+     *rts = dts;
+     ret = TRUE;
+   }
+
+   if (mode == SCAN_PTS && pts != (guint64) - 1) {
+     *rts = pts;
+     ret = TRUE;
+   }
+ beach:
+   return ret;
+ }
+
+ /* Pull blocks from upstream starting at *@pos and scan forward for a
+  * timestamp of kind @mode (SCR/PTS/DTS). On success stores the value in
+  * *@rts, updates *@pos to the byte offset of the pack that carried it,
+  * and returns TRUE. @limit (when non-zero) bounds how far past *@pos the
+  * scan may go. Requires pull-mode access to the sink pad. */
+ static inline gboolean
+ gst_ps_demux_scan_forward_ts (GstPsDemux * demux, guint64 * pos,
+     SCAN_MODE mode, guint64 * rts, gint limit)
+ {
+   GstFlowReturn ret = GST_FLOW_OK;
+   GstBuffer *buffer;
+   guint64 offset = *pos;
+   gboolean found = FALSE;
+   guint64 ts = 0;
+   guint scan_sz = (mode == SCAN_SCR ? SCAN_SCR_SZ : SCAN_PTS_SZ);
+   guint cursor, to_read = BLOCK_SZ;
+   guint end_scan;
+   GstMapInfo map;
+   do {
+     /* Check we can get at least scan_sz bytes */
+     if (offset + scan_sz > demux->sink_segment.stop)
+       return FALSE;
+     /* Don't go further than 'limit' bytes */
+     if (limit && offset > *pos + limit)
+       return FALSE;
+     if (offset + to_read > demux->sink_segment.stop)
+       to_read = demux->sink_segment.stop - offset;
+     /* read some data */
+     buffer = NULL;
+     ret = gst_pad_pull_range (demux->sinkpad, offset, to_read, &buffer);
+     if (G_UNLIKELY (ret != GST_FLOW_OK))
+       return FALSE;
+     gst_buffer_map (buffer, &map, GST_MAP_READ);
+     /* may get a short buffer at the end of the file */
+     if (G_UNLIKELY (map.size <= scan_sz)) {
+       gst_buffer_unmap (buffer, &map);
+       gst_buffer_unref (buffer);
+       return FALSE;
+     }
+
+     end_scan = map.size - scan_sz;
+     /* scan the block */
+     for (cursor = 0; !found && cursor <= end_scan; cursor++) {
+       found = gst_ps_demux_scan_ts (demux, map.data + cursor, mode, &ts,
+           map.data + map.size);
+     }
+
+     /* done with the buffer, unref it */
+     gst_buffer_unmap (buffer, &map);
+     gst_buffer_unref (buffer);
+     if (found) {
+       *rts = ts;
+       /* cursor was post-incremented after the successful scan */
+       *pos = offset + cursor - 1;
+     } else {
+       offset += cursor;
+     }
+   } while (!found && offset < demux->sink_segment.stop);
+   return found;
+ }
+
+ /* Mirror of gst_ps_demux_scan_forward_ts(): pull blocks ending at *@pos
+  * and scan backwards for a timestamp of kind @mode. On success stores
+  * the value in *@rts, updates *@pos to the offset of the matching pack,
+  * and returns TRUE. @limit (when non-zero) bounds the backwards range. */
+ static inline gboolean
+ gst_ps_demux_scan_backward_ts (GstPsDemux * demux, guint64 * pos,
+     SCAN_MODE mode, guint64 * rts, gint limit)
+ {
+   GstFlowReturn ret = GST_FLOW_OK;
+   GstBuffer *buffer;
+   guint64 offset = *pos;
+   gboolean found = FALSE;
+   guint64 ts = 0;
+   guint scan_sz = (mode == SCAN_SCR ? SCAN_SCR_SZ : SCAN_PTS_SZ);
+   guint cursor, to_read = BLOCK_SZ;
+   guint start_scan;
+   guint8 *data;
+   GstMapInfo map;
+   do {
+     /* Check we have at least scan_sz bytes available */
+     if (offset < scan_sz - 1)
+       return FALSE;
+     /* Don't go backward past the start or 'limit' bytes */
+     if (limit && offset + limit < *pos)
+       return FALSE;
+     if (offset > BLOCK_SZ)
+       offset -= BLOCK_SZ;
+     else {
+       to_read = offset + 1;
+       offset = 0;
+     }
+     /* read some data */
+     buffer = NULL;
+     ret = gst_pad_pull_range (demux->sinkpad, offset, to_read, &buffer);
+     if (G_UNLIKELY (ret != GST_FLOW_OK))
+       return FALSE;
+     gst_buffer_map (buffer, &map, GST_MAP_READ);
+     /* may get a short buffer at the end of the file */
+     if (G_UNLIKELY (map.size <= scan_sz)) {
+       gst_buffer_unmap (buffer, &map);
+       gst_buffer_unref (buffer);
+       return FALSE;
+     }
+
+     start_scan = map.size - scan_sz;
+     data = map.data + start_scan;
+     /* scan the block back to front; data is post-decremented, cursor is
+      * post-decremented after each try, so on success cursor ends up as
+      * the in-buffer offset of the match */
+     for (cursor = (start_scan + 1); !found && cursor > 0; cursor--) {
+       found = gst_ps_demux_scan_ts (demux, data--, mode, &ts,
+           map.data + map.size);
+     }
+
+     /* done with the buffer, unref it */
+     gst_buffer_unmap (buffer, &map);
+     gst_buffer_unref (buffer);
+     if (found) {
+       *rts = ts;
+       *pos = offset + cursor;
+     }
+
+   } while (!found && offset > 0);
+   return found;
+ }
+
+ /* Initialise the sink segment from the upstream duration (in bytes) and
+  * derive timing information by scanning the file for the first/last SCR
+  * and PTS. Also computes base_time and the average SCR rate, and
+  * configures the src segment in time format when PTS values were found.
+  * Only meaningful in pull mode. Returns TRUE on success. */
+ static inline gboolean
+ gst_ps_sink_get_duration (GstPsDemux * demux)
+ {
+   gboolean res = FALSE;
+   GstPad *peer;
+   GstFormat format = GST_FORMAT_BYTES;
+   gint64 length = 0;
+   guint64 offset;
+   guint i;
+   guint64 scr = 0;
+   /* init the sink segment */
+   gst_segment_init (&demux->sink_segment, format);
+   /* get peer to figure out length */
+   if ((peer = gst_pad_get_peer (demux->sinkpad)) == NULL)
+     goto beach;
+   res = gst_pad_query_duration (peer, format, &length);
+   gst_object_unref (peer);
+   if (!res || length <= 0)
+     goto beach;
+   GST_DEBUG_OBJECT (demux, "file length %" G_GINT64_FORMAT, length);
+   /* update the sink segment */
+   demux->sink_segment.stop = length;
+   gst_segment_set_duration (&demux->sink_segment, format, length);
+   gst_segment_set_position (&demux->sink_segment, format, 0);
+   /* Scan for the first/last SCR and PTS to calculate the duration */
+   /* scan for first SCR in the stream */
+   offset = demux->sink_segment.start;
+   gst_ps_demux_scan_forward_ts (demux, &offset, SCAN_SCR,
+       &demux->first_scr, DURATION_SCAN_LIMIT);
+   GST_DEBUG_OBJECT (demux,
+       "First SCR: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
+       " in packet starting at %" G_GUINT64_FORMAT, demux->first_scr,
+       GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->first_scr)), offset);
+   demux->first_scr_offset = offset;
+   /* scan for last SCR in the stream */
+   offset = demux->sink_segment.stop;
+   gst_ps_demux_scan_backward_ts (demux, &offset, SCAN_SCR,
+       &demux->last_scr, DURATION_SCAN_LIMIT);
+   GST_DEBUG_OBJECT (demux,
+       "Last SCR: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
+       " in packet starting at %" G_GUINT64_FORMAT, demux->last_scr,
+       GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->last_scr)), offset);
+   demux->last_scr_offset = offset;
+   /* scan for first PTS in the stream */
+   offset = demux->sink_segment.start;
+   gst_ps_demux_scan_forward_ts (demux, &offset, SCAN_PTS,
+       &demux->first_pts, DURATION_SCAN_LIMIT);
+   GST_DEBUG_OBJECT (demux,
+       "First PTS: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
+       " in packet starting at %" G_GUINT64_FORMAT, demux->first_pts,
+       GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->first_pts)), offset);
+   if (demux->first_pts != G_MAXUINT64) {
+     /* scan for last PTS in the stream */
+     offset = demux->sink_segment.stop;
+     gst_ps_demux_scan_backward_ts (demux, &offset, SCAN_PTS,
+         &demux->last_pts, DURATION_SCAN_LIMIT);
+     GST_DEBUG_OBJECT (demux,
+         "Last PTS: %" G_GINT64_FORMAT " %" GST_TIME_FORMAT
+         " in packet starting at %" G_GUINT64_FORMAT, demux->last_pts,
+         GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->last_pts)), offset);
+   }
+   /* Detect wrong SCR values: some streams start with a bogus high SCR,
+    * so retry a few packets further in until one below last_scr is found */
+   if (demux->first_scr > demux->last_scr) {
+     GST_DEBUG_OBJECT (demux, "Wrong SCR values detected, searching for "
+         "a better first SCR value");
+     offset = demux->first_scr_offset;
+     for (i = 0; i < 10; i++) {
+       offset++;
+       gst_ps_demux_scan_forward_ts (demux, &offset, SCAN_SCR, &scr, 0);
+       if (scr < demux->last_scr) {
+         demux->first_scr = scr;
+         demux->first_scr_offset = offset;
+         /* Start demuxing from the right place */
+         demux->sink_segment.position = offset;
+         GST_DEBUG_OBJECT (demux, "Replaced First SCR: %" G_GINT64_FORMAT
+             " %" GST_TIME_FORMAT " in packet starting at %"
+             G_GUINT64_FORMAT, demux->first_scr,
+             GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (demux->first_scr)), offset);
+         break;
+       }
+     }
+   }
+   /* Set the base_time and avg rate */
+   demux->base_time = MPEGTIME_TO_GSTTIME (demux->first_scr);
+   demux->scr_rate_n = demux->last_scr_offset - demux->first_scr_offset;
+   demux->scr_rate_d = demux->last_scr - demux->first_scr;
+   if (G_LIKELY (demux->first_pts != G_MAXUINT64 &&
+           demux->last_pts != G_MAXUINT64)) {
+     /* update the src segment */
+     demux->src_segment.format = GST_FORMAT_TIME;
+     demux->src_segment.start =
+         MPEGTIME_TO_GSTTIME (demux->first_pts) - demux->base_time;
+     demux->src_segment.stop = -1;
+     gst_segment_set_duration (&demux->src_segment, GST_FORMAT_TIME,
+         MPEGTIME_TO_GSTTIME (demux->last_pts - demux->first_pts));
+     gst_segment_set_position (&demux->src_segment, GST_FORMAT_TIME,
+         demux->src_segment.start);
+   }
+   GST_INFO_OBJECT (demux, "sink segment configured %" GST_SEGMENT_FORMAT,
+       &demux->sink_segment);
+   GST_INFO_OBJECT (demux, "src segment configured %" GST_SEGMENT_FORMAT,
+       &demux->src_segment);
+   res = TRUE;
+ beach:
+   return res;
+ }
+
+ /* Pull @size bytes at @offset from upstream and feed them through the
+  * chain function. In reverse playback each pulled block is flagged as a
+  * discont. Returns the flow result of the pull or of the chain call. */
+ static inline GstFlowReturn
+ gst_ps_demux_pull_block (GstPad * pad, GstPsDemux * demux,
+     guint64 offset, guint size)
+ {
+   GstBuffer *block = NULL;
+   GstFlowReturn res;
+
+   res = gst_pad_pull_range (pad, offset, size, &block);
+   if (G_UNLIKELY (res != GST_FLOW_OK)) {
+     GST_DEBUG_OBJECT (demux, "pull range at %" G_GUINT64_FORMAT
+         " size %u failed", offset, size);
+     return res;
+   }
+   GST_LOG_OBJECT (demux, "pull range at %" G_GUINT64_FORMAT
+       " size %u done", offset, size);
+
+   if (demux->sink_segment.rate < 0) {
+     GST_LOG_OBJECT (demux, "setting discont flag on backward rate");
+     GST_BUFFER_FLAG_SET (block, GST_BUFFER_FLAG_DISCONT);
+   }
+
+   return gst_ps_demux_chain (pad, GST_OBJECT (demux), block);
+ }
+
+ /* Streaming task function for pull mode. Each iteration pulls one block
+  * (forward or reverse depending on the sink segment rate), pushes it
+  * through the chain function and advances the byte position. On EOS or
+  * error the task pauses and the appropriate segment-done / EOS / error
+  * handling is performed. */
+ static void
+ gst_ps_demux_loop (GstPad * pad)
+ {
+   GstPsDemux *demux;
+   GstFlowReturn ret = GST_FLOW_OK;
+   guint64 offset = 0;
+   demux = GST_PS_DEMUX (gst_pad_get_parent (pad));
+   if (G_UNLIKELY (demux->flushing)) {
+     ret = GST_FLOW_FLUSHING;
+     goto pause;
+   }
+
+   /* first iteration: figure out the duration and segments */
+   if (G_UNLIKELY (demux->sink_segment.format == GST_FORMAT_UNDEFINED))
+     gst_ps_sink_get_duration (demux);
+   offset = demux->sink_segment.position;
+   if (demux->sink_segment.rate >= 0) {
+     guint size = BLOCK_SZ;
+     if (G_LIKELY (demux->sink_segment.stop != (guint64) - 1)) {
+       size = MIN (size, demux->sink_segment.stop - offset);
+     }
+     /* pull in data */
+     ret = gst_ps_demux_pull_block (pad, demux, offset, size);
+     /* pause if something went wrong */
+     if (G_UNLIKELY (ret != GST_FLOW_OK))
+       goto pause;
+     /* update our position */
+     offset += size;
+     gst_segment_set_position (&demux->sink_segment, GST_FORMAT_BYTES, offset);
+     /* check EOS condition */
+     /* FIXME: The src_segment.stop is not including the SCR after seek(set) */
+     if ((demux->sink_segment.position >= demux->sink_segment.stop) ||
+         (demux->src_segment.stop != (guint64) - 1 &&
+             demux->src_segment.position >=
+             demux->src_segment.stop + demux->base_time)) {
+       GST_DEBUG_OBJECT (demux,
+           "forward mode using segment reached end of " "segment pos %"
+           GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " pos in bytes %"
+           G_GUINT64_FORMAT " stop in bytes %" G_GUINT64_FORMAT,
+           GST_TIME_ARGS (demux->src_segment.position),
+           GST_TIME_ARGS (demux->src_segment.stop),
+           demux->sink_segment.position, demux->sink_segment.stop);
+       ret = GST_FLOW_EOS;
+       goto pause;
+     }
+   } else {                      /* Reverse playback */
+     guint64 size = MIN (offset, BLOCK_SZ);
+     /* pull in data */
+     ret = gst_ps_demux_pull_block (pad, demux, offset - size, size);
+     /* pause if something went wrong */
+     if (G_UNLIKELY (ret != GST_FLOW_OK))
+       goto pause;
+     /* update our position */
+     offset -= size;
+     gst_segment_set_position (&demux->sink_segment, GST_FORMAT_BYTES, offset);
+     /* check EOS condition */
+     if (demux->sink_segment.position <= demux->sink_segment.start ||
+         demux->src_segment.position <= demux->src_segment.start) {
+       GST_DEBUG_OBJECT (demux,
+           "reverse mode using segment reached end of " "segment pos %"
+           GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " pos in bytes %"
+           G_GUINT64_FORMAT " stop in bytes %" G_GUINT64_FORMAT,
+           GST_TIME_ARGS (demux->src_segment.position),
+           GST_TIME_ARGS (demux->src_segment.start),
+           demux->sink_segment.position, demux->sink_segment.start);
+       ret = GST_FLOW_EOS;
+       goto pause;
+     }
+   }
+
+   gst_object_unref (demux);
+   return;
+ pause:
+   {
+     const gchar *reason = gst_flow_get_name (ret);
+     GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
+     gst_pad_pause_task (pad);
+     if (ret == GST_FLOW_EOS) {
+       /* perform EOS logic */
+       gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+       if (demux->src_segment.flags & GST_SEEK_FLAG_SEGMENT) {
+         gint64 stop;
+         /* for segment playback we need to post when (in stream time)
+          * we stopped, this is either stop (when set) or the duration. */
+         if ((stop = demux->src_segment.stop) == -1)
+           stop = demux->src_segment.duration;
+         if (demux->sink_segment.rate >= 0) {
+           GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
+           gst_element_post_message (GST_ELEMENT_CAST (demux),
+               gst_message_new_segment_done (GST_OBJECT_CAST (demux),
+                   GST_FORMAT_TIME, stop));
+           gst_ps_demux_send_event (demux,
+               gst_event_new_segment_done (GST_FORMAT_TIME, stop));
+         } else {                /* Reverse playback */
+           GST_LOG_OBJECT (demux,
+               "Sending segment done, at beginning of " "segment");
+           gst_element_post_message (GST_ELEMENT_CAST (demux),
+               gst_message_new_segment_done (GST_OBJECT_CAST (demux),
+                   GST_FORMAT_TIME, demux->src_segment.start));
+           gst_ps_demux_send_event (demux,
+               gst_event_new_segment_done (GST_FORMAT_TIME,
+                   demux->src_segment.start));
+         }
+       } else {
+         GstEvent *event;
+         /* normal playback, send EOS to all linked pads */
+         gst_element_no_more_pads (GST_ELEMENT (demux));
+         GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
+         event = gst_event_new_eos ();
+         if (demux->segment_seqnum)
+           gst_event_set_seqnum (event, demux->segment_seqnum);
+         if (!gst_ps_demux_send_event (demux, event)
+             && !have_open_streams (demux)) {
+           GST_WARNING_OBJECT (demux, "EOS and no streams open");
+           GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+               ("Internal data stream error."), ("No valid streams detected"));
+         }
+       }
+     } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+       GstEvent *event;
+       /* fatal flow error: post an error message and push EOS downstream */
+       GST_ELEMENT_FLOW_ERROR (demux, ret);
+       event = gst_event_new_eos ();
+       if (demux->segment_seqnum)
+         gst_event_set_seqnum (event, demux->segment_seqnum);
+       gst_ps_demux_send_event (demux, event);
+     }
+
+     gst_object_unref (demux);
+     return;
+   }
+ }
+
+ /* If we can pull that's preferred */
+ static gboolean
+ gst_ps_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+   GstQuery *query = gst_query_new_scheduling ();
+   GstPadMode mode = GST_PAD_MODE_PUSH;
+
+   /* switch to pull mode only when the peer advertises seekable
+    * pull-based scheduling; otherwise stay in push mode */
+   if (gst_pad_peer_query (sinkpad, query) &&
+       gst_query_has_scheduling_mode_with_flags (query,
+           GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE))
+     mode = GST_PAD_MODE_PULL;
+
+   gst_query_unref (query);
+
+   return gst_pad_activate_mode (sinkpad, mode, TRUE);
+ }
+
+ /* This function gets called when we activate ourselves in push mode.
+  * Nothing to start here: just record that random access (pull-based
+  * seeking/scanning) is not available. */
+ static gboolean
+ gst_ps_demux_sink_activate_push (GstPad * sinkpad, GstObject * parent,
+     gboolean active)
+ {
+   GstPsDemux *demux = GST_PS_DEMUX (parent);
+
+   demux->random_access = FALSE;
+
+   return TRUE;
+ }
+
+ /* this function gets called when we activate ourselves in pull mode.
+  * We can perform random access to the resource and we start a task
+  * to start reading */
+ static gboolean
+ gst_ps_demux_sink_activate_pull (GstPad * sinkpad, GstObject * parent,
+     gboolean active)
+ {
+   GstPsDemux *demux = GST_PS_DEMUX (parent);
+
+   if (!active) {
+     /* deactivation: stop the streaming task again */
+     demux->random_access = FALSE;
+     return gst_pad_stop_task (sinkpad);
+   }
+
+   GST_DEBUG ("pull mode activated");
+   demux->random_access = TRUE;
+   return gst_pad_start_task (sinkpad,
+       (GstTaskFunction) gst_ps_demux_loop, sinkpad, NULL);
+ }
+
+ /* Dispatch pad (de)activation to the push- or pull-mode handler;
+  * any other scheduling mode is refused. */
+ static gboolean
+ gst_ps_demux_sink_activate_mode (GstPad * pad, GstObject * parent,
+     GstPadMode mode, gboolean active)
+ {
+   switch (mode) {
+     case GST_PAD_MODE_PUSH:
+       return gst_ps_demux_sink_activate_push (pad, parent, active);
+     case GST_PAD_MODE_PULL:
+       return gst_ps_demux_sink_activate_pull (pad, parent, active);
+     default:
+       return FALSE;
+   }
+ }
+
+ /* EOS and NOT_LINKED need to be combined. This means that we return:
+  *
+  * GST_FLOW_NOT_LINKED: when all pads NOT_LINKED.
+  * GST_FLOW_EOS: when all pads EOS or NOT_LINKED.
+  */
+ static GstFlowReturn
+ gst_ps_demux_combine_flows (GstPsDemux * demux, GstFlowReturn ret)
+ {
+   GstFlowReturn combined;
+
+   GST_LOG_OBJECT (demux, "flow return: %s", gst_flow_get_name (ret));
+
+   combined = gst_flow_combiner_update_flow (demux->flowcombiner, ret);
+   /* while we still expect new pads, ignore not-linked */
+   if (G_UNLIKELY (demux->need_no_more_pads && combined == GST_FLOW_NOT_LINKED))
+     combined = GST_FLOW_OK;
+
+   GST_LOG_OBJECT (demux, "combined flow return: %s",
+       gst_flow_get_name (combined));
+   return combined;
+ }
+
+ /* Sink pad chain function. Accumulates incoming buffers in the adapter
+  * (prepending any bytes saved during reverse playback), lets the PES
+  * filter continue any in-progress packet, then repeatedly resyncs on
+  * start codes and dispatches to the pack/system-header/PSM parsers or
+  * the PES filter. Takes ownership of @buffer. */
+ static GstFlowReturn
+ gst_ps_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+   GstPsDemux *demux = GST_PS_DEMUX (parent);
+   GstFlowReturn ret = GST_FLOW_OK;
+   guint32 avail;
+   gboolean save, discont;
+   discont = GST_BUFFER_IS_DISCONT (buffer);
+   if (discont) {
+     GST_LOG_OBJECT (demux,
+         "Received buffer with discont flag and" " offset %"
+         G_GUINT64_FORMAT, GST_BUFFER_OFFSET (buffer));
+     gst_pes_filter_drain (&demux->filter);
+     gst_ps_demux_mark_discont (demux, TRUE, FALSE);
+     /* mark discont on all streams */
+     if (demux->sink_segment.rate >= 0.0) {
+       demux->current_scr = G_MAXUINT64;
+       demux->bytes_since_scr = 0;
+     }
+   } else {
+     GST_LOG_OBJECT (demux,
+         "Received buffer with offset %" G_GUINT64_FORMAT,
+         GST_BUFFER_OFFSET (buffer));
+   }
+
+   /* We keep the offset to interpolate SCR */
+   demux->adapter_offset = GST_BUFFER_OFFSET (buffer);
+   gst_adapter_push (demux->adapter, buffer);
+   demux->bytes_since_scr += gst_buffer_get_size (buffer);
+   avail = gst_adapter_available (demux->rev_adapter);
+   if (avail > 0) {
+     GST_LOG_OBJECT (demux, "appending %u saved bytes", avail);
+     /* if we have a previous reverse chunk, append this now */
+     /* FIXME this code assumes we receive discont buffers all the
+      * time */
+     gst_adapter_push (demux->adapter,
+         gst_adapter_take_buffer (demux->rev_adapter, avail));
+   }
+
+   avail = gst_adapter_available (demux->adapter);
+   GST_LOG_OBJECT (demux, "avail now: %d, state %d", avail, demux->filter.state);
+   /* first, give the PES filter a chance to continue a packet it was
+    * already pushing or skipping */
+   switch (demux->filter.state) {
+     case STATE_DATA_SKIP:
+     case STATE_DATA_PUSH:
+       ret = gst_pes_filter_process (&demux->filter);
+       break;
+     case STATE_HEADER_PARSE:
+       break;
+     default:
+       break;
+   }
+
+   switch (ret) {
+     case GST_FLOW_NEED_MORE_DATA:
+       /* Go and get more data */
+       ret = GST_FLOW_OK;
+       goto done;
+     case GST_FLOW_LOST_SYNC:
+       /* for FLOW_OK or lost-sync, carry onto resync */
+       ret = GST_FLOW_OK;
+       break;
+     case GST_FLOW_OK:
+       break;
+     default:
+       /* Any other return value should be sent upstream immediately */
+       goto done;
+   }
+
+   /* align adapter data to sync boundary, we keep the data up to the next sync
+    * point. */
+   save = TRUE;
+   while (gst_ps_demux_resync (demux, save)) {
+     gboolean ps_sync = TRUE;
+     if (G_UNLIKELY (demux->flushing)) {
+       ret = GST_FLOW_FLUSHING;
+       goto done;
+     }
+
+     /* now switch on last synced byte */
+     switch (demux->last_sync_code) {
+       case ID_PS_PACK_START_CODE:
+         ret = gst_ps_demux_parse_pack_start (demux);
+         break;
+       case ID_PS_SYSTEM_HEADER_START_CODE:
+         ret = gst_ps_demux_parse_sys_head (demux);
+         break;
+       case ID_PS_END_CODE:
+         /* Skip final 4 bytes */
+         gst_adapter_flush (demux->adapter, 4);
+         ADAPTER_OFFSET_FLUSH (4);
+         ret = GST_FLOW_OK;
+         goto done;
+       case ID_PS_PROGRAM_STREAM_MAP:
+         ret = gst_ps_demux_parse_psm (demux);
+         break;
+       default:
+         if (gst_ps_demux_is_pes_sync (demux->last_sync_code)) {
+           ret = gst_pes_filter_process (&demux->filter);
+         } else {
+           GST_DEBUG_OBJECT (demux, "sync_code=%08x, non PES sync found"
+               ", continuing", demux->last_sync_code);
+           ps_sync = FALSE;
+           ret = GST_FLOW_LOST_SYNC;
+         }
+         break;
+     }
+     /* if we found a ps sync, we stop saving the data, any non-ps sync gets
+      * saved up to the next ps sync. */
+     if (ps_sync)
+       save = FALSE;
+     switch (ret) {
+       case GST_FLOW_NEED_MORE_DATA:
+         GST_DEBUG_OBJECT (demux, "need more data");
+         ret = GST_FLOW_OK;
+         goto done;
+       case GST_FLOW_LOST_SYNC:
+         /* skip the 3-byte start-code prefix so resync makes progress */
+         if (!save || demux->sink_segment.rate >= 0.0) {
+           GST_DEBUG_OBJECT (demux, "flushing 3 bytes");
+           gst_adapter_flush (demux->adapter, 3);
+           ADAPTER_OFFSET_FLUSH (3);
+         } else {
+           GST_DEBUG_OBJECT (demux, "saving 3 bytes");
+           gst_adapter_push (demux->rev_adapter,
+               gst_adapter_take_buffer (demux->adapter, 3));
+         }
+         ret = GST_FLOW_OK;
+         break;
+       default:
+         ret = gst_ps_demux_combine_flows (demux, ret);
+         if (ret != GST_FLOW_OK)
+           goto done;
+         break;
+     }
+   }
+ done:
+   return ret;
+ }
+
+ /* Element state change: set up the PES filter and its callbacks when
+  * going NULL->READY, reset the demuxer on PAUSED->READY and tear the
+  * filter down on READY->NULL. Upward work happens before chaining up,
+  * downward work after, per the usual GStreamer pattern. */
+ static GstStateChangeReturn
+ gst_ps_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+   GstPsDemux *demux = GST_PS_DEMUX (element);
+   GstStateChangeReturn result;
+   switch (transition) {
+     case GST_STATE_CHANGE_NULL_TO_READY:
+       gst_pes_filter_init (&demux->filter, demux->adapter,
+           &demux->adapter_offset);
+       gst_pes_filter_set_callbacks (&demux->filter,
+           (GstPESFilterData) gst_ps_demux_data_cb,
+           (GstPESFilterResync) gst_ps_demux_resync_cb, demux);
+       demux->filter.gather_pes = TRUE;
+       break;
+     case GST_STATE_CHANGE_READY_TO_PAUSED:
+       break;
+     default:
+       break;
+   }
+
+   result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+   switch (transition) {
+     case GST_STATE_CHANGE_PAUSED_TO_READY:
+       gst_ps_demux_reset (demux);
+       break;
+     case GST_STATE_CHANGE_READY_TO_NULL:
+       gst_pes_filter_uninit (&demux->filter);
+       break;
+     default:
+       break;
+   }
+
+   return result;
+ }
+
+ /* Local helper: record the running position on @segment, initialising
+  * the segment's format on first use. */
+ static void
+ gst_segment_set_position (GstSegment * segment, GstFormat format,
+     guint64 position)
+ {
+   if (segment->format == GST_FORMAT_UNDEFINED)
+     segment->format = format;
+
+   segment->position = position;
+ }
+
+ /* Local helper: record the duration on @segment, initialising the
+  * segment's format on first use. */
+ static void
+ gst_segment_set_duration (GstSegment * segment, GstFormat format,
+     guint64 duration)
+ {
+   if (segment->format == GST_FORMAT_UNDEFINED)
+     segment->format = format;
+
+   segment->duration = duration;
+ }
--- /dev/null
- if (!_pmt_stream_in_program (program, stream))
+ /*
+ * mpegtsbase.c -
+ * Copyright (C) 2007 Alessandro Decina
+ * 2010 Edward Hervey
+ * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
+ * Author: Edward Hervey <bilboed@bilboed.com>, Collabora Ltd.
+ *
+ * Authors:
+ * Alessandro Decina <alessandro@nnva.org>
+ * Zaheer Abbas Merali <zaheerabbas at merali dot org>
+ * Edward Hervey <edward.hervey@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <stdlib.h>
+ #include <string.h>
+
+ #include <glib.h>
+
+ #include <gst/gst-i18n-plugin.h>
+ #include "mpegtsbase.h"
+ #include "gstmpegdesc.h"
+
+ #define RUNNING_STATUS_RUNNING 4
+
+ GST_DEBUG_CATEGORY_STATIC (mpegts_base_debug);
+ #define GST_CAT_DEFAULT mpegts_base_debug
+
+ static GQuark QUARK_PROGRAMS;
+ static GQuark QUARK_PROGRAM_NUMBER;
+ static GQuark QUARK_PID;
+ static GQuark QUARK_PCR_PID;
+ static GQuark QUARK_STREAMS;
+ static GQuark QUARK_STREAM_TYPE;
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpegts, " "systemstream = (boolean) true ")
+ );
+
+ #define DEFAULT_IGNORE_PCR FALSE
+
+ enum
+ {
+ PROP_0,
+ PROP_PARSE_PRIVATE_SECTIONS,
+ PROP_IGNORE_PCR,
+ /* FILL ME */
+ };
+
+ static void mpegts_base_dispose (GObject * object);
+ static void mpegts_base_finalize (GObject * object);
+ static void mpegts_base_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void mpegts_base_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static void mpegts_base_free_program (MpegTSBaseProgram * program);
+ static void mpegts_base_deactivate_program (MpegTSBase * base,
+ MpegTSBaseProgram * program);
+ static gboolean mpegts_base_sink_activate (GstPad * pad, GstObject * parent);
+ static gboolean mpegts_base_sink_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+ static GstFlowReturn mpegts_base_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ static gboolean mpegts_base_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean mpegts_base_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean mpegts_base_default_sink_query (MpegTSBase * base,
+ GstQuery * query);
+ static GstStateChangeReturn mpegts_base_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean mpegts_base_get_tags_from_eit (MpegTSBase * base,
+ GstMpegtsSection * section);
+ static gboolean mpegts_base_parse_atsc_mgt (MpegTSBase * base,
+ GstMpegtsSection * section);
+ static gboolean remove_each_program (gpointer key, MpegTSBaseProgram * program,
+ MpegTSBase * base);
+
+ static void
+ _extra_init (void)
+ {
+ QUARK_PROGRAMS = g_quark_from_string ("programs");
+ QUARK_PROGRAM_NUMBER = g_quark_from_string ("program-number");
+ QUARK_PID = g_quark_from_string ("pid");
+ QUARK_PCR_PID = g_quark_from_string ("pcr-pid");
+ QUARK_STREAMS = g_quark_from_string ("streams");
+ QUARK_STREAM_TYPE = g_quark_from_string ("stream-type");
+ GST_DEBUG_CATEGORY_INIT (mpegts_base_debug, "mpegtsbase", 0,
+ "MPEG transport stream base class");
+ gst_mpegts_initialize ();
+ }
+
+ #define mpegts_base_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (MpegTSBase, mpegts_base, GST_TYPE_ELEMENT,
+ _extra_init ());
+
+ /* Default implementation is that mpegtsbase can remove any program */
+ static gboolean
+ mpegts_base_can_remove_program (MpegTSBase * base, MpegTSBaseProgram * program)
+ {
+ return TRUE;
+ }
+
+ static void
+ mpegts_base_class_init (MpegTSBaseClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+
+ klass->can_remove_program = mpegts_base_can_remove_program;
+
+ element_class = GST_ELEMENT_CLASS (klass);
+ element_class->change_state = mpegts_base_change_state;
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gobject_class->dispose = mpegts_base_dispose;
+ gobject_class->finalize = mpegts_base_finalize;
+ gobject_class->set_property = mpegts_base_set_property;
+ gobject_class->get_property = mpegts_base_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_PARSE_PRIVATE_SECTIONS,
+ g_param_spec_boolean ("parse-private-sections", "Parse private sections",
+ "Parse private sections", FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstMpegtsBase:ignore-pcr:
+ *
+ * Ignore PCR (Program Clock Reference) data from MPEG-TS PSI.
+ * This can help with playback of some broken files.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_IGNORE_PCR,
+ g_param_spec_boolean ("ignore-pcr", "Ignore PCR stream for timing",
+ "Ignore PCR stream for timing", DEFAULT_IGNORE_PCR,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ klass->sink_query = GST_DEBUG_FUNCPTR (mpegts_base_default_sink_query);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_MPEGTS_BASE, 0);
+ }
+
+ static void
+ mpegts_base_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ MpegTSBase *base = GST_MPEGTS_BASE (object);
+
+ switch (prop_id) {
+ case PROP_PARSE_PRIVATE_SECTIONS:
+ base->parse_private_sections = g_value_get_boolean (value);
+ break;
+ case PROP_IGNORE_PCR:
+ base->ignore_pcr = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+ static void
+ mpegts_base_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ MpegTSBase *base = GST_MPEGTS_BASE (object);
+
+ switch (prop_id) {
+ case PROP_PARSE_PRIVATE_SECTIONS:
+ g_value_set_boolean (value, base->parse_private_sections);
+ break;
+ case PROP_IGNORE_PCR:
+ g_value_set_boolean (value, base->ignore_pcr);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+
+ static void
+ mpegts_base_reset (MpegTSBase * base)
+ {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+
+ mpegts_packetizer_clear (base->packetizer);
+ memset (base->is_pes, 0, 1024);
+ memset (base->known_psi, 0, 1024);
+
+ /* FIXME : Actually these are not *always* known SI streams
+ * depending on the variant of mpeg-ts being used. */
+
+ /* Known PIDs : PAT, TSDT, IPMP CIT */
+ MPEGTS_BIT_SET (base->known_psi, 0);
+ MPEGTS_BIT_SET (base->known_psi, 2);
+ MPEGTS_BIT_SET (base->known_psi, 3);
+ /* TDT, TOT, ST */
+ MPEGTS_BIT_SET (base->known_psi, 0x14);
+ /* network synchronization */
+ MPEGTS_BIT_SET (base->known_psi, 0x15);
+
+ /* ATSC */
+ MPEGTS_BIT_SET (base->known_psi, 0x1ffb);
+
+ if (base->pat) {
+ g_ptr_array_unref (base->pat);
+ base->pat = NULL;
+ }
+
+ gst_segment_init (&base->segment, GST_FORMAT_UNDEFINED);
+ gst_segment_init (&base->out_segment, GST_FORMAT_UNDEFINED);
+ base->last_seek_seqnum = GST_SEQNUM_INVALID;
+
+ base->mode = BASE_MODE_STREAMING;
+ base->seen_pat = FALSE;
+ base->seek_offset = -1;
+
+ g_hash_table_foreach_remove (base->programs, (GHRFunc) remove_each_program,
+ base);
+
+ base->streams_aware = GST_OBJECT_PARENT (base)
+ && GST_OBJECT_FLAG_IS_SET (GST_OBJECT_PARENT (base),
+ GST_BIN_FLAG_STREAMS_AWARE);
+ GST_DEBUG_OBJECT (base, "Streams aware : %d", base->streams_aware);
+
+ if (klass->reset)
+ klass->reset (base);
+ }
+
+ static void
+ mpegts_base_init (MpegTSBase * base)
+ {
+ base->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_activate_function (base->sinkpad, mpegts_base_sink_activate);
+ gst_pad_set_activatemode_function (base->sinkpad,
+ mpegts_base_sink_activate_mode);
+ gst_pad_set_chain_function (base->sinkpad, mpegts_base_chain);
+ gst_pad_set_event_function (base->sinkpad, mpegts_base_sink_event);
+ gst_pad_set_query_function (base->sinkpad, mpegts_base_sink_query);
+ gst_element_add_pad (GST_ELEMENT (base), base->sinkpad);
+
+ base->disposed = FALSE;
+ base->packetizer = mpegts_packetizer_new ();
+ base->programs = g_hash_table_new_full (g_direct_hash, g_direct_equal,
+ NULL, (GDestroyNotify) mpegts_base_free_program);
+
+ base->parse_private_sections = FALSE;
+ base->is_pes = g_new0 (guint8, 1024);
+ base->known_psi = g_new0 (guint8, 1024);
+ base->program_size = sizeof (MpegTSBaseProgram);
+ base->stream_size = sizeof (MpegTSBaseStream);
+
+ base->push_data = TRUE;
+ base->push_section = TRUE;
+ base->ignore_pcr = DEFAULT_IGNORE_PCR;
+
+ mpegts_base_reset (base);
+ }
+
+ static void
+ mpegts_base_dispose (GObject * object)
+ {
+ MpegTSBase *base = GST_MPEGTS_BASE (object);
+
+ if (!base->disposed) {
+ g_object_unref (base->packetizer);
+ base->disposed = TRUE;
+ g_free (base->known_psi);
+ g_free (base->is_pes);
+ }
+
+ if (G_OBJECT_CLASS (parent_class)->dispose)
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ mpegts_base_finalize (GObject * object)
+ {
+ MpegTSBase *base = GST_MPEGTS_BASE (object);
+
+ if (base->pat) {
+ g_ptr_array_unref (base->pat);
+ base->pat = NULL;
+ }
+ g_hash_table_destroy (base->programs);
+
+ if (G_OBJECT_CLASS (parent_class)->finalize)
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+
+ /* Returns NULL if no matching descriptor is found.   *
+ * Otherwise returns a const descriptor owned by the  *
+ * stream's descriptor array; it must not be freed.   */
+ const GstMpegtsDescriptor *
+ mpegts_get_descriptor_from_stream (MpegTSBaseStream * stream, guint8 tag)
+ {
+ GstMpegtsPMTStream *pmt = stream->stream;
+
+ GST_DEBUG ("Searching for tag 0x%02x in stream 0x%04x (stream_type 0x%02x)",
+ tag, stream->pid, stream->stream_type);
+
+ return gst_mpegts_find_descriptor (pmt->descriptors, tag);
+ }
+
+ const GstMpegtsDescriptor *
+ mpegts_get_descriptor_from_stream_with_extension (MpegTSBaseStream * stream,
+ guint8 tag, guint8 tag_extension)
+ {
+ GstMpegtsPMTStream *pmt = stream->stream;
+
+ GST_DEBUG ("Searching for tag 0x%02x tag_extension 0x%02x "
+ "in stream 0x%04x (stream_type 0x%02x)",
+ tag, tag_extension, stream->pid, stream->stream_type);
+
+ return gst_mpegts_find_descriptor_with_extension (pmt->descriptors, tag,
+ tag_extension);
+ }
+
+ typedef struct
+ {
+ gboolean res;
+ guint16 pid;
+ } PIDLookup;
+
+ static void
+ foreach_pid_in_program (gpointer key, MpegTSBaseProgram * program,
+ PIDLookup * lookup)
+ {
+ if (!program->active)
+ return;
+ if (program->streams[lookup->pid])
+ lookup->res = TRUE;
+ }
+
+ static gboolean
+ mpegts_pid_in_active_programs (MpegTSBase * base, guint16 pid)
+ {
+ PIDLookup lookup;
+
+ lookup.res = FALSE;
+ lookup.pid = pid;
+ g_hash_table_foreach (base->programs, (GHFunc) foreach_pid_in_program,
+ &lookup);
+
+ return lookup.res;
+ }
+
+ /* Returns NULL if no matching descriptor is found.   *
+ * Otherwise returns a const descriptor owned by the  *
+ * program's descriptor array; it must not be freed.  */
+ const GstMpegtsDescriptor *
+ mpegts_get_descriptor_from_program (MpegTSBaseProgram * program, guint8 tag)
+ {
+ const GstMpegtsPMT *pmt = program->pmt;
+
+ return gst_mpegts_find_descriptor (pmt->descriptors, tag);
+ }
+
+ static gchar *
+ _get_upstream_id (GstElement * element, GstPad * sinkpad)
+ {
+ gchar *upstream_id = gst_pad_get_stream_id (sinkpad);
+
+ if (!upstream_id) {
+ /* Try to create one from the upstream URI, else use a random number */
+ GstQuery *query;
+ gchar *uri = NULL;
+
+ /* Try to generate one from the URI query and
+ * if it fails take a random number instead */
+ query = gst_query_new_uri ();
+ if (gst_element_query (element, query)) {
+ gst_query_parse_uri (query, &uri);
+ }
+
+ if (uri) {
+ GChecksum *cs;
+
+ /* And then generate an SHA256 sum of the URI */
+ cs = g_checksum_new (G_CHECKSUM_SHA256);
+ g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+ g_free (uri);
+ upstream_id = g_strdup (g_checksum_get_string (cs));
+ g_checksum_free (cs);
+ } else {
+ /* Just get some random number if the URI query fails */
+ GST_FIXME_OBJECT (element, "Creating random stream-id, consider "
+ "implementing a deterministic way of creating a stream-id");
+ upstream_id =
+ g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (),
+ g_random_int (), g_random_int ());
+ }
+
+ gst_query_unref (query);
+ }
+ return upstream_id;
+ }
+
+ static MpegTSBaseProgram *
+ mpegts_base_new_program (MpegTSBase * base,
+ gint program_number, guint16 pmt_pid)
+ {
+ MpegTSBaseProgram *program;
+ gchar *upstream_id, *stream_id;
+
+ GST_DEBUG_OBJECT (base, "program_number : %d, pmt_pid : %d",
+ program_number, pmt_pid);
+
+ program = g_malloc0 (base->program_size);
+ program->program_number = program_number;
+ program->pmt_pid = pmt_pid;
+ program->pcr_pid = G_MAXUINT16;
+ program->streams = g_new0 (MpegTSBaseStream *, 0x2000);
+ program->patcount = 0;
+
+ upstream_id = _get_upstream_id ((GstElement *) base, base->sinkpad);
+ stream_id = g_strdup_printf ("%s:%d", upstream_id, program_number);
+ program->collection = gst_stream_collection_new (stream_id);
+ g_free (stream_id);
+ g_free (upstream_id);
+
+ return program;
+ }
+
+ MpegTSBaseProgram *
+ mpegts_base_add_program (MpegTSBase * base,
+ gint program_number, guint16 pmt_pid)
+ {
+ MpegTSBaseProgram *program;
+
+ GST_DEBUG_OBJECT (base, "program_number : %d, pmt_pid : %d",
+ program_number, pmt_pid);
+
+ program = mpegts_base_new_program (base, program_number, pmt_pid);
+
+ /* Mark the PMT PID as being a known PSI PID */
+ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->known_psi, pmt_pid))) {
+ GST_FIXME ("Refcounting. Setting twice a PID (0x%04x) as known PSI",
+ pmt_pid);
+ }
+ MPEGTS_BIT_SET (base->known_psi, pmt_pid);
+
+ g_hash_table_insert (base->programs,
+ GINT_TO_POINTER (program_number), program);
+
+ return program;
+ }
+
+ MpegTSBaseProgram *
+ mpegts_base_get_program (MpegTSBase * base, gint program_number)
+ {
+ MpegTSBaseProgram *program;
+
+ program = (MpegTSBaseProgram *) g_hash_table_lookup (base->programs,
+ GINT_TO_POINTER ((gint) program_number));
+
+ return program;
+ }
+
+ static MpegTSBaseProgram *
+ mpegts_base_steal_program (MpegTSBase * base, gint program_number)
+ {
+ MpegTSBaseProgram *program;
+
+ program = (MpegTSBaseProgram *) g_hash_table_lookup (base->programs,
+ GINT_TO_POINTER ((gint) program_number));
+
+ if (program)
+ g_hash_table_steal (base->programs,
+ GINT_TO_POINTER ((gint) program_number));
+
+ return program;
+ }
+
+ static void
+ mpegts_base_free_stream (MpegTSBaseStream * stream)
+ {
+ if (stream->stream_object)
+ gst_object_unref (stream->stream_object);
+ if (stream->stream_id)
+ g_free (stream->stream_id);
+ g_free (stream);
+ }
+
+ static void
+ mpegts_base_free_program (MpegTSBaseProgram * program)
+ {
+ GList *tmp;
+
+ if (program->pmt) {
+ gst_mpegts_section_unref (program->section);
+ program->pmt = NULL;
+ }
+
+ /* FIXME FIXME FIXME FREE STREAM OBJECT ! */
+ for (tmp = program->stream_list; tmp; tmp = tmp->next)
+ mpegts_base_free_stream ((MpegTSBaseStream *) tmp->data);
+
+ if (program->stream_list)
+ g_list_free (program->stream_list);
+
+ g_free (program->streams);
+
+ if (program->tags)
+ gst_tag_list_unref (program->tags);
+ if (program->collection)
+ gst_object_unref (program->collection);
+
+ g_free (program);
+ }
+
+ void
+ mpegts_base_deactivate_and_free_program (MpegTSBase * base,
+ MpegTSBaseProgram * program)
+ {
+ GST_DEBUG_OBJECT (base, "program_number : %d", program->program_number);
+
+ mpegts_base_deactivate_program (base, program);
+ mpegts_base_free_program (program);
+ }
+
+ static void
+ mpegts_base_remove_program (MpegTSBase * base, gint program_number)
+ {
+ GST_DEBUG_OBJECT (base, "program_number : %d", program_number);
+
+ g_hash_table_remove (base->programs, GINT_TO_POINTER (program_number));
+ }
+
+ static guint32
+ get_registration_from_descriptors (GPtrArray * descriptors)
+ {
+ const GstMpegtsDescriptor *desc;
+
+ if ((desc =
+ gst_mpegts_find_descriptor (descriptors,
+ GST_MTS_DESC_REGISTRATION))) {
+ if (G_UNLIKELY (desc->length < 4)) {
+ GST_WARNING ("Registration descriptor with length < 4. (Corrupted ?)");
+ } else
+ return GST_READ_UINT32_BE (desc->data + 2);
+ }
+
+ return 0;
+ }
+
+ static MpegTSBaseStream *
+ mpegts_base_program_add_stream (MpegTSBase * base,
+ MpegTSBaseProgram * program, guint16 pid, guint8 stream_type,
+ GstMpegtsPMTStream * stream)
+ {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+ MpegTSBaseStream *bstream;
+
+ GST_DEBUG ("pid:0x%04x, stream_type:0x%03x", pid, stream_type);
+
+ /* FIXME : PID information/nature might change through time.
+ * We therefore *do* want to be able to replace an existing stream
+ * with updated information */
+ if (G_UNLIKELY (program->streams[pid])) {
+ if (stream_type != 0xff)
+ GST_WARNING ("Stream already present !");
+ return NULL;
+ }
+
+ bstream = g_malloc0 (base->stream_size);
+ bstream->stream_id =
+ g_strdup_printf ("%s/%08x",
+ gst_stream_collection_get_upstream_id (program->collection), pid);
+ bstream->pid = pid;
+ bstream->stream_type = stream_type;
+ bstream->stream = stream;
+ /* We don't yet know the stream type, subclasses will fill that */
+ bstream->stream_object = gst_stream_new (bstream->stream_id, NULL,
+ GST_STREAM_TYPE_UNKNOWN, GST_STREAM_FLAG_NONE);
+ if (stream) {
+ bstream->registration_id =
+ get_registration_from_descriptors (stream->descriptors);
+ GST_DEBUG ("PID 0x%04x, registration_id %" SAFE_FOURCC_FORMAT,
+ bstream->pid, SAFE_FOURCC_ARGS (bstream->registration_id));
+ }
+
+ program->streams[pid] = bstream;
+ program->stream_list = g_list_append (program->stream_list, bstream);
+
+ if (klass->stream_added)
+ if (klass->stream_added (base, bstream, program)) {
+ gst_stream_collection_add_stream (program->collection,
+ (GstStream *) gst_object_ref (bstream->stream_object));
+ bstream->in_collection = TRUE;
+ }
+
+
+ return bstream;
+ }
+
+ static void
+ mpegts_base_program_remove_stream (MpegTSBase * base,
+ MpegTSBaseProgram * program, guint16 pid)
+ {
+ MpegTSBaseClass *klass;
+ MpegTSBaseStream *stream = program->streams[pid];
+
+ GST_DEBUG ("pid:0x%04x", pid);
+
+ if (G_UNLIKELY (stream == NULL)) {
+ /* Can happen if the PCR PID is the same as an audio/video PID */
+ GST_DEBUG ("Stream already removed");
+ return;
+ }
+
+ klass = GST_MPEGTS_BASE_GET_CLASS (base);
+
+ /* If subclass needs it, inform it of the stream we are about to remove */
+ if (klass->stream_removed)
+ klass->stream_removed (base, stream);
+
+ program->stream_list = g_list_remove_all (program->stream_list, stream);
+ mpegts_base_free_stream (stream);
+ program->streams[pid] = NULL;
+ }
+
+ /* Check if pmtstream is already present in the program */
+ static inline gboolean
+ _stream_in_pmt (const GstMpegtsPMT * pmt, MpegTSBaseStream * stream)
+ {
+ guint i, nbstreams = pmt->streams->len;
+
+ for (i = 0; i < nbstreams; i++) {
+ GstMpegtsPMTStream *pmt_stream = g_ptr_array_index (pmt->streams, i);
+
+ if (pmt_stream->pid == stream->pid &&
+ pmt_stream->stream_type == stream->stream_type)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static inline gboolean
+ _pmt_stream_in_program (MpegTSBaseProgram * program,
+ GstMpegtsPMTStream * stream)
+ {
+ MpegTSBaseStream *old_stream = program->streams[stream->pid];
+ if (!old_stream)
+ return FALSE;
+ return old_stream->stream_type == stream->stream_type;
+ }
+
+ static gboolean
+ mpegts_base_update_program (MpegTSBase * base, MpegTSBaseProgram * program,
+ GstMpegtsSection * section, const GstMpegtsPMT * pmt)
+ {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+ const gchar *stream_id =
+ gst_stream_collection_get_upstream_id (program->collection);
+ GstStreamCollection *collection;
+ GList *tmp, *toremove;
+ guint i, nbstreams;
+
+ /* Create new collection */
+ collection = gst_stream_collection_new (stream_id);
+ gst_object_unref (program->collection);
+ program->collection = collection;
+
+ /* Replace section and pmt with the new one */
+ gst_mpegts_section_unref (program->section);
+ program->section = gst_mpegts_section_ref (section);
+ program->pmt = pmt;
+
+ /* Copy over the GstStreams that still exist into the collection */
+ for (tmp = program->stream_list; tmp; tmp = tmp->next) {
+ MpegTSBaseStream *stream = (MpegTSBaseStream *) tmp->data;
+ if (_stream_in_pmt (pmt, stream) && stream->in_collection) {
+ gst_stream_collection_add_stream (program->collection,
+ gst_object_ref (stream->stream_object));
+ }
+ }
+
+ /* Add new streams (will also create and add gststream to the collection) */
+ nbstreams = pmt->streams->len;
+ for (i = 0; i < nbstreams; i++) {
+ GstMpegtsPMTStream *stream = g_ptr_array_index (pmt->streams, i);
++ if (!_pmt_stream_in_program (program, stream)) {
++#ifdef TIZEN_FEATURE_TSDEMUX_UPDATE_STREAM
++ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->is_pes, stream->pid)))
++ GST_FIXME
++ ("Refcounting issue. Setting twice a PID (0x%04x) as known PES",
++ stream->pid);
++ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->known_psi, stream->pid))) {
++ GST_FIXME
++ ("Refcounting issue. Setting a known PSI PID (0x%04x) as known PES",
++ stream->pid);
++ MPEGTS_BIT_UNSET (base->known_psi, stream->pid);
++ }
++ MPEGTS_BIT_SET (base->is_pes, stream->pid);
++#endif
+ mpegts_base_program_add_stream (base, program, stream->pid,
+ stream->stream_type, stream);
++ }
+ }
+
+ /* Call subclass update */
+ if (klass->update_program)
+ klass->update_program (base, program);
+
+ /* Remove streams no longer present */
+ toremove = NULL;
+ for (tmp = program->stream_list; tmp; tmp = tmp->next) {
+ MpegTSBaseStream *stream = (MpegTSBaseStream *) tmp->data;
+ if (!_stream_in_pmt (pmt, stream))
+ toremove = g_list_prepend (toremove, stream);
+ }
+ for (tmp = toremove; tmp; tmp = tmp->next) {
+ MpegTSBaseStream *stream = (MpegTSBaseStream *) tmp->data;
+ mpegts_base_program_remove_stream (base, program, stream->pid);
++#ifdef TIZEN_FEATURE_TSDEMUX_UPDATE_STREAM
++ MPEGTS_BIT_UNSET (base->is_pes, stream->pid);
++#endif
+ }
+ return TRUE;
+ }
+
+
+ static gboolean
+ _stream_is_private_section (const GstMpegtsPMT * pmt,
+ GstMpegtsPMTStream * stream)
+ {
+ switch (stream->stream_type) {
+ case GST_MPEGTS_STREAM_TYPE_SCTE_DSMCC_DCB:
+ case GST_MPEGTS_STREAM_TYPE_SCTE_SIGNALING:
+ {
+ guint32 registration_id =
+ get_registration_from_descriptors (stream->descriptors);
+ /* Not a private section stream */
+ if (registration_id != DRF_ID_CUEI && registration_id != DRF_ID_ETV1)
+ return FALSE;
+ }
+ case GST_MPEGTS_STREAM_TYPE_PRIVATE_SECTIONS:
+ case GST_MPEGTS_STREAM_TYPE_MHEG:
+ case GST_MPEGTS_STREAM_TYPE_DSM_CC:
+ case GST_MPEGTS_STREAM_TYPE_DSMCC_A:
+ case GST_MPEGTS_STREAM_TYPE_DSMCC_B:
+ case GST_MPEGTS_STREAM_TYPE_DSMCC_C:
+ case GST_MPEGTS_STREAM_TYPE_DSMCC_D:
+ case GST_MPEGTS_STREAM_TYPE_SL_FLEXMUX_SECTIONS:
+ case GST_MPEGTS_STREAM_TYPE_METADATA_SECTIONS:
+ /* known PSI streams */
+ return TRUE;
+ case GST_MPEGTS_STREAM_TYPE_SCTE_SIT:
+ {
+ guint32 registration_id =
+ get_registration_from_descriptors (pmt->descriptors);
+ /* Not a private section stream */
+ if (registration_id != DRF_ID_CUEI)
+ return FALSE;
+ return TRUE;
+ }
+ default:
+ return FALSE;
+ }
+ }
+
+ /* Return TRUE if programs are equal */
+ static gboolean
+ mpegts_base_is_same_program (MpegTSBase * base, MpegTSBaseProgram * oldprogram,
+ guint16 new_pmt_pid, const GstMpegtsPMT * new_pmt)
+ {
+ guint i, nbstreams;
+ MpegTSBaseStream *oldstream;
+ gboolean sawpcrpid = FALSE;
+
+ if (oldprogram->pmt_pid != new_pmt_pid) {
+ GST_DEBUG ("Different pmt_pid (new:0x%04x, old:0x%04x)", new_pmt_pid,
+ oldprogram->pmt_pid);
+ return FALSE;
+ }
+
+ if (!base->ignore_pcr && oldprogram->pcr_pid != new_pmt->pcr_pid) {
+ GST_DEBUG ("Different pcr_pid (new:0x%04x, old:0x%04x)",
+ new_pmt->pcr_pid, oldprogram->pcr_pid);
+ return FALSE;
+ }
+
+ /* Check the streams */
+ nbstreams = new_pmt->streams->len;
+ for (i = 0; i < nbstreams; ++i) {
+ GstMpegtsPMTStream *stream = g_ptr_array_index (new_pmt->streams, i);
+
+ oldstream = oldprogram->streams[stream->pid];
+ if (!oldstream) {
+ GST_DEBUG ("New stream 0x%04x not present in old program", stream->pid);
+ return FALSE;
+ }
+ if (oldstream->stream_type != stream->stream_type) {
+ GST_DEBUG
+ ("New stream 0x%04x has a different stream type (new:%d, old:%d)",
+ stream->pid, stream->stream_type, oldstream->stream_type);
+ return FALSE;
+ }
+ if (stream->pid == oldprogram->pcr_pid)
+ sawpcrpid = TRUE;
+ }
+
+ /* If the pcr is not shared with an existing stream, we'll have one extra stream */
+ if (!sawpcrpid)
+ nbstreams += 1;
+
+ if (nbstreams != g_list_length (oldprogram->stream_list)) {
+ GST_DEBUG ("Different number of streams (new:%d, old:%d)",
+ nbstreams, g_list_length (oldprogram->stream_list));
+ return FALSE;
+ }
+
+ GST_DEBUG ("Programs are equal");
+ return TRUE;
+ }
+
+ /* Return TRUE if program is an update
+ *
+ * A program is equal if:
+ * * The program number is the same (will be if it enters this function)
+ * * AND The PMT PID is equal to the old one
+ * * AND It contains at least one stream from the previous program
+ *
+ * Changes that are acceptable are therefore:
+ * * New streams appearing
+ * * Old streams going away
+ * * PCR PID changing
+ *
+ * Unclear changes:
+ * * PMT PID being changed ?
+ * * Properties of elementary stream being changed ? (new tags ? metadata ?)
+ */
+ static gboolean
+ mpegts_base_is_program_update (MpegTSBase * base,
+ MpegTSBaseProgram * oldprogram, guint16 new_pmt_pid,
+ const GstMpegtsPMT * new_pmt)
+ {
+ guint i, nbstreams;
+ MpegTSBaseStream *oldstream;
+
+ if (oldprogram->pmt_pid != new_pmt_pid) {
+ /* FIXME/CHECK: Can a program be updated by just changing its PID
+ * in the PAT ? */
+ GST_DEBUG ("Different pmt_pid (new:0x%04x, old:0x%04x)", new_pmt_pid,
+ oldprogram->pmt_pid);
+ return FALSE;
+ }
+
+ /* Check if at least one stream from the previous program is still present
+ * in the new program */
+
+ /* Check the streams */
+ nbstreams = new_pmt->streams->len;
+ for (i = 0; i < nbstreams; ++i) {
+ GstMpegtsPMTStream *stream = g_ptr_array_index (new_pmt->streams, i);
+
+ oldstream = oldprogram->streams[stream->pid];
+ if (!oldstream) {
+ GST_DEBUG ("New stream 0x%04x not present in old program", stream->pid);
+ } else if (oldstream->stream_type != stream->stream_type) {
+ GST_DEBUG
+ ("New stream 0x%04x has a different stream type (new:%d, old:%d)",
+ stream->pid, stream->stream_type, oldstream->stream_type);
+ } else if (!_stream_is_private_section (new_pmt, stream)) {
+ /* FIXME : We should actually be checking a bit deeper,
+ * especially for private streams (where the differentiation is
+ * done at the registration level) */
+ GST_DEBUG
+ ("Stream 0x%04x is identical (stream_type %d) ! Program is an update",
+ stream->pid, stream->stream_type);
+ return TRUE;
+ }
+ }
+
+ GST_DEBUG ("Program is not an update of the previous one");
+ return FALSE;
+ }
+
+ static void
+ mpegts_base_deactivate_program (MpegTSBase * base, MpegTSBaseProgram * program)
+ {
+ gint i;
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+
+ if (G_UNLIKELY (program->active == FALSE))
+ return;
+
+ GST_DEBUG_OBJECT (base, "Deactivating PMT");
+
+ program->active = FALSE;
+
+ if (program->pmt) {
+ for (i = 0; i < program->pmt->streams->len; ++i) {
+ GstMpegtsPMTStream *stream = g_ptr_array_index (program->pmt->streams, i);
+
+ mpegts_base_program_remove_stream (base, program, stream->pid);
+
+ /* Only unset the is_pes/known_psi bit if the PID isn't used in any other active
+ * program */
+ if (!mpegts_pid_in_active_programs (base, stream->pid)) {
+ if (_stream_is_private_section (program->pmt, stream)) {
+ if (base->parse_private_sections)
+ MPEGTS_BIT_UNSET (base->known_psi, stream->pid);
+ } else {
+ MPEGTS_BIT_UNSET (base->is_pes, stream->pid);
+ }
+ }
+ }
+
+ /* remove pcr stream */
+ /* FIXME : This might actually be shared with another stream ? */
+ mpegts_base_program_remove_stream (base, program, program->pcr_pid);
+ if (!mpegts_pid_in_active_programs (base, program->pcr_pid))
+ MPEGTS_BIT_UNSET (base->is_pes, program->pcr_pid);
+
+ GST_DEBUG ("program stream_list is now %p", program->stream_list);
+ }
+
+ /* Inform subclasses we're deactivating this program */
+ if (klass->program_stopped)
+ klass->program_stopped (base, program);
+ }
+
+ static void
+ mpegts_base_activate_program (MpegTSBase * base, MpegTSBaseProgram * program,
+ guint16 pmt_pid, GstMpegtsSection * section, const GstMpegtsPMT * pmt,
+ gboolean initial_program)
+ {
+ guint i;
+ MpegTSBaseClass *klass;
+
+ if (G_UNLIKELY (program->active))
+ return;
+
+ GST_DEBUG ("Activating program %d", program->program_number);
+
+ /* activate new pmt */
+ if (program->section)
+ gst_mpegts_section_unref (program->section);
+ program->section = gst_mpegts_section_ref (section);
+
+ program->pmt = pmt;
+ program->pmt_pid = pmt_pid;
+ if (!base->ignore_pcr)
+ program->pcr_pid = pmt->pcr_pid;
+ else
+ program->pcr_pid = 0x1fff;
+
+ /* extract top-level registration_id if present */
+ program->registration_id =
+ get_registration_from_descriptors (pmt->descriptors);
+ GST_DEBUG ("program 0x%04x, registration_id %" SAFE_FOURCC_FORMAT,
+ program->program_number, SAFE_FOURCC_ARGS (program->registration_id));
+
+ for (i = 0; i < pmt->streams->len; ++i) {
+ GstMpegtsPMTStream *stream = g_ptr_array_index (pmt->streams, i);
+ if (_stream_is_private_section (pmt, stream)) {
+ if (base->parse_private_sections)
+ MPEGTS_BIT_SET (base->known_psi, stream->pid);
+ } else {
+ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->is_pes, stream->pid)))
+ GST_FIXME
+ ("Refcounting issue. Setting twice a PID (0x%04x) as known PES",
+ stream->pid);
+ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->known_psi, stream->pid))) {
+ GST_FIXME
+ ("Refcounting issue. Setting a known PSI PID (0x%04x) as known PES",
+ stream->pid);
+ MPEGTS_BIT_UNSET (base->known_psi, stream->pid);
+ }
+ MPEGTS_BIT_SET (base->is_pes, stream->pid);
+ }
+ mpegts_base_program_add_stream (base, program,
+ stream->pid, stream->stream_type, stream);
+ }
+ /* We add the PCR pid last. If that PID is already used by one of the media
+ * streams above, no new stream will be created */
+ mpegts_base_program_add_stream (base, program, program->pcr_pid, -1, NULL);
+ MPEGTS_BIT_SET (base->is_pes, program->pcr_pid);
+
+ program->active = TRUE;
+ program->initial_program = initial_program;
+
+ klass = GST_MPEGTS_BASE_GET_CLASS (base);
+ if (klass->program_started != NULL)
+ klass->program_started (base, program);
+
+ GST_DEBUG_OBJECT (base, "new pmt activated");
+ }
+
+
+ static gboolean
+ mpegts_base_apply_pat (MpegTSBase * base, GstMpegtsSection * section)
+ {
+ GPtrArray *pat = gst_mpegts_section_get_pat (section);
+ GPtrArray *old_pat;
+ MpegTSBaseProgram *program;
+ gint i;
+
+ if (G_UNLIKELY (pat == NULL))
+ return FALSE;
+
+ GST_INFO_OBJECT (base, "PAT");
+
+ /* Applying a new PAT does two things:
+ * * It adds the new programs to the list of programs this element handles
+ * and increments at the same time the number of times a program is referenced.
+ *
+ * * If there was a previously active PAT, It decrements the reference count
+ * of all program it used. If a program is no longer needed, it is removed.
+ */
+
+ old_pat = base->pat;
+ base->pat = pat;
+
+ GST_LOG ("Activating new Program Association Table");
+ /* activate the new table */
+ for (i = 0; i < pat->len; ++i) {
+ GstMpegtsPatProgram *patp = g_ptr_array_index (pat, i);
+
+ program = mpegts_base_get_program (base, patp->program_number);
+ if (program) {
+ /* IF the program already existed, just check if the PMT PID changed */
+ if (program->pmt_pid != patp->network_or_program_map_PID) {
+ if (program->pmt_pid != G_MAXUINT16) {
+ /* pmt pid changed */
+ /* FIXME: when this happens it may still be pmt pid of another
+ * program, so setting to False may make it go through expensive
+ * path in is_psi unnecessarily */
+ MPEGTS_BIT_UNSET (base->known_psi, program->pmt_pid);
+ }
+
+ program->pmt_pid = patp->network_or_program_map_PID;
+ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->known_psi, program->pmt_pid)))
+ GST_FIXME
+ ("Refcounting issue. Setting twice a PMT PID (0x%04x) as know PSI",
+ program->pmt_pid);
+ MPEGTS_BIT_SET (base->known_psi, patp->network_or_program_map_PID);
++#ifdef TIZEN_FEATURE_TSDEMUX_UPDATE_PMT
++ MPEGTS_BIT_UNSET (base->is_pes, patp->network_or_program_map_PID);
++#endif
+ }
+ } else {
+ /* Create a new program */
+ program =
+ mpegts_base_add_program (base, patp->program_number,
+ patp->network_or_program_map_PID);
+ }
+ /* We mark this program as being referenced by one PAT */
+ program->patcount += 1;
+ }
+
+ if (old_pat) {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+ /* deactivate the old table */
+ GST_LOG ("Deactivating old Program Association Table");
+
+ for (i = 0; i < old_pat->len; ++i) {
+ GstMpegtsPatProgram *patp = g_ptr_array_index (old_pat, i);
+
+ program = mpegts_base_get_program (base, patp->program_number);
+ if (G_UNLIKELY (program == NULL)) {
+ GST_DEBUG_OBJECT (base, "broken PAT, duplicated entry for program %d",
+ patp->program_number);
+ continue;
+ }
+
+ if (--program->patcount > 0)
+ /* the program has been referenced by the new pat, keep it */
+ continue;
+
+ GST_INFO_OBJECT (base, "PAT removing program 0x%04x 0x%04x",
+ patp->program_number, patp->network_or_program_map_PID);
+
+ if (klass->can_remove_program (base, program)) {
+ mpegts_base_deactivate_program (base, program);
+ mpegts_base_remove_program (base, patp->program_number);
+ } else {
+ /* sub-class now owns the program and must call
+ * mpegts_base_deactivate_and_free_program later */
+ g_hash_table_steal (base->programs,
+ GINT_TO_POINTER ((gint) patp->program_number));
+ }
+ /* FIXME: when this happens it may still be pmt pid of another
+ * program, so setting to False may make it go through expensive
+ * path in is_psi unnecessarily */
+ if (G_UNLIKELY (MPEGTS_BIT_IS_SET (base->known_psi,
+ patp->network_or_program_map_PID))) {
+ GST_FIXME
+ ("Program refcounting : Setting twice a pid (0x%04x) as known PSI",
+ patp->network_or_program_map_PID);
+ }
+ MPEGTS_BIT_SET (base->known_psi, patp->network_or_program_map_PID);
+ mpegts_packetizer_remove_stream (base->packetizer,
+ patp->network_or_program_map_PID);
+ }
+
+ g_ptr_array_unref (old_pat);
+ }
+
+ return TRUE;
+ }
+
+ /* mpegts_base_apply_pmt:
+  *
+  * Handle a freshly parsed PMT section. Three outcomes:
+  *  - streams-aware update: the existing program is patched in place,
+  *  - identical program: section is ignored,
+  *  - otherwise: a replacement program is created and activated BEFORE
+  *    the old one is deactivated, so downstream (decodebin) sees the
+  *    new pads before the old ones disappear.
+  *
+  * Returns FALSE only when the section cannot be parsed as a PMT
+  * (the caller then skips posting it on the bus).
+  */
+ static gboolean
+ mpegts_base_apply_pmt (MpegTSBase * base, GstMpegtsSection * section)
+ {
+ const GstMpegtsPMT *pmt;
+ MpegTSBaseProgram *program, *old_program;
+ guint program_number;
+ gboolean initial_program = TRUE;
+
+ pmt = gst_mpegts_section_get_pmt (section);
+ if (G_UNLIKELY (pmt == NULL)) {
+ GST_ERROR ("Could not get PMT (corrupted ?)");
+ return FALSE;
+ }
+
+ /* FIXME : not so sure this is valid anymore */
+ if (G_UNLIKELY (base->seen_pat == FALSE)) {
+ GST_WARNING ("Got pmt without pat first. Returning");
+ /* remove the stream since we won't get another PMT otherwise */
+ mpegts_packetizer_remove_stream (base->packetizer, section->pid);
+ return TRUE;
+ }
+
+ /* For a PMT the subtable extension is the program number */
+ program_number = section->subtable_extension;
+ GST_DEBUG ("Applying PMT (program_number:%d, pid:0x%04x)",
+ program_number, section->pid);
+
+ /* In order for stream switching to happen properly in decodebin(2),
+ * we need to first add the new pads (i.e. activate the new program)
+ * before removing the old ones (i.e. deactivating the old program)
+ */
+
+ old_program = mpegts_base_get_program (base, program_number);
+ if (G_UNLIKELY (old_program == NULL))
+ goto no_program;
+
+ if (base->streams_aware
+ && mpegts_base_is_program_update (base, old_program, section->pid, pmt)) {
+ GST_FIXME ("We are streams_aware and new program is an update");
+ /* The program is an update, and we can add/remove pads dynamically */
+ mpegts_base_update_program (base, old_program, section, pmt);
+ goto beach;
+ }
+
+ if (G_UNLIKELY (mpegts_base_is_same_program (base, old_program, section->pid,
+ pmt)))
+ goto same_program;
+
+ /* If the current program is active, this means we have a new program */
+ if (old_program->active) {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+ /* Pull the old program out of the table and build its replacement,
+  * carrying over the PAT reference count */
+ old_program = mpegts_base_steal_program (base, program_number);
+ program = mpegts_base_new_program (base, program_number, section->pid);
+ program->patcount = old_program->patcount;
+
+ /* Deactivate the old program */
+ /* FIXME : THIS IS BREAKING THE STREAM SWITCHING LOGIC !
+ * */
+ if (klass->can_remove_program (base, old_program)) {
+ mpegts_base_deactivate_program (base, old_program);
+ mpegts_base_free_program (old_program);
+ } else {
+ /* sub-class now owns the program and must call
+ * mpegts_base_deactivate_and_free_program later */
+ g_hash_table_steal (base->programs,
+ GINT_TO_POINTER ((gint) old_program->program_number));
+ }
+ /* Add new program to the programs we track */
+ g_hash_table_insert (base->programs,
+ GINT_TO_POINTER (program_number), program);
+ initial_program = FALSE;
+ } else {
+ GST_DEBUG ("Program update, re-using same program");
+ program = old_program;
+ }
+
+ /* activate program */
+ /* Ownership of pmt_info is given to the program */
+ mpegts_base_activate_program (base, program, section->pid, section, pmt,
+ initial_program);
+
+ beach:
+ GST_DEBUG ("Done activating program");
+ return TRUE;
+
+ no_program:
+ {
+ GST_ERROR ("Attempted to apply a PMT on a program that wasn't created");
+ return TRUE;
+ }
+
+ same_program:
+ {
+ GST_DEBUG ("Not applying identical program");
+ return TRUE;
+ }
+ }
+
+ /* mpegts_base_handle_psi:
+  *
+  * Dispatch a PSI section to the matching table handler (PAT, PMT, EIT,
+  * ATSC MGT), then post it on the bus as a GstMpegtsSection message
+  * unless the handler flagged it as corrupted. Consumes @section
+  * (unreffed on exit).
+  */
+ static void
+ mpegts_base_handle_psi (MpegTSBase * base, GstMpegtsSection * section)
+ {
+ gboolean post_message = TRUE;
+
+ GST_DEBUG ("Handling PSI (pid: 0x%04x , table_id: 0x%02x)",
+ section->pid, section->table_id);
+
+ switch (section->section_type) {
+ case GST_MPEGTS_SECTION_PAT:
+ post_message = mpegts_base_apply_pat (base, section);
+ if (base->seen_pat == FALSE) {
+ base->seen_pat = TRUE;
+ GST_DEBUG ("First PAT offset: %" G_GUINT64_FORMAT, section->offset);
+ /* the first PAT's offset anchors the packetizer's offset/time mapping */
+ mpegts_packetizer_set_reference_offset (base->packetizer,
+ section->offset);
+ }
+ break;
+ case GST_MPEGTS_SECTION_PMT:
+ post_message = mpegts_base_apply_pmt (base, section);
+ break;
+ case GST_MPEGTS_SECTION_EIT:
+ /* some tag extraction + posting */
+ post_message = mpegts_base_get_tags_from_eit (base, section);
+ break;
+ case GST_MPEGTS_SECTION_ATSC_MGT:
+ post_message = mpegts_base_parse_atsc_mgt (base, section);
+ break;
+ default:
+ break;
+ }
+
+ /* Finally post message (if it wasn't corrupted) */
+ if (post_message)
+ gst_element_post_message (GST_ELEMENT_CAST (base),
+ gst_message_new_mpegts_section (GST_OBJECT (base), section));
+ gst_mpegts_section_unref (section);
+ }
+
+ /* mpegts_base_parse_atsc_mgt:
+  *
+  * Parse an ATSC Master Guide Table and mark every PID that carries an
+  * EIT0..127 or ETT0..127 table as known PSI, so those PIDs are parsed
+  * as sections instead of payload. Returns FALSE if the section is not
+  * a valid MGT.
+  */
+ static gboolean
+ mpegts_base_parse_atsc_mgt (MpegTSBase * base, GstMpegtsSection * section)
+ {
+ const GstMpegtsAtscMGT *mgt;
+ gint i;
+
+ mgt = gst_mpegts_section_get_atsc_mgt (section);
+ if (G_UNLIKELY (mgt == NULL))
+ return FALSE;
+
+ for (i = 0; i < mgt->tables->len; ++i) {
+ GstMpegtsAtscMGTTable *table = g_ptr_array_index (mgt->tables, i);
+
+ if ((table->table_type >= GST_MPEGTS_ATSC_MGT_TABLE_TYPE_EIT0 &&
+ table->table_type <= GST_MPEGTS_ATSC_MGT_TABLE_TYPE_EIT127) ||
+ (table->table_type >= GST_MPEGTS_ATSC_MGT_TABLE_TYPE_ETT0 &&
+ table->table_type <= GST_MPEGTS_ATSC_MGT_TABLE_TYPE_ETT127)) {
+ MPEGTS_BIT_SET (base->known_psi, table->pid);
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* mpegts_base_get_tags_from_eit:
+  *
+  * Extract title/description tags from an EIT present/following section.
+  * Only present/following table_ids are considered; for the currently
+  * RUNNING event, the DVB short-event descriptor (if any) is parsed and
+  * its name/text appended to the program's tag list, along with the
+  * event duration. Returns FALSE only if the section cannot be parsed
+  * as an EIT.
+  */
+ static gboolean
+ mpegts_base_get_tags_from_eit (MpegTSBase * base, GstMpegtsSection * section)
+ {
+ const GstMpegtsEIT *eit;
+ guint i;
+ MpegTSBaseProgram *program;
+
+ /* Early exit if it's not from the present/following table_id */
+ if (section->table_id != GST_MTS_TABLE_ID_EVENT_INFORMATION_ACTUAL_TS_PRESENT
+ && section->table_id !=
+ GST_MTS_TABLE_ID_EVENT_INFORMATION_OTHER_TS_PRESENT)
+ return TRUE;
+
+ eit = gst_mpegts_section_get_eit (section);
+ if (G_UNLIKELY (eit == NULL))
+ return FALSE;
+
+ /* For an EIT the subtable extension is the program/service id */
+ program = mpegts_base_get_program (base, section->subtable_extension);
+
+ GST_DEBUG
+ ("program_id:0x%04x, table_id:0x%02x, actual_stream:%d, present_following:%d, program:%p",
+ section->subtable_extension, section->table_id, eit->actual_stream,
+ eit->present_following, program);
+
+ if (program && eit->present_following) {
+ for (i = 0; i < eit->events->len; i++) {
+ GstMpegtsEITEvent *event = g_ptr_array_index (eit->events, i);
+ const GstMpegtsDescriptor *desc;
+
+ if (event->running_status == RUNNING_STATUS_RUNNING) {
+ program->event_id = event->event_id;
+ if ((desc =
+ gst_mpegts_find_descriptor (event->descriptors,
+ GST_MTS_DESC_DVB_SHORT_EVENT))) {
+ gchar *name = NULL, *text = NULL;
+
+ /* name/text are allocated by the parser and owned here */
+ if (gst_mpegts_descriptor_parse_dvb_short_event (desc, NULL, &name,
+ &text)) {
+ if (!program->tags)
+ program->tags = gst_tag_list_new_empty ();
+
+ if (name) {
+ gst_tag_list_add (program->tags, GST_TAG_MERGE_APPEND,
+ GST_TAG_TITLE, name, NULL);
+ g_free (name);
+ }
+ if (text) {
+ gst_tag_list_add (program->tags, GST_TAG_MERGE_APPEND,
+ GST_TAG_DESCRIPTION, text, NULL);
+ g_free (text);
+ }
+ /* FIXME : Is it correct to post an event duration as a GST_TAG_DURATION ??? */
+ gst_tag_list_add (program->tags, GST_TAG_MERGE_APPEND,
+ GST_TAG_DURATION, event->duration * GST_SECOND, NULL);
+ return TRUE;
+ }
+ }
+ }
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* remove_each_program:
+  *
+  * Hash-table callback: deactivate one program.
+  * NOTE(review): signature matches GHRFunc; presumably used with
+  * g_hash_table_foreach_remove so returning TRUE drops the entry —
+  * confirm against the caller (outside this chunk).
+  */
+ static gboolean
+ remove_each_program (gpointer key, MpegTSBaseProgram * program,
+ MpegTSBase * base)
+ {
+ /* First deactivate it */
+ mpegts_base_deactivate_program (base, program);
+
+ return TRUE;
+ }
+
+ /* mpegts_base_drain:
+  *
+  * Ask the subclass to push out any pending data via its drain vmethod.
+  * No-op (GST_FLOW_OK) when the subclass does not implement drain.
+  */
+ static inline GstFlowReturn
+ mpegts_base_drain (MpegTSBase * base)
+ {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+
+ /* Call implementation */
+ if (klass->drain)
+ return klass->drain (base);
+
+ return GST_FLOW_OK;
+ }
+
+ /* mpegts_base_flush:
+  *
+  * Forward a flush request to the subclass vmethod (if implemented).
+  * @hard: TRUE to discard everything, FALSE to preserve seek-related state.
+  */
+ static inline void
+ mpegts_base_flush (MpegTSBase * base, gboolean hard)
+ {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+
+ /* Call implementation */
+ if (klass->flush)
+ klass->flush (base, hard);
+ }
+
+ /* mpegts_base_sink_event:
+  *
+  * Sink-pad event handler. SEGMENT switches the packetizer between
+  * skew-based (TIME segments, live) and offset-based timestamping;
+  * STREAM-START and CAPS are swallowed (the subclass produces its own
+  * per-pad events downstream — presumably, confirm in subclass);
+  * FLUSH-STOP resets segment/PAT state. Sticky events always report
+  * success so upstream keeps them pending.
+  */
+ static gboolean
+ mpegts_base_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ gboolean res = TRUE;
+ gboolean hard;
+ MpegTSBase *base = GST_MPEGTS_BASE (parent);
+ gboolean is_sticky = GST_EVENT_IS_STICKY (event);
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ GstStructure *structure = NULL;
++ GstCaps *caps = NULL;
++ gboolean caps_ret = FALSE;
++#endif
+
+ GST_DEBUG_OBJECT (base, "Got event %s",
+ gst_event_type_get_name (GST_EVENT_TYPE (event)));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ gst_event_copy_segment (event, &base->segment);
+ GST_DEBUG_OBJECT (base, "Received segment %" GST_SEGMENT_FORMAT,
+ &base->segment);
+ /* Check if we need to switch PCR/PTS handling */
+ if (base->segment.format == GST_FORMAT_TIME) {
+ base->packetizer->calculate_offset = FALSE;
+ base->packetizer->calculate_skew = TRUE;
+ /* Seek was handled upstream */
+ base->last_seek_seqnum = gst_event_get_seqnum (event);
+ } else {
+ base->packetizer->calculate_offset = TRUE;
+ base->packetizer->calculate_skew = FALSE;
+ }
+
+ res = GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, event);
+ break;
+ case GST_EVENT_STREAM_START:
+ /* not forwarded: consumed here */
+ gst_event_unref (event);
+ break;
+ case GST_EVENT_CAPS:
+ /* FIXME, do something */
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ /* Tizen: read the "is_live" field from upstream caps; default to
++  * live when the field is absent */
++ if (base->packetizer) {
++ gst_event_parse_caps (event, &caps);
++ GST_DEBUG_OBJECT (base, "got caps: %" GST_PTR_FORMAT, caps);
++
++ structure = gst_caps_get_structure (caps, 0);
++ caps_ret = gst_structure_get_boolean (structure, "is_live", &base->packetizer->is_live_stream);
++ if (caps_ret == FALSE)
++ base->packetizer->is_live_stream = TRUE;
++ } else {
++ GST_DEBUG_OBJECT (base, "base->packetizer pointer is NULL !!!");
++ }
++#endif
+ gst_event_unref (event);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ res = GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, event);
+ /* only a "hard" flush when not triggered by our own seek handling */
+ hard = (base->mode != BASE_MODE_SEEKING);
+ mpegts_packetizer_flush (base->packetizer, hard);
+ mpegts_base_flush (base, hard);
+ gst_segment_init (&base->segment, GST_FORMAT_UNDEFINED);
+ base->seen_pat = FALSE;
+ break;
+ default:
+ res = GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, event);
+ }
+
+ /* Always return TRUE for sticky events */
+ if (is_sticky)
+ res = TRUE;
+
+ return res;
+ }
+
+ /* Default sink_query vmethod: fall through to the pad's default
+  * query handling. */
+ static gboolean
+ mpegts_base_default_sink_query (MpegTSBase * base, GstQuery * query)
+ {
+ return gst_pad_query_default (base->sinkpad, GST_OBJECT (base), query);
+ }
+
+ /* Sink-pad query function: forward everything to the subclass'
+  * sink_query vmethod (defaults to mpegts_base_default_sink_query). */
+ static gboolean
+ mpegts_base_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ MpegTSBase *base = GST_MPEGTS_BASE (parent);
+
+ GST_DEBUG_OBJECT (base, "Got query %s",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ return GST_MPEGTS_BASE_GET_CLASS (base)->sink_query (base, query);
+ }
+
+ /* mpegts_base_chain:
+  *
+  * Main data path. Feeds the buffer to the packetizer, then drains one
+  * 188-byte packet at a time:
+  *  - PES PIDs are pushed to the subclass (klass->push),
+  *  - known-PSI PIDs are assembled into sections and handled/posted,
+  *  - everything else is optionally pushed (push_unknown) or logged.
+  * DISCONT buffers first drain+flush the subclass and packetizer.
+  */
+ static GstFlowReturn
+ mpegts_base_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ MpegTSBase *base;
+ MpegTSPacketizerPacketReturn pret;
+ MpegTSPacketizer2 *packetizer;
+ MpegTSPacketizerPacket packet;
+ MpegTSBaseClass *klass;
+
+ base = GST_MPEGTS_BASE (parent);
+ klass = GST_MPEGTS_BASE_GET_CLASS (base);
+
+ packetizer = base->packetizer;
+
+ if (GST_BUFFER_IS_DISCONT (buf)) {
+ GST_DEBUG_OBJECT (base, "Got DISCONT buffer, flushing");
+ res = mpegts_base_drain (base);
+ if (G_UNLIKELY (res != GST_FLOW_OK))
+ return res;
+
+ mpegts_base_flush (base, FALSE);
+ /* In the case of discontinuities in push-mode with TIME segment
+ * we want to drop all previous observations (hard:TRUE) from
+ * the packetizer */
+ if (base->mode == BASE_MODE_PUSHING
+ && base->segment.format == GST_FORMAT_TIME) {
+ mpegts_packetizer_flush (base->packetizer, TRUE);
+ mpegts_packetizer_clear (base->packetizer);
+ } else
+ mpegts_packetizer_flush (base->packetizer, FALSE);
+ }
+
+ /* packetizer takes ownership of @buf */
+ mpegts_packetizer_push (base->packetizer, buf);
+
+ while (res == GST_FLOW_OK) {
+ pret = mpegts_packetizer_next_packet (base->packetizer, &packet);
+
+ /* If we don't have enough data, return */
+ if (G_UNLIKELY (pret == PACKET_NEED_MORE))
+ break;
+
+ if (G_UNLIKELY (pret == PACKET_BAD)) {
+ /* bad header, skip the packet */
+ GST_DEBUG_OBJECT (base, "bad packet, skipping");
+ goto next;
+ }
+
+ if (klass->inspect_packet)
+ klass->inspect_packet (base, &packet);
+
+ /* If it's a known PES, push it */
+ if (MPEGTS_BIT_IS_SET (base->is_pes, packet.pid)) {
+ /* push the packet downstream */
+ if (base->push_data)
+ res = klass->push (base, &packet, NULL);
+ } else if (packet.payload
+ && MPEGTS_BIT_IS_SET (base->known_psi, packet.pid)) {
+ /* base PSI data */
+ GList *others, *tmp;
+ GstMpegtsSection *section;
+
+ /* a single packet can complete several sections (others) */
+ section = mpegts_packetizer_push_section (packetizer, &packet, &others);
+ if (section)
+ mpegts_base_handle_psi (base, section);
+ if (G_UNLIKELY (others)) {
+ for (tmp = others; tmp; tmp = tmp->next)
+ mpegts_base_handle_psi (base, (GstMpegtsSection *) tmp->data);
+ g_list_free (others);
+ }
+
+ /* we need to push section packet downstream */
+ if (base->push_section)
+ res = klass->push (base, &packet, section);
+
+ } else if (base->push_unknown) {
+ res = klass->push (base, &packet, NULL);
+ } else if (packet.payload && packet.pid != 0x1fff)
+ GST_LOG ("PID 0x%04x Saw packet on a pid we don't handle", packet.pid);
+
+ next:
+ mpegts_packetizer_clear_packet (base->packetizer, &packet);
+ }
+
+ if (res == GST_FLOW_OK && klass->input_done)
+ res = klass->input_done (base);
+
+ return res;
+ }
+
+ /* mpegts_base_scan:
+  *
+  * Pull-mode pre-scan. First reads up to 20 chunks of 64kB from the
+  * start of the stream to find the initial sync point / packet size and
+  * collect at least 5 PCR observations, then walks backwards from the
+  * end (within the last 2MB) in 56400-byte chunks (300 TS packets) to
+  * pick up the final PCR values, giving the packetizer both endpoints
+  * for duration/seek estimation.
+  *
+  * Returns GST_FLOW_OK on success or when no PCR was found at all
+  * (logged as a warning); propagates pull errors otherwise.
+  */
+ static GstFlowReturn
+ mpegts_base_scan (MpegTSBase * base)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *buf = NULL;
+ guint i;
+ gboolean done = FALSE;
+ MpegTSPacketizerPacketReturn pret;
+ gint64 tmpval;
+ gint64 upstream_size, seek_pos, reverse_limit;
+ GstFormat format;
+ guint initial_pcr_seen;
+
+ GST_DEBUG ("Scanning for initial sync point");
+
+ /* Find initial sync point and at least 5 PCR values */
+ for (i = 0; i < 20 && !done; i++) {
+ GST_DEBUG ("Grabbing %d => %d", i * 65536, (i + 1) * 65536);
+
+ ret = gst_pad_pull_range (base->sinkpad, i * 65536, 65536, &buf);
+ if (G_UNLIKELY (ret == GST_FLOW_EOS))
+ break;
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+
+ /* Push to packetizer */
+ mpegts_packetizer_push (base->packetizer, buf);
+ buf = NULL;
+
+ if (mpegts_packetizer_has_packets (base->packetizer)) {
+ if (base->seek_offset == -1) {
+ /* Mark the initial sync point and remember the packetsize */
+ base->seek_offset = base->packetizer->offset;
+ GST_DEBUG ("Sync point is now %" G_GUINT64_FORMAT, base->seek_offset);
+ base->packetsize = base->packetizer->packet_size;
+ }
+ while (1) {
+ /* Eat up all packets */
+ pret = mpegts_packetizer_process_next_packet (base->packetizer);
+ if (pret == PACKET_NEED_MORE)
+ break;
+ if (pret != PACKET_BAD && base->packetizer->nb_seen_offsets >= 5) {
+ GST_DEBUG ("Got enough initial PCR");
+ done = TRUE;
+ break;
+ }
+ }
+ }
+ }
+
+ initial_pcr_seen = base->packetizer->nb_seen_offsets;
+ if (G_UNLIKELY (initial_pcr_seen == 0))
+ goto no_initial_pcr;
+ GST_DEBUG ("Seen %d initial PCR", initial_pcr_seen);
+
+ /* Now send data from the end */
+
+ /* Get the size of upstream */
+ format = GST_FORMAT_BYTES;
+ if (!gst_pad_peer_query_duration (base->sinkpad, format, &tmpval))
+ goto beach;
+ upstream_size = tmpval;
+
+ /* The scanning takes place on the last 2048kB. Considering PCR should
+ * be present at least every 100ms, this should cope with streams
+ * up to 160Mbit/s */
+ reverse_limit = MAX (0, upstream_size - 2097152);
+
+ /* Find last PCR value, searching backwards by chunks of 300 MPEG-ts packets */
+ for (seek_pos = MAX (0, upstream_size - 56400);
+ seek_pos >= reverse_limit; seek_pos -= 56400) {
+ mpegts_packetizer_clear (base->packetizer);
+ GST_DEBUG ("Grabbing %" G_GUINT64_FORMAT " => %" G_GUINT64_FORMAT, seek_pos,
+ seek_pos + 56400);
+
+ ret = gst_pad_pull_range (base->sinkpad, seek_pos, 56400, &buf);
+ if (G_UNLIKELY (ret == GST_FLOW_EOS))
+ break;
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+
+ /* Push to packetizer */
+ mpegts_packetizer_push (base->packetizer, buf);
+ buf = NULL;
+
+ if (mpegts_packetizer_has_packets (base->packetizer)) {
+ pret = PACKET_OK;
+ /* Eat up all packets, really try to get last PCR(s) */
+ while (pret != PACKET_NEED_MORE)
+ pret = mpegts_packetizer_process_next_packet (base->packetizer);
+
+ if (base->packetizer->nb_seen_offsets > initial_pcr_seen) {
+ GST_DEBUG ("Got last PCR(s) (total seen:%d)",
+ base->packetizer->nb_seen_offsets);
+ break;
+ }
+ }
+ }
+
+ beach:
+ mpegts_packetizer_clear (base->packetizer);
+ return ret;
+
+ no_initial_pcr:
+ mpegts_packetizer_clear (base->packetizer);
+ /* The forward scan above reads up to 20 chunks of 65536 bytes; keep
+  * this byte count in sync with that loop (was wrongly 10 * 65536) */
+ GST_WARNING_OBJECT (base, "Couldn't find any PCR within the first %d bytes",
+ 20 * 65536);
+ return GST_FLOW_OK;
+ }
+
+
+ /* mpegts_base_loop:
+  *
+  * Pull-mode streaming task. SCANNING runs the initial PCR scan then
+  * switches to STREAMING; STREAMING pulls 100-packet chunks from
+  * seek_offset and feeds them through mpegts_base_chain. On EOS or a
+  * fatal flow error, pushes EOS downstream (posting an element error
+  * if nothing handled it) and pauses the task.
+  */
+ static void
+ mpegts_base_loop (MpegTSBase * base)
+ {
+ GstFlowReturn ret = GST_FLOW_ERROR;
+
+ switch (base->mode) {
+ case BASE_MODE_SCANNING:
+ /* Find first sync point */
+ ret = mpegts_base_scan (base);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto error;
+ base->mode = BASE_MODE_STREAMING;
+ GST_DEBUG ("Changing to Streaming");
+ break;
+ case BASE_MODE_SEEKING:
+ /* FIXME : unclear if we still need mode_seeking... */
+ base->mode = BASE_MODE_STREAMING;
+ break;
+ case BASE_MODE_STREAMING:
+ {
+ GstBuffer *buf = NULL;
+
+ GST_DEBUG ("Pulling data from %" G_GUINT64_FORMAT, base->seek_offset);
+
+ if (G_UNLIKELY (base->last_seek_seqnum == GST_SEQNUM_INVALID)) {
+ /* No configured seek, set a valid seqnum */
+ base->last_seek_seqnum = gst_util_seqnum_next ();
+ }
+ ret = gst_pad_pull_range (base->sinkpad, base->seek_offset,
+ 100 * base->packetsize, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto error;
+ base->seek_offset += gst_buffer_get_size (buf);
+ /* chain takes ownership of @buf */
+ ret = mpegts_base_chain (base->sinkpad, GST_OBJECT_CAST (base), buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto error;
+ }
+ break;
+ case BASE_MODE_PUSHING:
+ GST_WARNING ("wrong BASE_MODE_PUSHING mode in pull loop");
+ break;
+ }
+
+ return;
+
+ error:
+ {
+ GST_DEBUG_OBJECT (base, "Pausing task, reason %s", gst_flow_get_name (ret));
+ if (ret == GST_FLOW_EOS) {
+ if (!GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base,
+ gst_event_new_eos ()))
+ GST_ELEMENT_ERROR (base, STREAM, FAILED,
+ (_("Internal data stream error.")),
+ ("No program activated before EOS"));
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ GST_ELEMENT_FLOW_ERROR (base, ret);
+ GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, gst_event_new_eos ());
+ }
+ gst_pad_pause_task (base->sinkpad);
+ }
+ }
+
+
+ /* mpegts_base_handle_seek_event:
+  *
+  * Handle a TIME-format seek. In push mode, first tries upstream, then
+  * the subclass' seek vmethod, converting the result into a BYTES seek
+  * sent upstream. In pull mode: handles instant-rate-change seeks
+  * without touching the streaming thread; otherwise stops streaming
+  * (flush or pause), takes the sinkpad stream lock, runs the subclass
+  * seek, completes the flush and restarts the loop task.
+  *
+  * Returns TRUE when the seek was handled successfully.
+  */
+ gboolean
+ mpegts_base_handle_seek_event (MpegTSBase * base, GstPad * pad,
+ GstEvent * event)
+ {
+ MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base);
+ GstFlowReturn ret = GST_FLOW_ERROR;
+ gdouble rate;
+ gboolean flush, instant_rate_change;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ GstEvent *flush_event = NULL;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME)
+ return FALSE;
+
+ /* same seqnum as the last handled seek: nothing to do */
+ if (GST_EVENT_SEQNUM (event) == base->last_seek_seqnum) {
+ GST_DEBUG_OBJECT (base, "Skipping already handled seek");
+ return TRUE;
+ }
+
+ if (base->mode == BASE_MODE_PUSHING) {
+ /* First try if upstream supports seeking in TIME format */
+ if (gst_pad_push_event (base->sinkpad, gst_event_ref (event))) {
+ GST_DEBUG ("upstream handled SEEK event");
+ return TRUE;
+ }
+
+ /* If the subclass can seek, do that */
+ if (klass->seek) {
+ ret = klass->seek (base, event);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ GST_WARNING ("seeking failed %s", gst_flow_get_name (ret));
+ else {
+ GstEvent *new_seek;
+
+ /* subclass resolved the TIME position to a byte offset;
+  * forward it upstream as a BYTES seek */
+ if (GST_CLOCK_TIME_IS_VALID (base->seek_offset)) {
+ base->mode = BASE_MODE_SEEKING;
+ new_seek = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags,
+ GST_SEEK_TYPE_SET, base->seek_offset, GST_SEEK_TYPE_NONE, -1);
+ gst_event_set_seqnum (new_seek, GST_EVENT_SEQNUM (event));
+ if (!gst_pad_push_event (base->sinkpad, new_seek))
+ ret = GST_FLOW_ERROR;
+ else
+ base->last_seek_seqnum = GST_EVENT_SEQNUM (event);
+ }
+ base->mode = BASE_MODE_PUSHING;
+ }
+ } else {
+ GST_WARNING ("subclass has no seek implementation");
+ }
+
+ return ret == GST_FLOW_OK;
+ }
+
+ if (!klass->seek) {
+ GST_WARNING ("subclass has no seek implementation");
+ return FALSE;
+ }
+
+ if (rate <= 0.0) {
+ GST_WARNING ("Negative rate not supported");
+ return FALSE;
+ }
+
+ GST_DEBUG ("seek event, rate: %f start: %" GST_TIME_FORMAT
+ " stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
+ GST_TIME_ARGS (stop));
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);
+
+ /* Directly send the instant-rate-change event here before taking the
+ * stream-lock so that it can be applied as soon as possible */
+ if (base->mode != BASE_MODE_PUSHING && instant_rate_change) {
+ GstEvent *ev;
+
+ /* instant rate change only supported if direction does not change. All
+ * other requirements are already checked before creating the seek event
+ * but let's double-check here to be sure */
+ if ((rate > 0 && base->out_segment.rate < 0) ||
+ (rate < 0 && base->out_segment.rate > 0) ||
+ start_type != GST_SEEK_TYPE_NONE ||
+ stop_type != GST_SEEK_TYPE_NONE || flush) {
+ GST_ERROR_OBJECT (base,
+ "Instant rate change seeks only supported in the "
+ "same direction, without flushing and position change");
+ return FALSE;
+ }
+
+ ev = gst_event_new_instant_rate_change (rate / base->out_segment.rate,
+ (GstSegmentFlags) (flags));
+ gst_event_set_seqnum (ev, GST_EVENT_SEQNUM (event));
+ GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, ev);
+ return TRUE;
+ }
+
+ /* stop streaming, either by flushing or by pausing the task */
+ base->mode = BASE_MODE_SEEKING;
+ if (flush) {
+ GST_DEBUG_OBJECT (base, "sending flush start");
+ flush_event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (flush_event, GST_EVENT_SEQNUM (event));
+ gst_pad_push_event (base->sinkpad, gst_event_ref (flush_event));
+ GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, flush_event);
+ } else
+ gst_pad_pause_task (base->sinkpad);
+
+ /* wait for streaming to finish */
+ GST_PAD_STREAM_LOCK (base->sinkpad);
+
+ if (flush) {
+ /* send a FLUSH_STOP for the sinkpad, since we need data for seeking */
+ GST_DEBUG_OBJECT (base, "sending flush stop");
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, GST_EVENT_SEQNUM (event));
+
+ /* ref for it to be reused later */
+ gst_pad_push_event (base->sinkpad, gst_event_ref (flush_event));
+ /* And actually flush our pending data but allow to preserve some info
+ * to perform the seek */
+ mpegts_base_flush (base, FALSE);
+ mpegts_packetizer_flush (base->packetizer, FALSE);
+ }
+
+ if (flags & (GST_SEEK_FLAG_SEGMENT)) {
+ GST_WARNING ("seek flags 0x%x are not supported", (int) flags);
+ goto done;
+ }
+
+
+ /* If the subclass can seek, do that */
+ ret = klass->seek (base, event);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ GST_WARNING ("seeking failed %s", gst_flow_get_name (ret));
+ else
+ base->last_seek_seqnum = GST_EVENT_SEQNUM (event);
+
+ if (flush_event) {
+ /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
+ GST_DEBUG_OBJECT (base, "sending flush stop");
+ GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, flush_event);
+ flush_event = NULL;
+ }
+ done:
+ if (flush_event)
+ gst_event_unref (flush_event);
+ /* restart the pull loop whatever happened */
+ gst_pad_start_task (base->sinkpad, (GstTaskFunction) mpegts_base_loop, base,
+ NULL);
+
+ GST_PAD_STREAM_UNLOCK (base->sinkpad);
+ return ret == GST_FLOW_OK;
+ }
+
+
+ /* mpegts_base_sink_activate:
+  *
+  * Choose the scheduling mode: pull when upstream supports seekable
+  * pull-based scheduling, push otherwise.
+  */
+ static gboolean
+ mpegts_base_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+ activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+ /* mpegts_base_sink_activate_mode:
+  *
+  * Push mode: just record the mode. Pull mode: start (or stop) the
+  * mpegts_base_loop streaming task; pull always starts in SCANNING
+  * with offset-based timestamping and a BYTES segment.
+  */
+ static gboolean
+ mpegts_base_sink_activate_mode (GstPad * pad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ gboolean res;
+ MpegTSBase *base = GST_MPEGTS_BASE (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ base->mode = BASE_MODE_PUSHING;
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ base->mode = BASE_MODE_SCANNING;
+ /* When working pull-based, we always use offsets for estimation */
+ base->packetizer->calculate_offset = TRUE;
+ base->packetizer->calculate_skew = FALSE;
+ gst_segment_init (&base->segment, GST_FORMAT_BYTES);
+ res =
+ gst_pad_start_task (pad, (GstTaskFunction) mpegts_base_loop, base,
+ NULL);
+ } else
+ res = gst_pad_stop_task (pad);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
+ }
+
+ /* mpegts_base_change_state:
+  *
+  * Reset all demuxing state when entering PAUSED from READY and again
+  * when leaving PAUSED for READY; after teardown, non-push modes fall
+  * back to SCANNING for the next activation.
+  */
+ static GstStateChangeReturn
+ mpegts_base_change_state (GstElement * element, GstStateChange transition)
+ {
+ MpegTSBase *base;
+ GstStateChangeReturn ret;
+
+ base = GST_MPEGTS_BASE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ mpegts_base_reset (base);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ mpegts_base_reset (base);
+ if (base->mode != BASE_MODE_PUSHING)
+ base->mode = BASE_MODE_SCANNING;
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ /* Register the mpegtsbase debug category; called from the plugin's
+  * init function. */
+ gboolean
+ gst_mpegtsbase_plugin_init (GstPlugin * plugin)
+ {
+ GST_DEBUG_CATEGORY_INIT (mpegts_base_debug, "mpegtsbase", 0,
+ "MPEG transport stream base class");
+
+ return TRUE;
+ }
--- /dev/null
+ /*
+ * mpegtspacketizer.c -
+ * Copyright (C) 2007, 2008 Alessandro Decina, Zaheer Merali
+ *
+ * Authors:
+ * Zaheer Merali <zaheerabbas at merali dot org>
+ * Alessandro Decina <alessandro@nnva.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdlib.h>
+
+ /* Skew calculation parameters */
+ #define MAX_TIME (2 * GST_SECOND)
+
+ /* maximal PCR time */
+ #define PCR_MAX_VALUE (((((guint64)1)<<33) * 300) + 298)
+ #define PCR_GST_MAX_VALUE (PCR_MAX_VALUE * GST_MSECOND / (PCR_MSECOND))
+ #define PTS_DTS_MAX_VALUE (((guint64)1) << 33)
+
+ #include "mpegtspacketizer.h"
+ #include "gstmpegdesc.h"
+
+ GST_DEBUG_CATEGORY_STATIC (mpegts_packetizer_debug);
+ #define GST_CAT_DEFAULT mpegts_packetizer_debug
+
+ static void _init_local (void);
+ G_DEFINE_TYPE_EXTENDED (MpegTSPacketizer2, mpegts_packetizer, G_TYPE_OBJECT, 0,
+ _init_local ());
+
+ #define ABSDIFF(a,b) ((a) < (b) ? (b) - (a) : (a) - (b))
+
+ #define PACKETIZER_GROUP_LOCK(p) g_mutex_lock(&((p)->group_lock))
+ #define PACKETIZER_GROUP_UNLOCK(p) g_mutex_unlock(&((p)->group_lock))
+
+ static void mpegts_packetizer_dispose (GObject * object);
+ static void mpegts_packetizer_finalize (GObject * object);
+ static GstClockTime calculate_skew (MpegTSPacketizer2 * packetizer,
+ MpegTSPCR * pcr, guint64 pcrtime, GstClockTime time);
+ static void _close_current_group (MpegTSPCR * pcrtable);
+ static void record_pcr (MpegTSPacketizer2 * packetizer, MpegTSPCR * pcrtable,
+ guint64 pcr, guint64 offset);
+
+ #define CONTINUITY_UNSET 255
+ #define VERSION_NUMBER_UNSET 255
+ #define TABLE_ID_UNSET 0xFF
+ #define PACKET_SYNC_BYTE 0x47
+
+ /* get_pcr_table:
+  *
+  * Return the PCR observation table for @pid, creating and registering
+  * a zero-initialized one on first use (pcrtablelut maps pid -> slot in
+  * the observations array). Never returns NULL.
+  */
+ static inline MpegTSPCR *
+ get_pcr_table (MpegTSPacketizer2 * packetizer, guint16 pid)
+ {
+ MpegTSPCR *res;
+
+ res = packetizer->observations[packetizer->pcrtablelut[pid]];
+
+ if (G_UNLIKELY (res == NULL)) {
+ /* If we don't have a PCR table for the requested PID, create one .. */
+ res = g_new0 (MpegTSPCR, 1);
+ /* Add it to the last table position */
+ packetizer->observations[packetizer->lastobsid] = res;
+ /* Update the pcrtablelut */
+ packetizer->pcrtablelut[pid] = packetizer->lastobsid;
+ /* And increment the last know slot */
+ packetizer->lastobsid++;
+
+ /* Finally set the default values */
+ res->pid = pid;
+ res->base_time = GST_CLOCK_TIME_NONE;
+ res->base_pcrtime = GST_CLOCK_TIME_NONE;
+ res->last_pcrtime = GST_CLOCK_TIME_NONE;
+ res->window_pos = 0;
+ res->window_filling = TRUE;
+ res->window_min = 0;
+ res->skew = 0;
+ res->prev_send_diff = GST_CLOCK_TIME_NONE;
+ res->prev_out_time = GST_CLOCK_TIME_NONE;
+ res->pcroffset = 0;
+
+ res->current = g_slice_new0 (PCROffsetCurrent);
+ }
+
+ return res;
+ }
+
+ /* Free one PCROffsetGroup and its values array (GDestroyNotify). */
+ static void
+ pcr_offset_group_free (PCROffsetGroup * group)
+ {
+ g_free (group->values);
+ g_slice_free (PCROffsetGroup, group);
+ }
+
+ /* flush_observations:
+  *
+  * Free every PCR observation table (groups, current entry, table) and
+  * reset the pid->slot lookup table (0x2000 = 8192 possible PIDs, 0xff
+  * meaning "no table").
+  */
+ static void
+ flush_observations (MpegTSPacketizer2 * packetizer)
+ {
+ gint i;
+
+ for (i = 0; i < packetizer->lastobsid; i++) {
+ g_list_free_full (packetizer->observations[i]->groups,
+ (GDestroyNotify) pcr_offset_group_free);
+ if (packetizer->observations[i]->current)
+ g_slice_free (PCROffsetCurrent, packetizer->observations[i]->current);
+ g_free (packetizer->observations[i]);
+ packetizer->observations[i] = NULL;
+ }
+ memset (packetizer->pcrtablelut, 0xff, 0x2000);
+ packetizer->lastobsid = 0;
+ }
+
+ /* mpegts_packetizer_get_current_time:
+  *
+  * Convert the last PCR seen on @pcr_pid to a running GstClockTime.
+  * The NULL check is defensive: get_pcr_table allocates a table on
+  * demand and never returns NULL.
+  */
+ GstClockTime
+ mpegts_packetizer_get_current_time (MpegTSPacketizer2 * packetizer,
+ guint16 pcr_pid)
+ {
+ MpegTSPCR *pcrtable = get_pcr_table (packetizer, pcr_pid);
+
+ if (pcrtable == NULL)
+ return GST_CLOCK_TIME_NONE;
+
+ return mpegts_packetizer_pts_to_ts (packetizer, pcrtable->last_pcrtime,
+ pcr_pid);
+ }
+
+ /* Linear search for the subtable matching (table_id,
+  * subtable_extension); NULL if not tracked yet. */
+ static inline MpegTSPacketizerStreamSubtable *
+ find_subtable (GSList * subtables, guint8 table_id, guint16 subtable_extension)
+ {
+ GSList *tmp;
+
+ /* FIXME: Make this an array ! */
+ for (tmp = subtables; tmp; tmp = tmp->next) {
+ MpegTSPacketizerStreamSubtable *sub =
+ (MpegTSPacketizerStreamSubtable *) tmp->data;
+ if (sub->table_id == table_id
+ && sub->subtable_extension == subtable_extension)
+ return sub;
+ }
+
+ return NULL;
+ }
+
+ /* seen_section_before:
+  *
+  * Decide whether an incoming section is a duplicate of one already
+  * processed for this stream: same subtable, same version_number, same
+  * last_section_number and section_number already marked as seen.
+  *
+  * NOTE(review): when stored section bytes exist, this returns
+  * (memcmp != 0), i.e. TRUE when the bytes DIFFER — verify against
+  * upstream that this polarity is intended and not inverted.
+  */
+ static gboolean
+ seen_section_before (MpegTSPacketizerStream * stream, guint8 table_id,
+ guint16 subtable_extension, guint8 version_number, guint8 section_number,
+ guint8 last_section_number, guint8 * data_start, gsize to_read)
+ {
+ MpegTSPacketizerStreamSubtable *subtable;
+
+ /* Check if we've seen this table_id/subtable_extension first */
+ subtable = find_subtable (stream->subtables, table_id, subtable_extension);
+ if (!subtable) {
+ GST_DEBUG ("Haven't seen subtable");
+ return FALSE;
+ }
+ /* If we have, check it has the same version_number */
+ if (subtable->version_number != version_number) {
+ GST_DEBUG ("Different version number");
+ return FALSE;
+ }
+ /* Did the number of sections change ? */
+ if (subtable->last_section_number != last_section_number) {
+ GST_DEBUG ("Different last_section_number");
+ return FALSE;
+ }
+ /* Finally return whether we saw that section or not */
+ if (!MPEGTS_BIT_IS_SET (subtable->seen_section, section_number)) {
+ GST_DEBUG ("Different section_number");
+ return FALSE;
+ }
+
+ if (stream->section_data) {
+ /* Everything else is the same, fall back to memcmp */
+ return (memcmp (stream->section_data, data_start, to_read) != 0);
+ }
+
+ return FALSE;
+ }
+
+ /* Allocate a subtable tracker for one table_id/subtable_extension pair.
+ * version_number starts unset; seen_section bitmask starts all-zero
+ * (g_new0), i.e. no section seen yet. Caller owns the returned memory. */
+ static MpegTSPacketizerStreamSubtable *
+ mpegts_packetizer_stream_subtable_new (guint8 table_id,
+ guint16 subtable_extension, guint8 last_section_number)
+ {
+ MpegTSPacketizerStreamSubtable *subtable;
+
+ subtable = g_new0 (MpegTSPacketizerStreamSubtable, 1);
+ subtable->version_number = VERSION_NUMBER_UNSET;
+ subtable->table_id = table_id;
+ subtable->subtable_extension = subtable_extension;
+ subtable->last_section_number = last_section_number;
+ return subtable;
+ }
+
+ /* Allocate the per-PID stream state used to accumulate sections.
+ * continuity counter and table_id start unset; everything else is
+ * zeroed by g_new0. */
+ static MpegTSPacketizerStream *
+ mpegts_packetizer_stream_new (guint16 pid)
+ {
+ MpegTSPacketizerStream *stream;
+
+ stream = (MpegTSPacketizerStream *) g_new0 (MpegTSPacketizerStream, 1);
+ stream->continuity_counter = CONTINUITY_UNSET;
+ stream->subtables = NULL;
+ stream->table_id = TABLE_ID_UNSET;
+ stream->pid = pid;
+ return stream;
+ }
+
+ /* Discard any partially-accumulated section on @stream and reset its
+ * continuity tracking. Subtable history is intentionally kept. */
+ static void
+ mpegts_packetizer_clear_section (MpegTSPacketizerStream * stream)
+ {
+ stream->continuity_counter = CONTINUITY_UNSET;
+ stream->section_length = 0;
+ stream->section_offset = 0;
+ stream->table_id = TABLE_ID_UNSET;
+ g_free (stream->section_data);
+ stream->section_data = NULL;
+ }
+
+ /* GFunc-compatible destructor for a MpegTSPacketizerStreamSubtable. */
+ static void
+ mpegts_packetizer_stream_subtable_free (MpegTSPacketizerStreamSubtable *
+ subtable)
+ {
+ g_free (subtable);
+ }
+
+ /* Free a per-PID stream: pending section data, all subtable trackers
+ * and the stream structure itself. */
+ static void
+ mpegts_packetizer_stream_free (MpegTSPacketizerStream * stream)
+ {
+ mpegts_packetizer_clear_section (stream);
+ g_slist_foreach (stream->subtables,
+ (GFunc) mpegts_packetizer_stream_subtable_free, NULL);
+ g_slist_free (stream->subtables);
+ g_free (stream);
+ }
+
+ /* GObject class init: hook up dispose and finalize. */
+ static void
+ mpegts_packetizer_class_init (MpegTSPacketizer2Class * klass)
+ {
+ GObjectClass *gobject_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+
+ gobject_class->dispose = mpegts_packetizer_dispose;
+ gobject_class->finalize = mpegts_packetizer_finalize;
+ }
+
+ /* GObject instance init: set every field to its "nothing seen yet"
+ * value. streams[] is indexed by the full 13-bit PID space (8192). */
+ static void
+ mpegts_packetizer_init (MpegTSPacketizer2 * packetizer)
+ {
+ g_mutex_init (&packetizer->group_lock);
+
+ packetizer->adapter = gst_adapter_new ();
+ packetizer->offset = 0;
+ packetizer->empty = TRUE;
+ packetizer->streams = g_new0 (MpegTSPacketizerStream *, 8192);
+ packetizer->packet_size = 0;
+ packetizer->calculate_skew = FALSE;
+ packetizer->calculate_offset = FALSE;
+
+ packetizer->map_data = NULL;
+ packetizer->map_size = 0;
+ packetizer->map_offset = 0;
+ packetizer->need_sync = FALSE;
+
+ /* 0xff == no PCR observation for this PID yet */
+ memset (packetizer->pcrtablelut, 0xff, 0x2000);
+ memset (packetizer->observations, 0x0, sizeof (packetizer->observations));
+ packetizer->lastobsid = 0;
+
+ packetizer->nb_seen_offsets = 0;
+ packetizer->refoffset = -1;
+ packetizer->last_in_time = GST_CLOCK_TIME_NONE;
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ packetizer->is_live_stream = FALSE;
++ packetizer->need_pmt_update = FALSE;
++#endif
+ packetizer->pcr_discont_threshold = GST_SECOND;
+ packetizer->last_pts = GST_CLOCK_TIME_NONE;
+ packetizer->last_dts = GST_CLOCK_TIME_NONE;
+ }
+
+ /* GObject dispose: free streams, adapter, lock and PCR observations.
+ * Guarded by the 'disposed' flag since dispose may run more than once. */
+ static void
+ mpegts_packetizer_dispose (GObject * object)
+ {
+ MpegTSPacketizer2 *packetizer = GST_MPEGTS_PACKETIZER (object);
+
+ if (!packetizer->disposed) {
+ if (packetizer->packet_size)
+ packetizer->packet_size = 0;
+ if (packetizer->streams) {
+ int i;
+ for (i = 0; i < 8192; i++) {
+ if (packetizer->streams[i])
+ mpegts_packetizer_stream_free (packetizer->streams[i]);
+ }
+ g_free (packetizer->streams);
+ }
+
+ gst_adapter_clear (packetizer->adapter);
+ g_object_unref (packetizer->adapter);
+ g_mutex_clear (&packetizer->group_lock);
+ packetizer->disposed = TRUE;
+ packetizer->offset = 0;
+ packetizer->empty = TRUE;
+
+ flush_observations (packetizer);
+ }
+
+ if (G_OBJECT_CLASS (mpegts_packetizer_parent_class)->dispose)
+ G_OBJECT_CLASS (mpegts_packetizer_parent_class)->dispose (object);
+ }
+
+ /* GObject finalize: nothing to free here beyond chaining up
+ * (all resources are released in dispose). */
+ static void
+ mpegts_packetizer_finalize (GObject * object)
+ {
+ if (G_OBJECT_CLASS (mpegts_packetizer_parent_class)->finalize)
+ G_OBJECT_CLASS (mpegts_packetizer_parent_class)->finalize (object);
+ }
+
+ /* Decode a 48-bit PCR field: 33-bit base (split across the first 4
+ * bytes plus the top bit of byte 5) and 9-bit extension, combined as
+ * base * 300 + extension, i.e. a 27 MHz tick count. */
+ static inline guint64
+ mpegts_packetizer_compute_pcr (const guint8 * data)
+ {
+ guint32 pcr1;
+ guint16 pcr2;
+ guint64 pcr, pcr_ext;
+
+ pcr1 = GST_READ_UINT32_BE (data);
+ pcr2 = GST_READ_UINT16_BE (data + 4);
+ pcr = ((guint64) pcr1) << 1;
+ pcr |= (pcr2 & 0x8000) >> 15;
+ pcr_ext = (pcr2 & 0x01ff);
+ /* % 300 clamps a (spec-invalid) extension >= 300 */
+ return pcr * 300 + pcr_ext % 300;
+ }
+
+ /* Parse the adaptation field at packet->data, advancing packet->data
+ * past it. Validates the field length against the AFC flags, extracts
+ * the PCR (feeding skew/offset estimation) and, in debug builds, dumps
+ * the remaining optional fields. Returns FALSE on malformed fields. */
+ static gboolean
+ mpegts_packetizer_parse_adaptation_field_control (MpegTSPacketizer2 *
+ packetizer, MpegTSPacketizerPacket * packet)
+ {
+ guint8 length, afcflags;
+ guint8 *data;
+
+ length = *packet->data++;
+
+ /* an adaptation field with length 0 is valid and
+ * can be used to insert a single stuffing byte */
+ if (!length) {
+ packet->afc_flags = 0;
+ return TRUE;
+ }
+
+ if ((packet->scram_afc_cc & 0x30) == 0x20) {
+ /* no payload, adaptation field of 183 bytes */
+ if (length > 183) {
+ GST_WARNING ("PID 0x%04x afc == 0x%02x and length %d > 183",
+ packet->pid, packet->scram_afc_cc & 0x30, length);
+ return FALSE;
+ }
+ if (length != 183) {
+ GST_WARNING ("PID 0x%04x afc == 0x%02x and length %d != 183",
+ packet->pid, packet->scram_afc_cc & 0x30, length);
+ GST_MEMDUMP ("Unknown payload", packet->data + length,
+ packet->data_end - packet->data - length);
+ }
+ } else if (length == 183) {
+ /* Note: According to the specification, the adaptation field length
+ * must be 183 if there is no payload data and < 183 if the packet
+ * contains an adaptation field and payload data.
+ * Some payloaders always set the flag for payload data, even if the
+ * adaptation field length is 183. This just means a zero length
+ * payload so we clear the payload flag here and continue.
+ */
+ GST_WARNING ("PID 0x%04x afc == 0x%02x and length %d == 183 (ignored)",
+ packet->pid, packet->scram_afc_cc & 0x30, length);
+ packet->scram_afc_cc &= ~0x10;
+ } else if (length > 182) {
+ GST_WARNING ("PID 0x%04x afc == 0x%02x and length %d > 182",
+ packet->pid, packet->scram_afc_cc & 0x30, length);
+ return FALSE;
+ }
+
+ if (packet->data + length > packet->data_end) {
+ GST_DEBUG
+ ("PID 0x%04x afc length %d overflows the buffer current %d max %d",
+ packet->pid, length, (gint) (packet->data - packet->data_start),
+ (gint) (packet->data_end - packet->data_start));
+ return FALSE;
+ }
+
+ /* Advance the packet cursor past the whole field up front; 'data'
+ * walks the field contents below */
+ data = packet->data;
+ packet->data += length;
+
+ afcflags = packet->afc_flags = *data++;
+
+ GST_DEBUG ("flags: %s%s%s%s%s%s%s%s%s",
+ afcflags & 0x80 ? "discontinuity " : "",
+ afcflags & 0x40 ? "random_access " : "",
+ afcflags & 0x20 ? "elementary_stream_priority " : "",
+ afcflags & 0x10 ? "PCR " : "",
+ afcflags & 0x08 ? "OPCR " : "",
+ afcflags & 0x04 ? "splicing_point " : "",
+ afcflags & 0x02 ? "transport_private_data " : "",
+ afcflags & 0x01 ? "extension " : "", afcflags == 0x00 ? "<none>" : "");
+
+ /* PCR */
+ if (afcflags & MPEGTS_AFC_PCR_FLAG) {
+ MpegTSPCR *pcrtable = NULL;
+ packet->pcr = mpegts_packetizer_compute_pcr (data);
+ data += 6;
+ GST_DEBUG ("pcr 0x%04x %" G_GUINT64_FORMAT " (%" GST_TIME_FORMAT
+ ") offset:%" G_GUINT64_FORMAT, packet->pid, packet->pcr,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (packet->pcr)), packet->offset);
+
+ PACKETIZER_GROUP_LOCK (packetizer);
+ if (packetizer->calculate_skew
+ && GST_CLOCK_TIME_IS_VALID (packetizer->last_in_time)) {
+ pcrtable = get_pcr_table (packetizer, packet->pid);
+ calculate_skew (packetizer, pcrtable, packet->pcr,
+ packetizer->last_in_time);
+ }
+ if (packetizer->calculate_offset) {
+ if (!pcrtable)
+ pcrtable = get_pcr_table (packetizer, packet->pid);
+ record_pcr (packetizer, pcrtable, packet->pcr, packet->offset);
+ }
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ }
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* OPCR */
+ if (afcflags & MPEGTS_AFC_OPCR_FLAG) {
+ /* Note: We don't use/need opcr for the time being */
+ guint64 opcr = mpegts_packetizer_compute_pcr (data);
+ data += 6;
+ GST_DEBUG ("opcr %" G_GUINT64_FORMAT " (%" GST_TIME_FORMAT ")",
+ opcr, GST_TIME_ARGS (PCRTIME_TO_GSTTIME (opcr)));
+ }
+
+ if (afcflags & MPEGTS_AFC_SPLICING_POINT_FLAG) {
+ GST_DEBUG ("splice_countdown: %u", *data++);
+ }
+
+ if (afcflags & MPEGTS_AFC_TRANSPORT_PRIVATE_DATA_FLAG) {
+ guint8 len = *data++;
+ GST_MEMDUMP ("private data", data, len);
+ data += len;
+ }
+
+ if (afcflags & MPEGTS_AFC_EXTENSION_FLAG) {
+ guint8 extlen = *data++;
+ guint8 flags = *data++;
+ GST_DEBUG ("extension size:%d flags : %s%s%s", extlen,
+ flags & 0x80 ? "ltw " : "",
+ flags & 0x40 ? "piecewise_rate " : "",
+ flags & 0x20 ? "seamless_splice " : "");
+ if (flags & 0x80) {
+ GST_DEBUG ("legal time window: valid_flag:%d offset:%d", *data >> 7,
+ GST_READ_UINT16_BE (data) & 0x7fff);
+ data += 2;
+ }
+ }
+ #endif
+
+ return TRUE;
+ }
+
+ /* Parse the 4-byte TS header following the sync byte at
+ * packet->data_start, filling in @packet: PID, PUSI flag, AFC flags,
+ * PCR (G_MAXUINT64 when absent) and the payload pointer (NULL when
+ * the packet carries no payload).
+ *
+ * Returns PACKET_BAD for transport errors, scrambled packets or a
+ * malformed adaptation field, PACKET_OK otherwise. */
+ static MpegTSPacketizerPacketReturn
+ mpegts_packetizer_parse_packet (MpegTSPacketizer2 * packetizer,
+ MpegTSPacketizerPacket * packet)
+ {
+ guint8 *data;
+ guint8 tmp;
+
+ data = packet->data_start;
+ data += 1;
+ tmp = *data;
+
+ /* transport_error_indicator 1 */
+ if (G_UNLIKELY (tmp & 0x80))
+ return PACKET_BAD;
+
+ /* payload_unit_start_indicator 1 */
+ packet->payload_unit_start_indicator = tmp & 0x40;
+
+ /* transport_priority 1 */
+ /* PID 13 */
+ packet->pid = GST_READ_UINT16_BE (data) & 0x1FFF;
+ data += 2;
+
+ packet->scram_afc_cc = tmp = *data++;
+ /* transport_scrambling_control 2 */
+ if (G_UNLIKELY (tmp & 0xc0))
+ return PACKET_BAD;
+
+ packet->data = data;
+
+ packet->afc_flags = 0;
+ packet->pcr = G_MAXUINT64;
+
+ if (FLAGS_HAS_AFC (tmp)) {
+ /* Return the proper enum value (FALSE happened to alias PACKET_BAD) */
+ if (!mpegts_packetizer_parse_adaptation_field_control (packetizer, packet))
+ return PACKET_BAD;
+ }
+
+ if (FLAGS_HAS_PAYLOAD (packet->scram_afc_cc))
+ packet->payload = packet->data;
+ else
+ packet->payload = NULL;
+
+ return PACKET_OK;
+ }
+
+ /* Turn the fully-accumulated section data on @stream into a
+ * GstMpegtsSection, creating or updating the matching subtable tracker
+ * and marking the section number as seen. Ownership of
+ * stream->section_data transfers to the new section; the stream's
+ * accumulation state is reset. Returns NULL if section creation fails. */
+ static GstMpegtsSection *
+ mpegts_packetizer_parse_section_header (MpegTSPacketizer2 * packetizer,
+ MpegTSPacketizerStream * stream)
+ {
+ MpegTSPacketizerStreamSubtable *subtable;
+ GstMpegtsSection *res;
+
+ subtable =
+ find_subtable (stream->subtables, stream->table_id,
+ stream->subtable_extension);
+ if (subtable) {
+ GST_DEBUG ("Found previous subtable_extension:0x%04x",
+ stream->subtable_extension);
+ if (G_UNLIKELY (stream->version_number != subtable->version_number)) {
+ /* If the version number changed, reset the subtable */
+ subtable->version_number = stream->version_number;
+ subtable->last_section_number = stream->last_section_number;
+ memset (subtable->seen_section, 0, 32);
+ }
+ } else {
+ GST_DEBUG ("Appending new subtable_extension: 0x%04x",
+ stream->subtable_extension);
+ subtable = mpegts_packetizer_stream_subtable_new (stream->table_id,
+ stream->subtable_extension, stream->last_section_number);
+ subtable->version_number = stream->version_number;
+
+ stream->subtables = g_slist_prepend (stream->subtables, subtable);
+ }
+
+ GST_MEMDUMP ("Full section data", stream->section_data,
+ stream->section_length);
+ /* TODO ? : Replace this by an efficient version (where we provide all
+ * pre-parsed header data) */
+ res =
+ gst_mpegts_section_new (stream->pid, stream->section_data,
+ stream->section_length);
+ stream->section_data = NULL;
+ mpegts_packetizer_clear_section (stream);
+
+ if (res) {
+ /* NOTE : Due to the new mpegts-si system, There is a insanely low probability
+ * that we might have gotten a section that was corrupted (i.e. wrong crc)
+ * and that we consider it as seen.
+ *
+ * The reason why we consider this as acceptable is because all the previous
+ * checks were already done:
+ * * transport layer checks (DVB)
+ * * 0x47 validation
+ * * continuity counter validation
+ * * subtable validation
+ * * section_number validation
+ * * section_length validation
+ *
+ * The probability of this happening vs the overhead of doing CRC checks
+ * on all sections (including those we would not use) is just not worth it.
+ * */
+ MPEGTS_BIT_SET (subtable->seen_section, stream->section_number);
+ res->offset = stream->offset;
+ }
+
+ return res;
+ }
+
+ /* Reset the packetizer to a pristine parsing state: free every per-PID
+ * stream, drop adapter/mapping/timestamp state and close the current
+ * PCR observation groups. Unlike a hard flush, the accumulated PCR
+ * observations themselves are preserved. */
+ void
+ mpegts_packetizer_clear (MpegTSPacketizer2 * packetizer)
+ {
+ guint i;
+ MpegTSPCR *pcrtable;
+
+ packetizer->packet_size = 0;
+
+ if (packetizer->streams) {
+ /* Reuse the function-scope index rather than shadowing it with a
+ * second declaration (-Wshadow) */
+ for (i = 0; i < 8192; i++) {
+ if (packetizer->streams[i]) {
+ mpegts_packetizer_stream_free (packetizer->streams[i]);
+ }
+ }
+ memset (packetizer->streams, 0, 8192 * sizeof (MpegTSPacketizerStream *));
+ }
+
+ gst_adapter_clear (packetizer->adapter);
+ packetizer->offset = 0;
+ packetizer->empty = TRUE;
+ packetizer->need_sync = FALSE;
+ packetizer->map_data = NULL;
+ packetizer->map_size = 0;
+ packetizer->map_offset = 0;
+ packetizer->last_in_time = GST_CLOCK_TIME_NONE;
+ packetizer->last_pts = GST_CLOCK_TIME_NONE;
+ packetizer->last_dts = GST_CLOCK_TIME_NONE;
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ packetizer->is_live_stream = FALSE;
++#endif
+
+ /* pcrtablelut[0x1fff] is the catch-all/global observation slot */
+ pcrtable = packetizer->observations[packetizer->pcrtablelut[0x1fff]];
+ if (pcrtable)
+ pcrtable->base_time = GST_CLOCK_TIME_NONE;
+
+ /* Close current PCR group */
+ PACKETIZER_GROUP_LOCK (packetizer);
+
+ for (i = 0; i < MAX_PCR_OBS_CHANNELS; i++) {
+ if (packetizer->observations[i])
+ _close_current_group (packetizer->observations[i]);
+ else
+ break;
+ }
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ }
+
+ /* Flush parsing state: clear pending sections on every stream (but
+ * keep the streams and their subtable history), drop adapter and
+ * timestamp state, and close the current PCR group. When @hard is
+ * TRUE the PCR observations are discarded too. */
+ void
+ mpegts_packetizer_flush (MpegTSPacketizer2 * packetizer, gboolean hard)
+ {
+ guint i;
+ MpegTSPCR *pcrtable;
+ GST_DEBUG ("Flushing");
+
+ if (packetizer->streams) {
+ for (i = 0; i < 8192; i++) {
+ if (packetizer->streams[i]) {
+ mpegts_packetizer_clear_section (packetizer->streams[i]);
+ }
+ }
+ }
+ gst_adapter_clear (packetizer->adapter);
+
+ packetizer->offset = 0;
+ packetizer->empty = TRUE;
+ packetizer->need_sync = FALSE;
+ packetizer->map_data = NULL;
+ packetizer->map_size = 0;
+ packetizer->map_offset = 0;
+ packetizer->last_in_time = GST_CLOCK_TIME_NONE;
+ packetizer->last_pts = GST_CLOCK_TIME_NONE;
+ packetizer->last_dts = GST_CLOCK_TIME_NONE;
+
+ pcrtable = packetizer->observations[packetizer->pcrtablelut[0x1fff]];
+ if (pcrtable)
+ pcrtable->base_time = GST_CLOCK_TIME_NONE;
+
+ /* Close current PCR group */
+ PACKETIZER_GROUP_LOCK (packetizer);
+ for (i = 0; i < MAX_PCR_OBS_CHANNELS; i++) {
+ if (packetizer->observations[i])
+ _close_current_group (packetizer->observations[i]);
+ else
+ break;
+ }
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+
+ if (hard) {
+ /* For pull mode seeks in tsdemux the observation must be preserved */
+ flush_observations (packetizer);
+ }
+ }
+
+ /* Free and forget the per-PID stream state for @pid, if any. */
+ void
+ mpegts_packetizer_remove_stream (MpegTSPacketizer2 * packetizer, gint16 pid)
+ {
+ MpegTSPacketizerStream *stream = packetizer->streams[pid];
+ if (stream) {
+ GST_INFO ("Removing stream for PID 0x%04x", pid);
+ mpegts_packetizer_stream_free (stream);
+ packetizer->streams[pid] = NULL;
+ }
+ }
+
+ /* Create a new packetizer instance (caller owns the reference). */
+ MpegTSPacketizer2 *
+ mpegts_packetizer_new (void)
+ {
+ MpegTSPacketizer2 *packetizer;
+
+ packetizer =
+ GST_MPEGTS_PACKETIZER (g_object_new (GST_TYPE_MPEGTS_PACKETIZER, NULL));
+
+ return packetizer;
+ }
+
+ /* Feed a buffer into the internal adapter (takes ownership of
+ * @buffer). Records the buffer's arrival time (DTS preferred over
+ * PTS) and, for the very first buffer, its byte offset. */
+ void
+ mpegts_packetizer_push (MpegTSPacketizer2 * packetizer, GstBuffer * buffer)
+ {
+ GstClockTime ts;
+ if (G_UNLIKELY (packetizer->empty)) {
+ packetizer->empty = FALSE;
+ packetizer->offset = GST_BUFFER_OFFSET (buffer);
+ }
+
+ GST_DEBUG ("Pushing %" G_GSIZE_FORMAT " byte from offset %"
+ G_GUINT64_FORMAT, gst_buffer_get_size (buffer),
+ GST_BUFFER_OFFSET (buffer));
+ gst_adapter_push (packetizer->adapter, buffer);
+ /* If the buffer has a valid timestamp, store it - preferring DTS,
+ * which is where upstream arrival times should be stored */
+ ts = GST_BUFFER_DTS_OR_PTS (buffer);
+ if (GST_CLOCK_TIME_IS_VALID (ts))
+ packetizer->last_in_time = ts;
+ packetizer->last_pts = GST_BUFFER_PTS (buffer);
+ packetizer->last_dts = GST_BUFFER_DTS (buffer);
+ }
+
+ /* Drop @size bytes from the adapter and invalidate the current map
+ * window (any previously-returned map pointer becomes stale). */
+ static void
+ mpegts_packetizer_flush_bytes (MpegTSPacketizer2 * packetizer, gsize size)
+ {
+ if (size > 0) {
+ GST_LOG ("flushing %" G_GSIZE_FORMAT " bytes from adapter", size);
+ gst_adapter_flush (packetizer->adapter, size);
+ }
+
+ packetizer->map_data = NULL;
+ packetizer->map_size = 0;
+ packetizer->map_offset = 0;
+ }
+
+ /* Ensure at least @size bytes are mapped at map_data + map_offset.
+ * Keeps the existing window when it is still big enough; otherwise
+ * flushes the consumed part and maps everything available. Returns
+ * FALSE when the adapter does not hold @size bytes yet. */
+ static gboolean
+ mpegts_packetizer_map (MpegTSPacketizer2 * packetizer, gsize size)
+ {
+ gsize available;
+
+ if (packetizer->map_size - packetizer->map_offset >= size)
+ return TRUE;
+
+ mpegts_packetizer_flush_bytes (packetizer, packetizer->map_offset);
+
+ available = gst_adapter_available (packetizer->adapter);
+ if (available < size)
+ return FALSE;
+
+ packetizer->map_data =
+ (guint8 *) gst_adapter_map (packetizer->adapter, available);
+ if (!packetizer->map_data)
+ return FALSE;
+
+ packetizer->map_size = available;
+ packetizer->map_offset = 0;
+
+ GST_LOG ("mapped %" G_GSIZE_FORMAT " bytes from adapter", available);
+
+ return TRUE;
+ }
+
+ /* Auto-detect the TS packet size by looking for 4 sync bytes (0x47)
+ * spaced at one of the known packet sizes (188/192/204/208). On
+ * success stores the size and positions map_offset on the first sync
+ * byte (4 bytes earlier for M2TS, whose timestamp precedes the sync).
+ * Returns FALSE (and flushes the scanned bytes) when undetermined. */
+ static gboolean
+ mpegts_try_discover_packet_size (MpegTSPacketizer2 * packetizer)
+ {
+ guint8 *data;
+ gsize size, i, j;
+
+ static const guint psizes[] = {
+ MPEGTS_NORMAL_PACKETSIZE,
+ MPEGTS_M2TS_PACKETSIZE,
+ MPEGTS_DVB_ASI_PACKETSIZE,
+ MPEGTS_ATSC_PACKETSIZE
+ };
+
+ if (!mpegts_packetizer_map (packetizer, 4 * MPEGTS_MAX_PACKETSIZE))
+ return FALSE;
+
+ size = packetizer->map_size - packetizer->map_offset;
+ data = packetizer->map_data + packetizer->map_offset;
+
+ for (i = 0; i + 3 * MPEGTS_MAX_PACKETSIZE < size; i++) {
+ /* find a sync byte */
+ if (data[i] != PACKET_SYNC_BYTE)
+ continue;
+
+ /* check for 4 consecutive sync bytes with each possible packet size */
+ for (j = 0; j < G_N_ELEMENTS (psizes); j++) {
+ guint packet_size = psizes[j];
+
+ if (data[i + packet_size] == PACKET_SYNC_BYTE &&
+ data[i + 2 * packet_size] == PACKET_SYNC_BYTE &&
+ data[i + 3 * packet_size] == PACKET_SYNC_BYTE) {
+ packetizer->packet_size = packet_size;
+ goto out;
+ }
+ }
+ }
+
+ out:
+ packetizer->map_offset += i;
+
+ if (packetizer->packet_size == 0) {
+ GST_DEBUG ("Could not determine packet size in %" G_GSIZE_FORMAT
+ " bytes buffer, flush %" G_GSIZE_FORMAT " bytes", size, i);
+ mpegts_packetizer_flush_bytes (packetizer, packetizer->map_offset);
+ return FALSE;
+ }
+
+ GST_INFO ("have packetsize detected: %u bytes", packetizer->packet_size);
+
+ /* M2TS packets carry a 4-byte timestamp BEFORE the sync byte */
+ if (packetizer->packet_size == MPEGTS_M2TS_PACKETSIZE &&
+ packetizer->map_offset >= 4)
+ packetizer->map_offset -= 4;
+
+ return TRUE;
+ }
+
+ /* Re-synchronize on the stream after sync loss: scan the mapped data
+ * for 3 sync bytes spaced at packet_size. On success map_offset points
+ * at the packet start (sync_offset accounts for the 4-byte M2TS
+ * timestamp prefix); on failure the scanned bytes are flushed. */
+ static gboolean
+ mpegts_packetizer_sync (MpegTSPacketizer2 * packetizer)
+ {
+ gboolean found = FALSE;
+ guint8 *data;
+ guint packet_size;
+ gsize size, sync_offset, i;
+
+ packet_size = packetizer->packet_size;
+
+ if (!mpegts_packetizer_map (packetizer, 3 * packet_size))
+ return FALSE;
+
+ size = packetizer->map_size - packetizer->map_offset;
+ data = packetizer->map_data + packetizer->map_offset;
+
+ if (packet_size == MPEGTS_M2TS_PACKETSIZE)
+ sync_offset = 4;
+ else
+ sync_offset = 0;
+
+ for (i = sync_offset; i + 2 * packet_size < size; i++) {
+ if (data[i] == PACKET_SYNC_BYTE &&
+ data[i + packet_size] == PACKET_SYNC_BYTE &&
+ data[i + 2 * packet_size] == PACKET_SYNC_BYTE) {
+ found = TRUE;
+ break;
+ }
+ }
+
+ packetizer->map_offset += i - sync_offset;
+
+ if (!found)
+ mpegts_packetizer_flush_bytes (packetizer, packetizer->map_offset);
+
+ return found;
+ }
+
+ /* Fetch and parse the next TS packet from the adapter into @packet.
+ * Detects the packet size on first use, re-syncs on sync-byte loss,
+ * and returns PACKET_NEED_MORE when insufficient data is buffered.
+ * The packet's data pointers reference the adapter map and stay valid
+ * until mpegts_packetizer_clear_packet() / the next flush. */
+ MpegTSPacketizerPacketReturn
+ mpegts_packetizer_next_packet (MpegTSPacketizer2 * packetizer,
+ MpegTSPacketizerPacket * packet)
+ {
+ guint8 *packet_data;
+ guint packet_size;
+ gsize sync_offset;
+
+ packet_size = packetizer->packet_size;
+ if (G_UNLIKELY (!packet_size)) {
+ if (!mpegts_try_discover_packet_size (packetizer))
+ return PACKET_NEED_MORE;
+ packet_size = packetizer->packet_size;
+ }
+
+ /* M2TS packets don't start with the sync byte, all other variants do */
+ if (packet_size == MPEGTS_M2TS_PACKETSIZE)
+ sync_offset = 4;
+ else
+ sync_offset = 0;
+
+ while (1) {
+ if (packetizer->need_sync) {
+ if (!mpegts_packetizer_sync (packetizer))
+ return PACKET_NEED_MORE;
+ packetizer->need_sync = FALSE;
+ }
+
+ if (!mpegts_packetizer_map (packetizer, packet_size))
+ return PACKET_NEED_MORE;
+
+ packet_data = &packetizer->map_data[packetizer->map_offset + sync_offset];
+
+ /* Check sync byte */
+ if (G_UNLIKELY (*packet_data != PACKET_SYNC_BYTE)) {
+ GST_DEBUG ("lost sync");
+ packetizer->need_sync = TRUE;
+ } else {
+ /* ALL mpeg-ts variants contain 188 bytes of data. Those with bigger
+ * packet sizes contain either extra data (timesync, FEC, ..) either
+ * before or after the data */
+ packet->data_start = packet_data;
+ packet->data_end = packet->data_start + 188;
+ packet->offset = packetizer->offset;
+ GST_LOG ("offset %" G_GUINT64_FORMAT, packet->offset);
+ packetizer->offset += packet_size;
+ GST_MEMDUMP ("data_start", packet->data_start, 16);
+
+ return mpegts_packetizer_parse_packet (packetizer, packet);
+ }
+ }
+ }
+
+ /* Parse and immediately discard the next packet; used to advance the
+ * stream when the caller only needs the return status. */
+ MpegTSPacketizerPacketReturn
+ mpegts_packetizer_process_next_packet (MpegTSPacketizer2 * packetizer)
+ {
+ MpegTSPacketizerPacket packet;
+ MpegTSPacketizerPacketReturn ret;
+
+ ret = mpegts_packetizer_next_packet (packetizer, &packet);
+ if (ret != PACKET_NEED_MORE)
+ mpegts_packetizer_clear_packet (packetizer, &packet);
+
+ return ret;
+ }
+
+ /* Release the current packet: advance the map window past it and, once
+ * less than one packet remains mapped, flush the consumed bytes.
+ * NOTE(review): packet_size is held in a guint8 — the largest known
+ * size (MPEGTS_ATSC, 208) still fits, but a guint would be safer. */
+ void
+ mpegts_packetizer_clear_packet (MpegTSPacketizer2 * packetizer,
+ MpegTSPacketizerPacket * packet)
+ {
+ guint8 packet_size = packetizer->packet_size;
+
+ if (packetizer->map_data) {
+ packetizer->map_offset += packet_size;
+ if (packetizer->map_size - packetizer->map_offset < packet_size)
+ mpegts_packetizer_flush_bytes (packetizer, packetizer->map_offset);
+ }
+ }
+
+ /* Return TRUE when at least one full packet is buffered (detecting the
+ * packet size first if still unknown). */
+ gboolean
+ mpegts_packetizer_has_packets (MpegTSPacketizer2 * packetizer)
+ {
+ if (G_UNLIKELY (!packetizer->packet_size)) {
+ if (!mpegts_try_discover_packet_size (packetizer))
+ return FALSE;
+ }
+ return gst_adapter_available (packetizer->adapter) >= packetizer->packet_size;
+ }
+
+ /*
+ * Ideally it should just return a section if:
+ * * The section is complete
+ * * The section is valid (sanity checks for length for example)
+ * * The section applies now (current_next_indicator)
+ * * The section is an update or was never seen
+ *
+ * The section should be a new GstMpegtsSection:
+ * * properly initialized
+ * * With pid, table_id AND section_type set (move logic from mpegtsbase)
+ * * With data copied into it (yes, minor overhead)
+ *
+ * In all other cases it should just return NULL
+ *
+ * If more than one section is available, the 'remaining' field will
+ * be set to the beginning of a valid GList containing other sections.
+ * */
+ /* Accumulate PSI section data from @packet's payload and return a
+ * complete, not-previously-seen GstMpegtsSection when one finishes
+ * inside this packet (additional completed sections go into
+ * *remaining). Returns NULL otherwise. See the block comment above
+ * for the exact contract. */
+ GstMpegtsSection *
+ mpegts_packetizer_push_section (MpegTSPacketizer2 * packetizer,
+ MpegTSPacketizerPacket * packet, GList ** remaining)
+ {
+ GstMpegtsSection *section;
+ GstMpegtsSection *res = NULL;
+ MpegTSPacketizerStream *stream;
+ gboolean long_packet;
+ guint8 pointer = 0, table_id;
+ guint16 subtable_extension;
+ gsize to_read;
+ guint section_length;
+ /* data points to the current read location
+ * data_start points to the beginning of the data to accumulate */
+ guint8 *data, *data_start;
+ guint8 packet_cc;
+ GList *others = NULL;
+ guint8 version_number, section_number, last_section_number;
+
+ data = packet->data;
+ packet_cc = FLAGS_CONTINUITY_COUNTER (packet->scram_afc_cc);
+
+ /* Get our filter */
+ stream = packetizer->streams[packet->pid];
+ if (G_UNLIKELY (stream == NULL)) {
+ if (!packet->payload_unit_start_indicator) {
+ /* Early exit (we need to start with a section start) */
+ GST_DEBUG ("PID 0x%04x waiting for section start", packet->pid);
+ goto out;
+ }
+ stream = mpegts_packetizer_stream_new (packet->pid);
+ packetizer->streams[packet->pid] = stream;
+ }
+
+ GST_MEMDUMP ("Full packet data", packet->data,
+ packet->data_end - packet->data);
+
+ /* This function is split into several parts:
+ *
+ * Pre checks (packet-wide). Determines where we go next
+ * accumulate_data: store data and check if section is complete
+ * section_start: handle beginning of a section, if needed loop back to
+ * accumulate_data
+ *
+ * The trigger that makes the loop stop and return is if:
+ * 1) We do not have enough data for the current packet
+ * 2) There is remaining data after a packet which is only made
+ * of stuffing bytes (0xff).
+ *
+ * Pre-loop checks, related to the whole incoming packet:
+ *
+ * If there is a CC-discont:
+ * If it is a PUSI, skip the pointer and handle section_start
+ * If not a PUSI, reset and return nothing
+ * If there is not a CC-discont:
+ * If it is a PUSI
+ * If pointer, accumulate that data and check for complete section
+ * (loop)
+ * If it is not a PUSI
+ * Accumulate the expected data and check for complete section
+ * (loop)
+ *
+ **/
+
+ if (packet->payload_unit_start_indicator) {
+ pointer = *data++;
+ /* If the pointer is zero, we're guaranteed to be able to handle it */
+ if (pointer == 0) {
+ GST_LOG
+ ("PID 0x%04x PUSI and pointer == 0, skipping straight to section_start parsing",
+ packet->pid);
+ mpegts_packetizer_clear_section (stream);
+ goto section_start;
+ }
+ }
+
+ if (stream->continuity_counter == CONTINUITY_UNSET ||
+ (stream->continuity_counter + 1) % 16 != packet_cc) {
+ if (stream->continuity_counter != CONTINUITY_UNSET)
+ GST_WARNING ("PID 0x%04x section discontinuity (%d vs %d)", packet->pid,
+ stream->continuity_counter, packet_cc);
+ mpegts_packetizer_clear_section (stream);
+ /* If not a PUSI, not much we can do */
+ if (!packet->payload_unit_start_indicator) {
+ GST_LOG ("PID 0x%04x continuity discont/unset and not PUSI, bailing out",
+ packet->pid);
+ goto out;
+ }
+ /* If PUSI, skip pointer data and carry on to section start.
+ * Log BEFORE clearing pointer so the real skip count is reported
+ * (previously this always logged 0). */
+ GST_LOG ("discont, but PUSI, skipped %d bytes and doing section start",
+ pointer);
+ data += pointer;
+ pointer = 0;
+ goto section_start;
+ }
+
+ GST_LOG ("Accumulating data from beginning of packet");
+
+ data_start = data;
+
+ accumulate_data:
+ /* If not the beginning of a new section, accumulate what we have */
+ stream->continuity_counter = packet_cc;
+ to_read = MIN (stream->section_length - stream->section_offset,
+ packet->data_end - data_start);
+ memcpy (stream->section_data + stream->section_offset, data_start, to_read);
+ stream->section_offset += to_read;
+ /* Point data to after the data we accumulated */
+ data = data_start + to_read;
+ GST_DEBUG ("Appending data (need %d, have %d)", stream->section_length,
+ stream->section_offset);
+
+ /* Check if we have enough */
+ if (stream->section_offset < stream->section_length) {
+ GST_DEBUG ("PID 0x%04x, section not complete (Got %d, need %d)",
+ stream->pid, stream->section_offset, stream->section_length);
+ goto out;
+ }
+
+ /* Small sanity check. We should have collected *exactly* the right amount */
+ if (G_UNLIKELY (stream->section_offset != stream->section_length))
+ GST_WARNING ("PID 0x%04x Accumulated too much data (%d vs %d) !",
+ stream->pid, stream->section_offset, stream->section_length);
+ GST_DEBUG ("PID 0x%04x Section complete", stream->pid);
+
+ if ((section = mpegts_packetizer_parse_section_header (packetizer, stream))) {
+ if (res)
+ others = g_list_append (others, section);
+ else
+ res = section;
+ }
+
+ section_start:
+ subtable_extension = 0;
+ version_number = 0;
+ last_section_number = 0;
+ section_number = 0;
+ table_id = 0;
+
+ /* FIXME : We need at least 3 bytes (or 8 for long packets) with current algorithm :(
+ * We might end up losing sections that start across two packets (srsl...) */
+ if (data > packet->data_end - 3 || *data == 0xff) {
+ /* flush stuffing bytes and leave */
+ mpegts_packetizer_clear_section (stream);
+ goto out;
+ }
+
+ /* We have more data to process ... */
+ GST_DEBUG ("PID 0x%04x, More section present in packet (remaining bytes:%"
+ G_GSIZE_FORMAT ")", stream->pid, (gsize) (packet->data_end - data));
+ GST_MEMDUMP ("section_start", data, packet->data_end - data);
+ data_start = data;
+ /* Beginning of a new section */
+ /*
+ * section_syntax_indicator means that the header is of the following format:
+ * * table_id (8bit)
+ * * section_syntax_indicator (1bit) == 0
+ * * reserved/private fields (3bit)
+ * * section_length (12bit)
+ * * data (of size section_length)
+ * * NO CRC !
+ */
+ long_packet = data[1] & 0x80;
+
+ /* Fast path for short packets */
+ if (!long_packet) {
+ /* We can create the section now (function will check for size) */
+ GST_DEBUG ("Short packet");
+ section_length = (GST_READ_UINT16_BE (data + 1) & 0xfff) + 3;
+ /* Only do fast-path if we have enough byte */
+ if (data + section_length <= packet->data_end) {
+ if ((section =
+ gst_mpegts_section_new (packet->pid, g_memdup2 (data,
+ section_length), section_length))) {
+ GST_DEBUG ("PID 0x%04x Short section complete !", packet->pid);
+ section->offset = packet->offset;
+ if (res)
+ others = g_list_append (others, section);
+ else
+ res = section;
+ }
+ /* Advance reader and potentially read another section */
+ data += section_length;
+ if (data < packet->data_end && *data != 0xff)
+ goto section_start;
+ /* If not, exit */
+ goto out;
+ }
+ /* We don't have enough bytes to do short section shortcut */
+ }
+
+ /* Beginning of a new section, do as much pre-parsing as possible */
+ /* table_id : 8 bit */
+ table_id = *data++;
+
+ /* section_syntax_indicator : 1 bit
+ * other_fields (reserved) : 3 bit
+ * section_length : 12 bit */
+ section_length = (GST_READ_UINT16_BE (data) & 0x0FFF) + 3;
+ data += 2;
+
+ if (long_packet) {
+ /* Do we have enough data for a long packet? */
+ if (data > packet->data_end - 5)
+ goto out;
+
+ /* subtable_extension (always present, we are in a long section) */
+ /* subtable extension : 16 bit */
+ subtable_extension = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ /* reserved : 2 bit
+ * version_number : 5 bit
+ * current_next_indicator : 1 bit */
+ /* Bail out now if current_next_indicator == 0 */
+ if (G_UNLIKELY (!(*data & 0x01))) {
+ GST_DEBUG
+ ("PID 0x%04x table_id 0x%02x section does not apply (current_next_indicator == 0)",
+ packet->pid, table_id);
+ goto out;
+ }
+
+ version_number = *data++ >> 1 & 0x1f;
+ /* section_number : 8 bit */
+ section_number = *data++;
+ /* last_section_number : 8 bit */
+ last_section_number = *data++;
+ } else {
+ subtable_extension = 0;
+ version_number = 0;
+ section_number = 0;
+ last_section_number = 0;
+ }
+ GST_DEBUG
+ ("PID 0x%04x length:%d table_id:0x%02x subtable_extension:0x%04x version_number:%d section_number:%d(last:%d)",
+ packet->pid, section_length, table_id, subtable_extension, version_number,
+ section_number, last_section_number);
+
+ to_read = MIN (section_length, packet->data_end - data_start);
+
+ /* Check as early as possible whether we already saw this section
+ * i.e. that we saw a subtable with:
+ * * same subtable_extension (might be zero)
+ * * same version_number
+ * * same last_section_number
+ * * same section_number was seen
+ */
+ if (seen_section_before (stream, table_id, subtable_extension,
+ version_number, section_number, last_section_number, data_start,
+ to_read)) {
+ GST_DEBUG
+ ("PID 0x%04x Already processed table_id:0x%02x subtable_extension:0x%04x, version_number:%d, section_number:%d",
+ packet->pid, table_id, subtable_extension, version_number,
+ section_number);
+ /* skip data and see if we have more sections after */
+ data = data_start + to_read;
+ if (data == packet->data_end || *data == 0xff)
+ goto out;
+ goto section_start;
+ }
+ if (G_UNLIKELY (section_number > last_section_number)) {
+ GST_WARNING
+ ("PID 0x%04x corrupted packet (section_number:%d > last_section_number:%d)",
+ packet->pid, section_number, last_section_number);
+ goto out;
+ }
+
+
+ /* Copy over already parsed values */
+ stream->table_id = table_id;
+ stream->section_length = section_length;
+ stream->version_number = version_number;
+ stream->subtable_extension = subtable_extension;
+ stream->section_number = section_number;
+ stream->last_section_number = last_section_number;
+ stream->offset = packet->offset;
+
+ /* Create enough room to store chunks of sections */
+ stream->section_data = g_malloc (stream->section_length);
+ stream->section_offset = 0;
+
+ /* Finally, accumulate and check if we parsed enough */
+ goto accumulate_data;
+
+ out:
+ packet->data = data;
+ *remaining = others;
+
+ GST_DEBUG ("result: %p", res);
+
+ return res;
+ }
+
+ /* One-time local initialisation: registers the "mpegtspacketizer"
+  * GStreamer debug category that all GST_DEBUG/GST_WARNING calls in
+  * this file log under. */
+ static void
+ _init_local (void)
+ {
+   GST_DEBUG_CATEGORY_INIT (mpegts_packetizer_debug, "mpegtspacketizer", 0,
+       "MPEG transport stream parser");
+ }
+
+
+ /* Re-anchor the PCR estimation of @pcr to the given @time/@gstpcrtime
+  * pair, invalidating the previously produced output values. When
+  * @reset_skew is TRUE the skew-averaging window is also restarted
+  * from scratch. */
+ static void
+ mpegts_packetizer_resync (MpegTSPCR * pcr, GstClockTime time,
+     GstClockTime gstpcrtime, gboolean reset_skew)
+ {
+   /* New base pair: all deltas are computed against these from now on */
+   pcr->base_pcrtime = gstpcrtime;
+   pcr->base_time = time;
+   pcr->prev_send_diff = GST_CLOCK_TIME_NONE;
+   pcr->prev_out_time = GST_CLOCK_TIME_NONE;
+
+   if (!reset_skew)
+     return;
+
+   /* Forget the accumulated skew window entirely */
+   pcr->window_filling = TRUE;
+   pcr->skew = 0;
+   pcr->window_pos = 0;
+   pcr->window_size = 0;
+   pcr->window_min = 0;
+ }
+
+
+ /* Code mostly copied from -good/gst/rtpmanager/rtpjitterbuffer.c */
+
+ /* For the clock skew we use a windowed low point averaging algorithm as can be
+ * found in Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation
+ * over Network Delays":
+ * http://www.grame.fr/Ressources/pub/TR-050601.pdf
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
+ *
+ * The idea is that the jitter is composed of:
+ *
+ * J = N + n
+ *
+ * N : a constant network delay.
+ * n : random added noise. The noise is concentrated around 0
+ *
+ * In the receiver we can track the elapsed time at the sender with:
+ *
+ * send_diff(i) = (Tsi - Ts0);
+ *
+ * Tsi : The time at the sender at packet i
+ * Ts0 : The time at the sender at the first packet
+ *
+ * This is the difference between the RTP timestamp in the first received packet
+ * and the current packet.
+ *
+ * At the receiver we have to deal with the jitter introduced by the network.
+ *
+ * recv_diff(i) = (Tri - Tr0)
+ *
+ * Tri : The time at the receiver at packet i
+ * Tr0 : The time at the receiver at the first packet
+ *
+ * Both of these values contain a jitter Ji, a jitter for packet i, so we can
+ * write:
+ *
+ * recv_diff(i) = (Cri + D + ni) - (Cr0 + D + n0))
+ *
+ * Cri : The time of the clock at the receiver for packet i
+ * D + ni : The jitter when receiving packet i
+ *
+ * We see that the network delay is irrelevant here as we can eliminate D:
+ *
+ * recv_diff(i) = (Cri + ni) - (Cr0 + n0))
+ *
+ * The drift is now expressed as:
+ *
+ * Drift(i) = recv_diff(i) - send_diff(i);
+ *
+ * We now keep the W latest values of Drift and find the minimum (this is the
+ * one with the lowest network jitter and thus the one which is least affected
+ * by it). We average this lowest value to smooth out the resulting network skew.
+ *
+ * Both the window and the weighting used for averaging influence the accuracy
+ * of the drift estimation. Finding the correct parameters turns out to be a
+ * compromise between accuracy and inertia.
+ *
+ * We use a 2 second window or up to 512 data points, which is statistically big
+ * enough to catch spikes (FIXME, detect spikes).
+ * We also use a rather large weighting factor (125) to smoothly adapt. During
+ * startup, when filling the window, we use a parabolic weighting factor, the
+ * more the window is filled, the faster we move to the detected possible skew.
+ *
+ * Returns: @time adjusted with the clock skew.
+ */
+ /* Convert a raw PCR value into an output running time, compensating for
+  * the skew between the sender (PCR) clock and the receiver (buffer
+  * timestamp) clock using the windowed low-point averaging described in
+  * the comment block above.
+  *
+  * @packetizer: the packetizer (for the PCR discont threshold)
+  * @pcr: per-PCR-PID estimation state (window, base times, offsets)
+  * @pcrtime: raw PCR value of the current observation
+  * @time: arrival/buffer timestamp, may be GST_CLOCK_TIME_NONE in which
+  *        case no skew observation is recorded for this sample
+  *
+  * Returns: @time adjusted with the estimated clock skew. */
+ static GstClockTime
+ calculate_skew (MpegTSPacketizer2 * packetizer,
+     MpegTSPCR * pcr, guint64 pcrtime, GstClockTime time)
+ {
+   guint64 send_diff, recv_diff;
+   gint64 delta;
+   gint64 old;
+   gint pos, i;
+   GstClockTime gstpcrtime, out_time;
+ #ifndef GST_DISABLE_GST_DEBUG
+   guint64 slope;
+ #endif
+
+   gstpcrtime = PCRTIME_TO_GSTTIME (pcrtime) + pcr->pcroffset;
+
+   /* first time, lock on to time and gstpcrtime */
+   if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pcr->base_time))) {
+     pcr->base_time = time;
+     pcr->prev_out_time = GST_CLOCK_TIME_NONE;
+     GST_DEBUG ("Taking new base time %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
+   }
+
+   if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pcr->base_pcrtime))) {
+     pcr->base_pcrtime = gstpcrtime;
+     pcr->prev_send_diff = -1;
+     GST_DEBUG ("Taking new base pcrtime %" GST_TIME_FORMAT,
+         GST_TIME_ARGS (gstpcrtime));
+   }
+
+   /* Handle PCR wraparound and resets */
+   if (GST_CLOCK_TIME_IS_VALID (pcr->last_pcrtime) &&
+       gstpcrtime < pcr->last_pcrtime) {
+     if (pcr->last_pcrtime - gstpcrtime > PCR_GST_MAX_VALUE / 2) {
+       /* PCR wraparound */
+       GST_DEBUG ("PCR wrap");
+       pcr->pcroffset += PCR_GST_MAX_VALUE;
+       gstpcrtime = PCRTIME_TO_GSTTIME (pcrtime) + pcr->pcroffset;
+       send_diff = gstpcrtime - pcr->base_pcrtime;
+     } else if (GST_CLOCK_TIME_IS_VALID (time)
+         && pcr->last_pcrtime - gstpcrtime > 15 * GST_SECOND) {
+       /* Time jumped backward by > 15 seconds, and we have a timestamp
+        * to use to close the discont. Assume a reset */
+       GST_DEBUG ("PCR reset");
+       /* Calculate PCR we would have expected for the given input time,
+        * essentially applying the reverse correction process
+        *
+        * We want to find the PCR offset to apply
+        *   pcroffset = (corrected) gstpcrtime - (received) gstpcrtime
+        *
+        * send_diff = (corrected) gstpcrtime - pcr->base_pcrtime
+        * recv_diff = time - pcr->base_time
+        * out_time = pcr->base_time + send_diff
+        *
+        * We are assuming that send_diff == recv_diff
+        *   (corrected) gstpcrtime - pcr->base_pcrtime = time - pcr->base_time
+        * Giving us:
+        *   (corrected) gstpcrtime = time - pcr->base_time + pcr->base_pcrtime
+        *
+        * And therefore:
+        *   pcroffset = time - pcr->base_time + pcr->base_pcrtime - (received) gstpcrtime
+        **/
+       pcr->pcroffset += time - pcr->base_time + pcr->base_pcrtime - gstpcrtime;
+       gstpcrtime = PCRTIME_TO_GSTTIME (pcrtime) + pcr->pcroffset;
+       send_diff = gstpcrtime - pcr->base_pcrtime;
+       GST_DEBUG ("Introduced offset is now %" GST_TIME_FORMAT
+           " corrected pcr time %" GST_TIME_FORMAT,
+           GST_TIME_ARGS (pcr->pcroffset), GST_TIME_ARGS (gstpcrtime));
+     } else {
+       /* Small jumps backward, assume some arrival jitter and skip it */
+       send_diff = 0;
+
+       /* The following code are the different ways we deal with small-ish
+        * jitter, ranging in severity from "can be ignored" to "this needs a full
+        * resync" */
+
+       if (time == pcr->base_time) {
+         /* If this comes from a non-fully-timestamped source (i.e. adaptive
+          * streams), then cope with the fact that some producers generate utter
+          * PCR garbage on fragment ends.
+          *
+          * We detect this comes from a non-fully-timestamped source by the fact
+          * that the buffer time never changes */
+         GST_DEBUG ("Ignoring PCR resets on non-fully timestamped stream");
+       } else if (pcr->last_pcrtime - gstpcrtime < GST_SECOND) {
+         GST_WARNING
+             ("(small) backward timestamps at server or no buffer timestamps. Ignoring.");
+         /* This will trigger the no_skew logic before but leave other state
+          * intact */
+         time = GST_CLOCK_TIME_NONE;
+       } else {
+         /* A bigger backward step than packet out-of-order can account for. Reset base PCR time
+          * to be resynched the next time we see a PCR */
+         GST_WARNING
+             ("backward timestamps at server or no buffer timestamps. Resync base PCR");
+         pcr->base_pcrtime = GST_CLOCK_TIME_NONE;
+       }
+     }
+   } else
+     send_diff = gstpcrtime - pcr->base_pcrtime;
+
+   GST_DEBUG ("gstpcr %" GST_TIME_FORMAT ", buftime %" GST_TIME_FORMAT
+       ", base %" GST_TIME_FORMAT ", send_diff %" GST_TIME_FORMAT,
+       GST_TIME_ARGS (gstpcrtime), GST_TIME_ARGS (time),
+       GST_TIME_ARGS (pcr->base_pcrtime), GST_TIME_ARGS (send_diff));
+
+   /* keep track of the last extended pcrtime */
+   pcr->last_pcrtime = gstpcrtime;
+
+   /* we don't have an arrival timestamp so we can't do skew detection. we
+    * should still apply a timestamp based on RTP timestamp and base_time */
+   if (!GST_CLOCK_TIME_IS_VALID (time)
+       || !GST_CLOCK_TIME_IS_VALID (pcr->base_time))
+     goto no_skew;
+
+   /* elapsed time at receiver, includes the jitter */
+   recv_diff = time - pcr->base_time;
+
+   /* Ignore packets received at 100% the same time (i.e. from the same input buffer) */
+   if (G_UNLIKELY (time == pcr->prev_in_time
+           && GST_CLOCK_TIME_IS_VALID (pcr->prev_in_time)))
+     goto no_skew;
+
+   /* measure the diff */
+   delta = ((gint64) recv_diff) - ((gint64) send_diff);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+   /* measure the slope, this gives a rought estimate between the sender speed
+    * and the receiver speed. This should be approximately 8, higher values
+    * indicate a burst (especially when the connection starts) */
+   slope = recv_diff > 0 ? (send_diff * 8) / recv_diff : 8;
+ #endif
+
+   GST_DEBUG ("time %" GST_TIME_FORMAT ", base %" GST_TIME_FORMAT
+       ", recv_diff %" GST_TIME_FORMAT ", slope %" G_GUINT64_FORMAT,
+       GST_TIME_ARGS (time), GST_TIME_ARGS (pcr->base_time),
+       GST_TIME_ARGS (recv_diff), slope);
+
+   /* if the difference between the sender timeline and the receiver timeline
+    * changed too quickly we have to resync because the server likely restarted
+    * its timestamps. */
+   if (ABS (delta - pcr->skew) > packetizer->pcr_discont_threshold) {
+     GST_WARNING ("delta - skew: %" GST_STIME_FORMAT " too big, reset skew",
+         GST_STIME_ARGS (delta - pcr->skew));
+     mpegts_packetizer_resync (pcr, time, gstpcrtime, TRUE);
+     send_diff = 0;
+     delta = 0;
+   }
+
+   pos = pcr->window_pos;
+
+   if (G_UNLIKELY (pcr->window_filling)) {
+     /* we are filling the window */
+     GST_DEBUG ("filling %d, delta %" G_GINT64_FORMAT, pos, delta);
+     pcr->window[pos++] = delta;
+     /* calc the min delta we observed */
+     if (G_UNLIKELY (pos == 1 || delta < pcr->window_min))
+       pcr->window_min = delta;
+
+     if (G_UNLIKELY (send_diff >= MAX_TIME || pos >= MAX_WINDOW)) {
+       pcr->window_size = pos;
+
+       /* window filled */
+       GST_DEBUG ("min %" G_GINT64_FORMAT, pcr->window_min);
+
+       /* the skew is now the min */
+       pcr->skew = pcr->window_min;
+       pcr->window_filling = FALSE;
+     } else {
+       gint perc_time, perc_window, perc;
+
+       /* figure out how much we filled the window, this depends on the amount of
+        * time we have or the max number of points we keep. */
+       perc_time = send_diff * 100 / MAX_TIME;
+       perc_window = pos * 100 / MAX_WINDOW;
+       perc = MAX (perc_time, perc_window);
+
+       /* make a parabolic function, the closer we get to the MAX, the more value
+        * we give to the scaling factor of the new value */
+       perc = perc * perc;
+
+       /* quickly go to the min value when we are filling up, slowly when we are
+        * just starting because we're not sure it's a good value yet. */
+       pcr->skew =
+           (perc * pcr->window_min + ((10000 - perc) * pcr->skew)) / 10000;
+       pcr->window_size = pos + 1;
+     }
+   } else {
+     /* pick old value and store new value. We keep the previous value in order
+      * to quickly check if the min of the window changed */
+     old = pcr->window[pos];
+     pcr->window[pos++] = delta;
+
+     if (G_UNLIKELY (delta <= pcr->window_min)) {
+       /* if the new value we inserted is smaller or equal to the current min,
+        * it becomes the new min */
+       pcr->window_min = delta;
+     } else if (G_UNLIKELY (old == pcr->window_min)) {
+       gint64 min = G_MAXINT64;
+
+       /* if we removed the old min, we have to find a new min */
+       for (i = 0; i < pcr->window_size; i++) {
+         /* we found another value equal to the old min, we can stop searching now */
+         if (pcr->window[i] == old) {
+           min = old;
+           break;
+         }
+         if (pcr->window[i] < min)
+           min = pcr->window[i];
+       }
+       pcr->window_min = min;
+     }
+     /* average the min values */
+     pcr->skew = (pcr->window_min + (124 * pcr->skew)) / 125;
+     GST_DEBUG ("delta %" G_GINT64_FORMAT ", new min: %" G_GINT64_FORMAT,
+         delta, pcr->window_min);
+   }
+   /* wrap around in the window */
+   if (G_UNLIKELY (pos >= pcr->window_size))
+     pos = 0;
+
+   pcr->window_pos = pos;
+
+ no_skew:
+   /* the output time is defined as the base timestamp plus the PCR time
+    * adjusted for the clock skew .*/
+   if (pcr->base_time != -1) {
+     out_time = pcr->base_time + send_diff;
+     /* skew can be negative and we don't want to make invalid timestamps */
+     if (pcr->skew < 0 && out_time < -pcr->skew) {
+       out_time = 0;
+     } else {
+       out_time += pcr->skew;
+     }
+     /* check if timestamps are not going backwards, we can only check this if we
+      * have a previous out time and a previous send_diff */
+     if (G_LIKELY (pcr->prev_out_time != -1 && pcr->prev_send_diff != -1)) {
+       /* now check for backwards timestamps */
+       if (G_UNLIKELY (
+               /* if the server timestamps went up and the out_time backwards */
+               (send_diff > pcr->prev_send_diff
+                   && out_time < pcr->prev_out_time) ||
+               /* if the server timestamps went backwards and the out_time forwards */
+               (send_diff < pcr->prev_send_diff
+                   && out_time > pcr->prev_out_time) ||
+               /* if the server timestamps did not change */
+               send_diff == pcr->prev_send_diff)) {
+         GST_DEBUG ("backwards timestamps, using previous time");
+         /* FIX: re-use the previous output time as the message says; the
+          * previous code converted out_time with GSTTIME_TO_MPEGTIME which
+          * produced a value in the wrong unit/scale entirely */
+         out_time = pcr->prev_out_time;
+       }
+     }
+   } else {
+     /* We simply use the pcrtime without applying any skew compensation */
+     out_time = time;
+   }
+
+   pcr->prev_out_time = out_time;
+   pcr->prev_in_time = time;
+   pcr->prev_send_diff = send_diff;
+
+   GST_DEBUG ("skew %" G_GINT64_FORMAT ", out %" GST_TIME_FORMAT,
+       pcr->skew, GST_TIME_ARGS (out_time));
+
+   return out_time;
+ }
+
+ /* Walk the group list and (re-)estimate the pcr_offset of every group
+  * still flagged PCR_GROUP_FLAG_ESTIMATED, using the previous group's
+  * values and a rough bitrate estimation to decide whether the gap
+  * between two groups is a PCR wraparound, a PCR reset, or plain
+  * continuation.
+  * NOTE(review): @group is only used for debug output; the loop
+  * re-evaluates every ESTIMATED group in the table, not just @group. */
+ static void
+ _reevaluate_group_pcr_offset (MpegTSPCR * pcrtable, PCROffsetGroup * group)
+ {
+   PCROffsetGroup *prev = NULL;
+ #ifndef GST_DISABLE_GST_DEBUG
+   PCROffsetGroup *first = pcrtable->groups->data;
+ #endif
+   PCROffsetCurrent *current = pcrtable->current;
+   GList *tmp;
+
+   /* Go over all ESTIMATED groups until the target group */
+   for (tmp = pcrtable->groups; tmp; tmp = tmp->next) {
+     PCROffsetGroup *cur = (PCROffsetGroup *) tmp->data;
+
+     /* Skip groups that don't need re-evaluation */
+     if (!(cur->flags & PCR_GROUP_FLAG_ESTIMATED)) {
+       GST_DEBUG ("Skipping group %p pcr_offset (currently %" GST_TIME_FORMAT
+           ")", cur, GST_TIME_ARGS (PCRTIME_TO_GSTTIME (cur->pcr_offset)));
+       prev = cur;
+       continue;
+     }
+
+     /* This should not happen ! The first group is *always* correct (zero) */
+     if (G_UNLIKELY (prev == NULL)) {
+       GST_ERROR ("First PCR Group was not estimated (bug). Setting to zero");
+       cur->pcr_offset = 0;
+       cur->flags &= ~PCR_GROUP_FLAG_ESTIMATED;
+       return;
+     }
+
+     /* Finally do the estimation of this group's PCR offset based on the
+      * previous group information */
+
+     GST_DEBUG ("Re-evaluating group %p pcr_offset (currently %" GST_TIME_FORMAT
+         ")", group, GST_TIME_ARGS (PCRTIME_TO_GSTTIME (cur->pcr_offset)));
+
+     GST_DEBUG ("cur->first_pcr:%" GST_TIME_FORMAT " prev->first_pcr:%"
+         GST_TIME_FORMAT, GST_TIME_ARGS (PCRTIME_TO_GSTTIME (cur->first_pcr)),
+         GST_TIME_ARGS (PCRTIME_TO_GSTTIME (prev->first_pcr)));
+
+     /* A raw first_pcr lower than the previous group's means either a
+      * wraparound or a reset happened between the two groups */
+     if (G_UNLIKELY (cur->first_pcr < prev->first_pcr)) {
+       guint64 prevbr, lastbr;
+       guint64 prevpcr;
+       guint64 prevoffset, lastoffset;
+
+       /* Take the previous group pcr_offset and figure out how much to add
+        * to it for the current group */
+
+       /* Right now we do a dumb bitrate estimation
+        * estimate bitrate (prev - first) : bitrate from the start
+        * estimate bitrate (prev) : bitrate of previous group
+        * estimate bitrate (last - first) : bitrate from previous group
+        *
+        * We will use raw (non-corrected/non-absolute) PCR values in a first time
+        * to detect wraparound/resets/gaps...
+        *
+        * We will use the corrected/absolute PCR values to calculate
+        * bitrate and estimate the target group pcr_offset.
+        * */
+
+       /* If the current window estimator is over the previous group, used those
+        * values as the latest (since they are more recent) */
+       if (current->group == prev && current->pending[current->last].offset) {
+         prevoffset =
+             current->pending[current->last].offset + prev->first_offset;
+         prevpcr = current->pending[current->last].pcr + prev->first_pcr;
+         /* prevbr: bitrate(prev) */
+         prevbr =
+             gst_util_uint64_scale (PCR_SECOND,
+             current->pending[current->last].offset,
+             current->pending[current->last].pcr);
+         GST_DEBUG ("Previous group bitrate (%" G_GUINT64_FORMAT " / %"
+             GST_TIME_FORMAT ") : %" G_GUINT64_FORMAT,
+             current->pending[current->last].offset,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (current->pending[current->
+                         last].pcr)), prevbr);
+       } else if (prev->values[prev->last_value].offset) {
+         prevoffset = prev->values[prev->last_value].offset + prev->first_offset;
+         prevpcr = prev->values[prev->last_value].pcr + prev->first_pcr;
+         /* prevbr: bitrate(prev) (FIXME : Cache) */
+         prevbr =
+             gst_util_uint64_scale (PCR_SECOND,
+             prev->values[prev->last_value].offset,
+             prev->values[prev->last_value].pcr);
+         GST_DEBUG ("Previous group bitrate (%" G_GUINT64_FORMAT " / %"
+             GST_TIME_FORMAT ") : %" G_GUINT64_FORMAT,
+             prev->values[prev->last_value].offset,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (prev->values[prev->
+                         last_value].pcr)), prevbr);
+       } else {
+         GST_DEBUG ("Using overall bitrate");
+         prevoffset = prev->values[prev->last_value].offset + prev->first_offset;
+         prevpcr = prev->values[prev->last_value].pcr + prev->first_pcr;
+         prevbr = gst_util_uint64_scale (PCR_SECOND,
+             prev->first_offset, prev->pcr_offset);
+       }
+       lastoffset = cur->values[cur->last_value].offset + cur->first_offset;
+
+       GST_DEBUG ("Offset first:%" G_GUINT64_FORMAT " prev:%" G_GUINT64_FORMAT
+           " cur:%" G_GUINT64_FORMAT, first->first_offset, prevoffset,
+           lastoffset);
+       GST_DEBUG ("PCR first:%" GST_TIME_FORMAT " prev:%" GST_TIME_FORMAT
+           " cur:%" GST_TIME_FORMAT,
+           GST_TIME_ARGS (PCRTIME_TO_GSTTIME (first->first_pcr)),
+           GST_TIME_ARGS (PCRTIME_TO_GSTTIME (prevpcr)),
+           GST_TIME_ARGS (PCRTIME_TO_GSTTIME (cur->values[cur->last_value].pcr +
+                   cur->first_pcr)));
+
+       /* A drop of more than 90% of the PCR range is treated as a
+        * wraparound, anything smaller as a reset */
+       if (prevpcr - cur->first_pcr > (PCR_MAX_VALUE * 9 / 10)) {
+         gfloat diffprev;
+         guint64 guess_offset;
+
+         /* Let's assume there is a PCR wraparound between the previous and current
+          * group.
+          * [ prev ]... PCR_MAX | 0 ...[ current ]
+          * The estimated pcr_offset would therefore be:
+          * current.first + (PCR_MAX_VALUE - prev.first)
+          *
+          * 1) Check if bitrate(prev) would be consistent with bitrate (cur - prev)
+          */
+         guess_offset = PCR_MAX_VALUE - prev->first_pcr + cur->first_pcr;
+         lastbr = gst_util_uint64_scale (PCR_SECOND, lastoffset - prevoffset,
+             guess_offset + cur->values[cur->last_value].pcr - (prevpcr -
+                 prev->first_pcr));
+         GST_DEBUG ("Wraparound prev-cur (guess_offset:%" GST_TIME_FORMAT
+             ") bitrate:%" G_GUINT64_FORMAT,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (guess_offset)), lastbr);
+         diffprev = (float) 100.0 *(ABSDIFF (prevbr, lastbr)) / (float) prevbr;
+         GST_DEBUG ("Difference with previous bitrate:%f", diffprev);
+         /* Only accept the wraparound guess if bitrates agree within 10%;
+          * within 1% we are confident enough to drop the ESTIMATED flag */
+         if (diffprev < 10.0) {
+           GST_DEBUG ("Difference < 10.0, Setting pcr_offset to %"
+               G_GUINT64_FORMAT, guess_offset);
+           cur->pcr_offset = guess_offset;
+           if (diffprev < 1.0) {
+             GST_DEBUG ("Difference < 1.0, Removing ESTIMATED flags");
+             cur->flags &= ~PCR_GROUP_FLAG_ESTIMATED;
+           }
+         }
+         /* Indicate that the previous group is before a wrapover */
+         prev->flags |= PCR_GROUP_FLAG_WRAPOVER;
+       } else {
+         guint64 resetprev;
+         /* Let's assume there was a PCR reset between the previous and current
+          * group
+          * [ prev ] ... x | x - reset ... [ current ]
+          *
+          * The estimated pcr_offset would then be
+          * = current.first - (x - reset) + (x - prev.first) + 100ms (for safety)
+          * = current.first + reset - prev.first + 100ms (for safety)
+          */
+         /* In order to calculate the reset, we estimate what the PCR would have
+          * been by using prevbr */
+         /* FIXME : Which bitrate should we use ??? */
+         GST_DEBUG ("Using prevbr:%" G_GUINT64_FORMAT " and taking offsetdiff:%"
+             G_GUINT64_FORMAT, prevbr, cur->first_offset - prev->first_offset);
+         resetprev =
+             gst_util_uint64_scale (PCR_SECOND,
+             cur->first_offset - prev->first_offset, prevbr);
+         GST_DEBUG ("Estimated full PCR for offset %" G_GUINT64_FORMAT
+             ", using prevbr:%"
+             GST_TIME_FORMAT, cur->first_offset,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (resetprev)));
+         cur->pcr_offset = prev->pcr_offset + resetprev + 100 * PCR_MSECOND;
+         GST_DEBUG ("Adjusted group PCR_offset to %" GST_TIME_FORMAT,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (cur->pcr_offset)));
+         /* Indicate that the previous group is before a reset */
+         prev->flags |= PCR_GROUP_FLAG_RESET;
+       }
+     } else {
+       /* FIXME : Detect gaps if bitrate difference is really too big ? */
+       cur->pcr_offset = prev->pcr_offset + cur->first_pcr - prev->first_pcr;
+       GST_DEBUG ("Assuming there is no gap, setting pcr_offset to %"
+           GST_TIME_FORMAT,
+           GST_TIME_ARGS (PCRTIME_TO_GSTTIME (cur->pcr_offset)));
+       /* Remove the reset and wrapover flag (if it was previously there) */
+       prev->flags &= ~PCR_GROUP_FLAG_RESET;
+       prev->flags &= ~PCR_GROUP_FLAG_WRAPOVER;
+     }
+
+
+     /* Remember prev for the next group evaluation */
+     prev = cur;
+   }
+ }
+
+ /* Allocate and initialise a fresh PCROffsetGroup anchored at the given
+  * absolute @pcr/@offset position, with the given corrected @pcr_offset
+  * and @flags. Caller owns the returned group. */
+ static PCROffsetGroup *
+ _new_group (guint64 pcr, guint64 offset, guint64 pcr_offset, guint flags)
+ {
+   PCROffsetGroup *res;
+
+   GST_DEBUG ("Input PCR %" GST_TIME_FORMAT " offset:%" G_GUINT64_FORMAT
+       " pcr_offset:%" G_GUINT64_FORMAT " flags:%d",
+       GST_TIME_ARGS (PCRTIME_TO_GSTTIME (pcr)), offset, pcr_offset, flags);
+
+   res = g_slice_new0 (PCROffsetGroup);
+   res->flags = flags;
+
+   /* Pre-allocate room for the pcr/offset observations. Entry 0 is the
+    * origin of the group and is always the 0/0 pair. */
+   res->nb_allocated = DEFAULT_ALLOCATED_OFFSET;
+   res->values = g_new0 (PCROffset, DEFAULT_ALLOCATED_OFFSET);
+   res->values[0].pcr = res->values[0].offset = 0;
+
+   /* Absolute anchors of the group */
+   res->first_pcr = pcr;
+   res->first_offset = offset;
+   res->pcr_offset = pcr_offset;
+
+   GST_DEBUG ("Created group starting with pcr:%" GST_TIME_FORMAT " offset:%"
+       G_GUINT64_FORMAT " pcr_offset:%" GST_TIME_FORMAT,
+       GST_TIME_ARGS (PCRTIME_TO_GSTTIME (res->first_pcr)),
+       res->first_offset,
+       GST_TIME_ARGS (PCRTIME_TO_GSTTIME (res->pcr_offset)));
+
+   return res;
+ }
+
+ /* Insert @group into the pcrtable's group list right after @prev.
+  * A NULL @prev means the group becomes the new head. If @prev is not
+  * found in the list (should not happen), the group is prepended. */
+ static void
+ _insert_group_after (MpegTSPCR * pcrtable, PCROffsetGroup * group,
+     PCROffsetGroup * prev)
+ {
+   GList *prevlink;
+
+   if (prev == NULL) {
+     /* First group */
+     pcrtable->groups = g_list_prepend (pcrtable->groups, group);
+     return;
+   }
+
+   prevlink = g_list_find (pcrtable->groups, prev);
+   if (prevlink == NULL) {
+     /* The non NULL prev given isn't in the list */
+     GST_WARNING ("Request to insert before a group which isn't in the list");
+     pcrtable->groups = g_list_prepend (pcrtable->groups, group);
+     return;
+   }
+
+   /* Inserting after prevlink == inserting before its successor
+    * (a NULL successor makes this an append) */
+   pcrtable->groups =
+       g_list_insert_before (pcrtable->groups, prevlink->next, group);
+ }
+
+ /* Make @group the group the current window estimator works on, seeding
+  * the estimator with the group's last recorded pcr/offset pair. */
+ static void
+ _use_group (MpegTSPCR * pcrtable, PCROffsetGroup * group)
+ {
+   PCROffsetCurrent *cur = pcrtable->current;
+   PCROffset seed = group->values[group->last_value];
+
+   memset (cur, 0, sizeof (PCROffsetCurrent));
+   cur->group = group;
+   cur->pending[0] = seed;
+   cur->last_value = seed;
+   cur->prev = seed;
+   cur->write = 1;
+   cur->first_pcr = group->first_pcr;
+   cur->first_offset = group->first_offset;
+ }
+
+ /* Create a new group with the specified values after prev
+  * Set current to that new group
+  *
+  * @contiguous indicates the new group directly follows @prev in time;
+  * in that case @prev is CLOSED and the new group's pcr_offset is
+  * derived from it (handling wraparound / reset / gap). Otherwise the
+  * new group's pcr_offset is left for later estimation. */
+ static void
+ _set_current_group (MpegTSPCR * pcrtable,
+     PCROffsetGroup * prev, guint64 pcr, guint64 offset, gboolean contiguous)
+ {
+   PCROffsetGroup *group;
+   guint flags = 0;
+   guint64 pcr_offset = 0;
+
+   /* Handle wraparound/gap (only if contiguous with previous group) */
+   if (contiguous) {
+     /* Raw PCR at which the previous group ended */
+     guint64 lastpcr = prev->first_pcr + prev->values[prev->last_value].pcr;
+
+     /* Set CLOSED flag on previous group and remember pcr_offset */
+     prev->flags |= PCR_GROUP_FLAG_CLOSED;
+     pcr_offset = prev->pcr_offset;
+
+     /* Wraparound ? */
+     if (lastpcr > pcr) {
+       /* In offset-mode, a PCR wraparound is only actually consistent if
+        * we have a very high confidence (99% right now, might need to change
+        * later) */
+       if (lastpcr - pcr > (PCR_MAX_VALUE * 99 / 100)) {
+         GST_WARNING ("WRAPAROUND detected. diff %" GST_TIME_FORMAT,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (lastpcr - pcr)));
+         /* The previous group closed at PCR_MAX_VALUE */
+         pcr_offset += PCR_MAX_VALUE - prev->first_pcr + pcr;
+       } else {
+         GST_WARNING ("RESET detected. diff %" GST_TIME_FORMAT,
+             GST_TIME_ARGS (PCRTIME_TO_GSTTIME (lastpcr - pcr)));
+         /* The previous group closed at the raw last_pcr diff (+100ms for safety) */
+         pcr_offset += prev->values[prev->last_value].pcr + 100 * PCR_MSECOND;
+       }
+     } else if (lastpcr < pcr - 500 * PCR_MSECOND) {
+       GST_WARNING ("GAP detected. diff %" GST_TIME_FORMAT,
+           GST_TIME_ARGS (PCRTIME_TO_GSTTIME (pcr - lastpcr)));
+       /* The previous group closed at the raw last_pcr diff (+500ms for safety) */
+       pcr_offset += prev->values[prev->last_value].pcr + 500 * PCR_MSECOND;
+     } else
+       /* Normal continuation (contiguous in time) */
+       pcr_offset += pcr - prev->first_pcr;
+
+   } else if (prev != NULL)
+     /* If we are not contiguous and it's not the first group, the pcr_offset
+      * will be estimated */
+     flags = PCR_GROUP_FLAG_ESTIMATED;
+
+   group = _new_group (pcr, offset, pcr_offset, flags);
+   _use_group (pcrtable, group);
+   _insert_group_after (pcrtable, group, prev);
+   /* Non-contiguous groups need their pcr_offset (re-)estimated */
+   if (!contiguous)
+     _reevaluate_group_pcr_offset (pcrtable, group);
+ }
+
+ /* Append a pcr/offset observation to @group, growing its storage when
+  * needed. A value identical to the last recorded one is ignored. */
+ static inline void
+ _append_group_values (PCROffsetGroup * group, PCROffset pcroffset)
+ {
+   gboolean is_duplicate;
+
+   is_duplicate = group->values[group->last_value].offset == pcroffset.offset
+       && group->values[group->last_value].pcr == pcroffset.pcr;
+
+   if (is_duplicate) {
+     GST_DEBUG ("Same values, ignoring");
+   } else {
+     group->last_value++;
+     /* Grow the storage if we ran out of pre-allocated entries */
+     if (G_UNLIKELY (group->nb_allocated == group->last_value)) {
+       group->nb_allocated += DEFAULT_ALLOCATED_OFFSET;
+       group->values =
+           g_realloc (group->values, group->nb_allocated * sizeof (PCROffset));
+     }
+     group->values[group->last_value] = pcroffset;
+   }
+
+   GST_DEBUG ("First PCR:%" GST_TIME_FORMAT " offset:%" G_GUINT64_FORMAT
+       " PCR_offset:%" GST_TIME_FORMAT,
+       GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->first_pcr)),
+       group->first_offset,
+       GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->pcr_offset)));
+   GST_DEBUG ("Last PCR: +%" GST_TIME_FORMAT " offset: +%" G_GUINT64_FORMAT,
+       GST_TIME_ARGS (PCRTIME_TO_GSTTIME (pcroffset.pcr)), pcroffset.offset);
+ }
+
+ /* Flush the last values of the current window estimator (if any) into
+  * its group, then reset the estimator.
+  * Note: the group's CLOSED flag is NOT set here, since no contiguous
+  * follow-up group exists yet. */
+ static void
+ _close_current_group (MpegTSPCR * pcrtable)
+ {
+   PCROffsetCurrent *current = pcrtable->current;
+
+   if (current->group == NULL)
+     return;
+
+   GST_DEBUG ("Closing group and resetting current");
+
+   /* Store last pending values in the group, then wipe the estimator */
+   _append_group_values (current->group, current->pending[current->last]);
+   memset (current, 0, sizeof (PCROffsetCurrent));
+   /* And re-evaluate all groups */
+ }
+
+ static void
+ record_pcr (MpegTSPacketizer2 * packetizer, MpegTSPCR * pcrtable,
+ guint64 pcr, guint64 offset)
+ {
+ PCROffsetCurrent *current = pcrtable->current;
+ gint64 corpcr, coroffset;
+
+ packetizer->nb_seen_offsets += 1;
+
+ pcrtable->last_pcrtime = PCRTIME_TO_GSTTIME (pcr);
+ /* FIXME : Invert logic later (probability is higher that we have a
+ * current estimator) */
+
+ /* Check for current */
+ if (G_UNLIKELY (current->group == NULL)) {
+ PCROffsetGroup *prev = NULL;
+ GList *tmp;
+ /* No current estimator. This happens for the initial value, or after
+ * discont and flushes. Figure out where we need to record this position.
+ *
+ * Possible choices:
+ * 1) No groups at all:
+ * Create a new group with pcr/offset
+ * Initialize current to that group
+ * 2) Entirely within an existing group
+ * bail out (FIXME: Make this detection faster)
+ * 3) Not in any group
+ * Create a new group with pcr/offset at the right position
+ * Initialize current to that group
+ */
+ GST_DEBUG ("No current window estimator, Checking for group to use");
+ for (tmp = pcrtable->groups; tmp; tmp = tmp->next) {
+ PCROffsetGroup *group = (PCROffsetGroup *) tmp->data;
+
+ GST_DEBUG ("First PCR:%" GST_TIME_FORMAT " offset:%" G_GUINT64_FORMAT
+ " PCR_offset:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->first_pcr)),
+ group->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->pcr_offset)));
+ GST_DEBUG ("Last PCR: +%" GST_TIME_FORMAT " offset: +%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->values[group->last_value].
+ pcr)), group->values[group->last_value].offset);
+ /* Check if before group */
+ if (offset < group->first_offset) {
+ GST_DEBUG ("offset is before that group");
+ break;
+ }
+ /* Check if within group */
+ if (offset <=
+ (group->values[group->last_value].offset + group->first_offset)) {
+ GST_DEBUG ("Already observed PCR offset %" G_GUINT64_FORMAT, offset);
+ return;
+ }
+ /* Check if just after group (i.e. continuation of it) */
+ if (!(group->flags & PCR_GROUP_FLAG_CLOSED) &&
+ pcr - group->first_pcr - group->values[group->last_value].pcr <=
+ 100 * PCR_MSECOND) {
+ GST_DEBUG ("Continuation of existing group");
+ _use_group (pcrtable, group);
+ return;
+ }
+ /* Else after group */
+ prev = group;
+ }
+ _set_current_group (pcrtable, prev, pcr, offset, FALSE);
+ return;
+ }
+
+ corpcr = pcr - current->first_pcr;
+ coroffset = offset - current->first_offset;
+
+ /* FIXME : Detect if we've gone into the next group !
+ * FIXME : Close group when that happens */
+ GST_DEBUG ("first:%d, last:%d, write:%d", current->first, current->last,
+ current->write);
+ GST_DEBUG ("First PCR:%" GST_TIME_FORMAT " offset:%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (current->first_pcr)),
+ current->first_offset);
+ GST_DEBUG ("Last PCR: +%" GST_TIME_FORMAT " offset: +%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (current->pending[current->last].pcr)),
+ current->pending[current->last].offset);
+ GST_DEBUG ("To add (corrected) PCR:%" GST_TIME_FORMAT " offset:%"
+ G_GINT64_FORMAT, GST_TIME_ARGS (PCRTIME_TO_GSTTIME (corpcr)), coroffset);
+
+ /* Do we need to close the current group ? */
+ /* Check for wrapover/discont */
+ if (G_UNLIKELY (corpcr < current->pending[current->last].pcr)) {
+ /* FIXME : ignore very small deltas (< 500ms ?) which are most likely
+ * stray values */
+ GST_DEBUG
+ ("PCR smaller than previously observed one, handling discont/wrapover");
+ /* Take values from current and put them in the current group (closing it) */
+ /* Create new group with new pcr/offset just after the current group
+ * and mark it as a wrapover */
+ /* Initialize current to that group with new values */
+ _append_group_values (current->group, current->pending[current->last]);
+ _set_current_group (pcrtable, current->group, pcr, offset, TRUE);
+ return;
+ }
+ /* If PCR diff is greater than 500ms, create new group */
+ if (G_UNLIKELY (corpcr - current->pending[current->last].pcr >
+ 500 * PCR_MSECOND)) {
+ GST_DEBUG ("New PCR more than 500ms away, handling discont");
+ /* Take values from current and put them in the current group (closing it) */
+ /* Create new group with pcr/offset just after the current group
+ * and mark it as a discont */
+ /* Initialize current to that group with new values */
+ _append_group_values (current->group, current->pending[current->last]);
+ _set_current_group (pcrtable, current->group, pcr, offset, TRUE);
+ return;
+ }
+
+ if (G_UNLIKELY (corpcr == current->last_value.pcr)) {
+ GST_DEBUG ("Ignoring same PCR (stream is drunk)");
+ return;
+ }
+
+ /* update current window */
+ current->pending[current->write].pcr = corpcr;
+ current->pending[current->write].offset = coroffset;
+ current->last_value = current->pending[current->write];
+ current->last = (current->last + 1) % PCR_BITRATE_NEEDED;
+ current->write = (current->write + 1) % PCR_BITRATE_NEEDED;
+
+ GST_DEBUG ("first:%d, last:%d, write:%d", current->first, current->last,
+ current->write);
+ GST_DEBUG ("First PCR:%" GST_TIME_FORMAT " offset:%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (current->first_pcr)),
+ current->first_offset);
+ GST_DEBUG ("Last PCR: +%" GST_TIME_FORMAT " offset: +%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (current->pending[current->last].pcr)),
+ current->pending[current->last].offset);
+
+ /* If we haven't stored enough values, bail out */
+ if (current->write != current->first) {
+ GST_DEBUG
+ ("Not enough observations to calculate bitrate (first:%d, last:%d)",
+ current->first, current->last);
+ return;
+ }
+
+ /* If we are at least 1s away from reference value AND we have filled our
+ * window, we can start comparing bitrates */
+ if (current->pending[current->first].pcr - current->prev.pcr > PCR_SECOND) {
+ /* Calculate window bitrate */
+ current->cur_bitrate = gst_util_uint64_scale (PCR_SECOND,
+ current->pending[current->last].offset -
+ current->pending[current->first].offset,
+ current->pending[current->last].pcr -
+ current->pending[current->first].pcr);
+ GST_DEBUG ("Current bitrate is now %" G_GUINT64_FORMAT,
+ current->cur_bitrate);
+
+ /* Calculate previous bitrate */
+ current->prev_bitrate =
+ gst_util_uint64_scale (PCR_SECOND,
+ current->pending[current->first].offset - current->prev.offset,
+ current->pending[current->first].pcr - current->prev.pcr);
+ GST_DEBUG ("Previous group bitrate now %" G_GUINT64_FORMAT,
+ current->prev_bitrate);
+
+ /* FIXME : Better bitrate changes ? Currently 10% changes */
+ if (ABSDIFF (current->cur_bitrate,
+ current->prev_bitrate) * 10 > current->prev_bitrate) {
+ GST_DEBUG ("Current bitrate changed by more than 10%% (old:%"
+ G_GUINT64_FORMAT " new:%" G_GUINT64_FORMAT ")", current->prev_bitrate,
+ current->cur_bitrate);
+ /* If we detected a change in bitrate, this means that
+ * d(first - prev) is a different bitrate than d(last - first).
+ *
+ * Two conclusions can be made:
+ * 1) d(first - prev) is a complete bitrate "chain" (values between the
+ * reference value and first pending value have consistent bitrate).
+ * 2) next values (from second pending value onwards) will no longer have
+ * the same bitrate.
+ *
+ * The question remains as to how long the new bitrate change is going to
+ * last for (it might be short or longer term). For this we need to restart
+ * bitrate estimation.
+ *
+ * * We move over first to the last value of group (a new chain ends and
+ * starts from there)
+ * * We remember that last group value as our new window reference
 + * * We restart our window filling from the last observed value
+ *
+ * Once our new window is filled we will end up in two different scenarios:
+ * 1) Either the bitrate change was consistent, and therefore the bitrate
+ * will have remained constant over at least 2 window length
+ * 2) The bitrate change was very short (1 window duration) and we will
+ * close that chain and restart again.
+ * X) And of course if any discont/gaps/wrapover happen in the meantime they
+ * will also close the group.
+ */
+ _append_group_values (current->group, current->pending[current->first]);
+ current->prev = current->pending[current->first];
+ current->first = current->last;
+ current->write = (current->first + 1) % PCR_BITRATE_NEEDED;
+ return;
+ }
+ }
+
+ /* Update read position */
+ current->first = (current->first + 1) % PCR_BITRATE_NEEDED;
+ }
+
+
+ /* convert specified offset into stream time */
+ GstClockTime
+ mpegts_packetizer_offset_to_ts (MpegTSPacketizer2 * packetizer,
+ guint64 offset, guint16 pid)
+ {
+ PCROffsetGroup *last;
+ MpegTSPCR *pcrtable;
+ GList *tmp;
+ GstClockTime res;
+ guint64 lastpcr, lastoffset;
+
+ GST_DEBUG ("offset %" G_GUINT64_FORMAT, offset);
+
+ if (G_UNLIKELY (!packetizer->calculate_offset))
+ return GST_CLOCK_TIME_NONE;
+
+ if (G_UNLIKELY (packetizer->refoffset == -1))
+ return GST_CLOCK_TIME_NONE;
+
+ if (G_UNLIKELY (offset < packetizer->refoffset))
+ return GST_CLOCK_TIME_NONE;
+
+ PACKETIZER_GROUP_LOCK (packetizer);
+
+ pcrtable = get_pcr_table (packetizer, pid);
+
+ if (g_list_length (pcrtable->groups) < 1) {
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ GST_WARNING ("Not enough observations to return a duration estimate");
+ return GST_CLOCK_TIME_NONE;
+ }
+
+ if (g_list_length (pcrtable->groups) > 1) {
+ GST_LOG ("Using last group");
+
+ /* FIXME : Refine this later to use neighbouring groups */
+ tmp = g_list_last (pcrtable->groups);
+ last = tmp->data;
+
+ if (G_UNLIKELY (last->flags & PCR_GROUP_FLAG_ESTIMATED))
+ _reevaluate_group_pcr_offset (pcrtable, last);
+
 + /* lastpcr is the full value in PCR from the very first chunk of data */
+ lastpcr = last->values[last->last_value].pcr + last->pcr_offset;
+ /* lastoffset is the full offset from the first chunk of data */
+ lastoffset =
+ last->values[last->last_value].offset + last->first_offset -
+ packetizer->refoffset;
+ } else {
+ PCROffsetCurrent *current = pcrtable->current;
+
+ if (!current->group) {
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ GST_LOG ("No PCR yet");
+ return GST_CLOCK_TIME_NONE;
+ }
+ /* If doing progressive read, use current */
+ GST_LOG ("Using current group");
+ lastpcr = current->group->pcr_offset + current->pending[current->last].pcr;
+ lastoffset = current->first_offset + current->pending[current->last].offset;
+ }
+ GST_DEBUG ("lastpcr:%" GST_TIME_FORMAT " lastoffset:%" G_GUINT64_FORMAT
+ " refoffset:%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (lastpcr)), lastoffset,
+ packetizer->refoffset);
+
+ /* Convert byte difference into time difference (and transformed from 27MHz to 1GHz) */
+ res =
+ PCRTIME_TO_GSTTIME (gst_util_uint64_scale (offset - packetizer->refoffset,
+ lastpcr, lastoffset));
+
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+
+ GST_DEBUG ("Returning timestamp %" GST_TIME_FORMAT " for offset %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (res), offset);
+
+ return res;
+ }
+
+ /* Input : local PTS (in GHz units)
+ * Return : Stream time (in GHz units) */
+ GstClockTime
+ mpegts_packetizer_pts_to_ts (MpegTSPacketizer2 * packetizer,
+ GstClockTime pts, guint16 pcr_pid)
+ {
+ GstClockTime res = GST_CLOCK_TIME_NONE;
+ MpegTSPCR *pcrtable;
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ GstClockTime tmp;
++#endif
+
+ PACKETIZER_GROUP_LOCK (packetizer);
+ pcrtable = get_pcr_table (packetizer, pcr_pid);
+
+ if (!GST_CLOCK_TIME_IS_VALID (pcrtable->base_time) && pcr_pid == 0x1fff &&
+ GST_CLOCK_TIME_IS_VALID (packetizer->last_in_time)) {
+ pcrtable->base_time = packetizer->last_in_time;
+ pcrtable->base_pcrtime = pts;
+ }
+
+ /* Use clock skew if present */
+ if (packetizer->calculate_skew
+ && GST_CLOCK_TIME_IS_VALID (pcrtable->base_time)) {
+ GST_DEBUG ("pts %" GST_TIME_FORMAT " base_pcrtime:%" GST_TIME_FORMAT
+ " base_time:%" GST_TIME_FORMAT " pcroffset:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (pts),
+ GST_TIME_ARGS (pcrtable->base_pcrtime),
+ GST_TIME_ARGS (pcrtable->base_time),
+ GST_TIME_ARGS (pcrtable->pcroffset));
+ res = pts + pcrtable->pcroffset;
+
+ /* Don't return anything if we differ too much against last seen PCR */
+ /* FIXME : Ideally we want to figure out whether we have a wraparound or
+ * a reset so we can provide actual values.
+ * That being said, this will only happen for the small interval of time
+ * where PTS/DTS are wrapping just before we see the first reset/wrap PCR
+ */
++ /*
 ++ * For some HLS Live servers (e.g. tvpstream.tvp.pl), we observe erroneous PCR values.
 ++ * As a result, last_pcrtime becomes faulty, which prevents PTS values from being calculated at all, so buffers are dropped.
 ++ * We are currently ignoring this check in order to handle the erroneous server PCR case.
++ */
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ tmp = pcrtable->base_time + pcrtable->skew;
++ if (packetizer->is_live_stream) {
++ if (G_UNLIKELY (ABSDIFF (res, pcrtable->last_pcrtime) > 15 * GST_SECOND))
++ GST_WARNING
++ ("Live Stream : Server sending erronous PCR values : Recalculating!!");
++
++ if (tmp + res > pcrtable->base_pcrtime)
++ res += tmp - pcrtable->base_pcrtime;
++ else
++ res = GST_CLOCK_TIME_NONE;
++ } else {
++ if (G_UNLIKELY (ABSDIFF (res, pcrtable->last_pcrtime) > 15 * GST_SECOND))
++ GST_WARNING
++ ("VOD Stream : Server sending erronous PCR values : Maintaining old ts value !!!");
++ else if (tmp + res > pcrtable->base_pcrtime)
++ res += tmp - pcrtable->base_pcrtime;
++ else
++ res = GST_CLOCK_TIME_NONE;
++ }
++#else
+ if (G_UNLIKELY (pcr_pid != 0x1fff &&
+ ABSDIFF (res, pcrtable->last_pcrtime) > 15 * GST_SECOND))
+ res = GST_CLOCK_TIME_NONE;
+ else {
+ GstClockTime tmp = pcrtable->base_time + pcrtable->skew;
+ if (tmp + res >= pcrtable->base_pcrtime)
+ res += tmp - pcrtable->base_pcrtime;
+ else
+ res = GST_CLOCK_TIME_NONE;
+ }
++#endif
+ } else if (packetizer->calculate_offset && pcrtable->groups) {
+ gint64 refpcr = G_MAXINT64, refpcroffset;
+ PCROffsetGroup *group = pcrtable->current->group;
+
+ /* Generic calculation:
+ * Stream Time = PTS - first group PCR + group PCR_offset
+ *
+ * In case of wrapover:
+ * Stream Time = PTS + MAX_PCR - first group PCR + group PCR_offset
+ * (which we actually do by using first group PCR -= MAX_PCR in order
+ * to end up with the same calculation as for non-wrapover) */
+
+ if (group) {
+ /* If we have a current group the value is pretty much guaranteed */
+ GST_DEBUG ("Using current First PCR:%" GST_TIME_FORMAT " offset:%"
+ G_GUINT64_FORMAT " PCR_offset:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->first_pcr)),
+ group->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->pcr_offset)));
+ refpcr = group->first_pcr;
+ refpcroffset = group->pcr_offset;
+ if (pts < PCRTIME_TO_GSTTIME (refpcr)) {
+ /* Only apply wrapover if we're certain it is, and avoid
+ * returning bogus values if it's a PTS/DTS which is *just*
+ * before the start of the current group
+ */
+ if (PCRTIME_TO_GSTTIME (refpcr) - pts > GST_SECOND) {
+ pts += PCR_GST_MAX_VALUE;
+ } else
+ refpcr = G_MAXINT64;
+ }
+ } else {
+ GList *tmp;
+ /* Otherwise, find a suitable group */
+
+ GST_DEBUG ("Find group for current offset %" G_GUINT64_FORMAT,
+ packetizer->offset);
+
+ for (tmp = pcrtable->groups; tmp; tmp = tmp->next) {
+ PCROffsetGroup *tgroup = tmp->data;
+ GST_DEBUG ("Trying First PCR:%" GST_TIME_FORMAT " offset:%"
+ G_GUINT64_FORMAT " PCR_offset:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (tgroup->first_pcr)),
+ tgroup->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (tgroup->pcr_offset)));
+ /* Gone too far ? */
+ if (tgroup->first_offset > packetizer->offset) {
+ /* If there isn't a pending reset, use that value */
+ if (group) {
+ GST_DEBUG ("PTS is %" GST_TIME_FORMAT " into group",
+ GST_TIME_ARGS (pts - PCRTIME_TO_GSTTIME (group->first_pcr)));
+ }
+ break;
+ }
+ group = tgroup;
+ /* In that group ? */
+ if (group->first_offset + group->values[group->last_value].offset >
+ packetizer->offset) {
+ GST_DEBUG ("PTS is %" GST_TIME_FORMAT " into group",
+ GST_TIME_ARGS (pts - PCRTIME_TO_GSTTIME (group->first_pcr)));
+ break;
+ }
+ }
+ if (group && !(group->flags & PCR_GROUP_FLAG_RESET)) {
+ GST_DEBUG ("Using group !");
+ refpcr = group->first_pcr;
+ refpcroffset = group->pcr_offset;
+ if (pts < PCRTIME_TO_GSTTIME (refpcr)) {
+ if (PCRTIME_TO_GSTTIME (refpcr) - pts > GST_SECOND)
+ pts += PCR_GST_MAX_VALUE;
+ else
+ refpcr = G_MAXINT64;
+ }
+ }
+ }
+ if (refpcr != G_MAXINT64)
+ res =
+ pts - PCRTIME_TO_GSTTIME (refpcr) + PCRTIME_TO_GSTTIME (refpcroffset);
+ else
+ GST_WARNING ("No groups, can't calculate timestamp");
+ } else
+ GST_WARNING ("Not enough information to calculate proper timestamp");
+
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+
+ GST_DEBUG ("Returning timestamp %" GST_TIME_FORMAT " for pts %"
+ GST_TIME_FORMAT " pcr_pid:0x%04x", GST_TIME_ARGS (res),
+ GST_TIME_ARGS (pts), pcr_pid);
+ return res;
+ }
+
+ /* Stream time to offset */
+ guint64
+ mpegts_packetizer_ts_to_offset (MpegTSPacketizer2 * packetizer,
+ GstClockTime ts, guint16 pcr_pid)
+ {
+ MpegTSPCR *pcrtable;
+ guint64 res;
+ PCROffsetGroup *nextgroup = NULL, *prevgroup = NULL;
+ guint64 querypcr, firstpcr, lastpcr, firstoffset, lastoffset;
+ PCROffsetCurrent *current;
+ GList *tmp;
+
+ if (!packetizer->calculate_offset)
+ return -1;
+
+ PACKETIZER_GROUP_LOCK (packetizer);
+ pcrtable = get_pcr_table (packetizer, pcr_pid);
+
+ if (pcrtable->groups == NULL) {
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ return -1;
+ }
+
+ querypcr = GSTTIME_TO_PCRTIME (ts);
+
+ GST_DEBUG ("Searching offset for ts %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+
+ /* First check if we're within the current pending group */
+ current = pcrtable->current;
+ if (current && current->group && (querypcr >= current->group->pcr_offset) &&
+ querypcr - current->group->pcr_offset <=
+ current->pending[current->last].pcr) {
+ GST_DEBUG ("pcr is in current group");
+ nextgroup = current->group;
+ goto calculate_points;
+ }
+
+ /* Find the neighbouring groups */
+ for (tmp = pcrtable->groups; tmp; tmp = tmp->next) {
+ nextgroup = (PCROffsetGroup *) tmp->data;
+
+ GST_DEBUG ("Trying group PCR %" GST_TIME_FORMAT " (offset %"
+ G_GUINT64_FORMAT " pcr_offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (nextgroup->first_pcr)),
+ nextgroup->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (nextgroup->pcr_offset)));
+
+ /* Check if we've gone too far */
+ if (nextgroup->pcr_offset > querypcr) {
+ GST_DEBUG ("pcr is before that group");
+ break;
+ }
+
+ if (tmp->next == NULL) {
+ GST_DEBUG ("pcr is beyond last group");
+ break;
+ }
+
+ prevgroup = nextgroup;
+
+ /* Maybe it's in this group */
+ if (nextgroup->values[nextgroup->last_value].pcr +
+ nextgroup->pcr_offset >= querypcr) {
+ GST_DEBUG ("pcr is in that group");
+ break;
+ }
+ }
+
+ calculate_points:
+
+ GST_DEBUG ("nextgroup:%p, prevgroup:%p", nextgroup, prevgroup);
+
+ if (nextgroup == prevgroup || prevgroup == NULL) {
+ /* We use the current group to calculate position:
+ * * if the PCR is within this group
+ * * if there is only one group to use for calculation
+ */
+ GST_DEBUG ("In group or after last one");
+ lastoffset = firstoffset = nextgroup->first_offset;
+ lastpcr = firstpcr = nextgroup->pcr_offset;
+ if (current && nextgroup == current->group) {
+ lastoffset += current->pending[current->last].offset;
+ lastpcr += current->pending[current->last].pcr;
+ } else {
+ lastoffset += nextgroup->values[nextgroup->last_value].offset;
+ lastpcr += nextgroup->values[nextgroup->last_value].pcr;
+ }
+ } else {
+ GST_DEBUG ("Between group");
+ lastoffset = nextgroup->first_offset;
+ lastpcr = nextgroup->pcr_offset;
+ firstoffset =
+ prevgroup->values[prevgroup->last_value].offset +
+ prevgroup->first_offset;
+ firstpcr =
+ prevgroup->values[prevgroup->last_value].pcr + prevgroup->pcr_offset;
+ }
+
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+
+ GST_DEBUG ("Using prev PCR %" G_GUINT64_FORMAT " offset %" G_GUINT64_FORMAT,
+ firstpcr, firstoffset);
+ GST_DEBUG ("Using last PCR %" G_GUINT64_FORMAT " offset %" G_GUINT64_FORMAT,
+ lastpcr, lastoffset);
+
+ res = firstoffset;
+ if (lastpcr != firstpcr)
+ res += gst_util_uint64_scale (querypcr - firstpcr,
+ lastoffset - firstoffset, lastpcr - firstpcr);
+
+ GST_DEBUG ("Returning offset %" G_GUINT64_FORMAT " for ts %"
+ GST_TIME_FORMAT, res, GST_TIME_ARGS (ts));
+
+ return res;
+ }
+
+ void
+ mpegts_packetizer_set_reference_offset (MpegTSPacketizer2 * packetizer,
+ guint64 refoffset)
+ {
+ GST_DEBUG ("Setting reference offset to %" G_GUINT64_FORMAT, refoffset);
+
+ PACKETIZER_GROUP_LOCK (packetizer);
+ packetizer->refoffset = refoffset;
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ }
+
+ void
+ mpegts_packetizer_set_pcr_discont_threshold (MpegTSPacketizer2 * packetizer,
+ GstClockTime threshold)
+ {
+ PACKETIZER_GROUP_LOCK (packetizer);
+ packetizer->pcr_discont_threshold = threshold;
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ }
+
+ void
+ mpegts_packetizer_set_current_pcr_offset (MpegTSPacketizer2 * packetizer,
+ GstClockTime offset, guint16 pcr_pid)
+ {
+ guint64 pcr_offset;
+ gint64 delta;
+ MpegTSPCR *pcrtable;
+ PCROffsetGroup *group;
+ GList *tmp;
+ gboolean apply = FALSE;
+
+ /* fast path */
+ PACKETIZER_GROUP_LOCK (packetizer);
+ pcrtable = get_pcr_table (packetizer, pcr_pid);
+
+ if (pcrtable == NULL || pcrtable->current->group == NULL) {
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ return;
+ }
+
+ pcr_offset = GSTTIME_TO_PCRTIME (offset);
+
+ /* Pick delta from *first* group */
+ if (pcrtable->groups)
+ group = pcrtable->groups->data;
+ else
+ group = pcrtable->current->group;
+ GST_DEBUG ("Current group PCR %" GST_TIME_FORMAT " (offset %"
+ G_GUINT64_FORMAT " pcr_offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->first_pcr)),
+ group->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (group->pcr_offset)));
+
+ /* Remember the difference between previous initial pcr_offset and
+ * new initial pcr_offset */
+ delta = pcr_offset - group->pcr_offset;
+ if (delta == 0) {
+ GST_DEBUG ("No shift to apply");
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ return;
+ }
+ GST_DEBUG ("Shifting groups by %" GST_TIME_FORMAT
+ " for new initial pcr_offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (delta)), GST_TIME_ARGS (offset));
+
+ for (tmp = pcrtable->groups; tmp; tmp = tmp->next) {
+ PCROffsetGroup *tgroup = (tmp->data);
+ if (tgroup == group)
+ apply = TRUE;
+ if (apply) {
+ tgroup->pcr_offset += delta;
+ GST_DEBUG ("Update group PCR %" GST_TIME_FORMAT " (offset %"
+ G_GUINT64_FORMAT " pcr_offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (tgroup->first_pcr)),
+ tgroup->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (tgroup->pcr_offset)));
+ } else
+ GST_DEBUG ("Not modifying group PCR %" GST_TIME_FORMAT " (offset %"
+ G_GUINT64_FORMAT " pcr_offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (tgroup->first_pcr)),
+ tgroup->first_offset,
+ GST_TIME_ARGS (PCRTIME_TO_GSTTIME (tgroup->pcr_offset)));
+ }
+ PACKETIZER_GROUP_UNLOCK (packetizer);
+ }
++
++#ifdef TIZEN_FEATURE_TSDEMUX_INVALID_PCR_PID
++GstClockTime
++mpegts_packetizer_get_pcr_base_time (MpegTSPacketizer2 * packetizer,
++ guint16 pcr_pid)
++{
++ GstClockTime res = GST_CLOCK_TIME_NONE;
++ MpegTSPCR *pcrtable;
++
++ PACKETIZER_GROUP_LOCK (packetizer);
++ pcrtable = get_pcr_table (packetizer, pcr_pid);
++
++ if (GST_CLOCK_TIME_IS_VALID (pcrtable->base_time) && (pcr_pid != 0x1fff))
++ res = pcrtable->base_time;
++
++ PACKETIZER_GROUP_UNLOCK (packetizer);
++ return res;
++}
++#endif
--- /dev/null
- * mpegtspacketizer.h -
+ /*
- *
++ * mpegtspacketizer.h -
+ * Copyright (C) 2007 Alessandro Decina
- /* the spec says sub_table_extension is the fourth and fifth byte of a
- * section when the section_syntax_indicator is set to a value of "1". If
++ *
+ * Authors:
+ * Alessandro Decina <alessandro@nnva.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef GST_MPEGTS_PACKETIZER_H
+ #define GST_MPEGTS_PACKETIZER_H
+
+ #include <gst/gst.h>
+ #include <gst/base/gstadapter.h>
+ #include <glib.h>
+
+ #include <gst/mpegts/mpegts.h>
+ #include "gstmpegdefs.h"
+
+ #define MPEGTS_NORMAL_PACKETSIZE 188
+ #define MPEGTS_M2TS_PACKETSIZE 192
+ #define MPEGTS_DVB_ASI_PACKETSIZE 204
+ #define MPEGTS_ATSC_PACKETSIZE 208
+
+ #define MPEGTS_MIN_PACKETSIZE MPEGTS_NORMAL_PACKETSIZE
+ #define MPEGTS_MAX_PACKETSIZE MPEGTS_ATSC_PACKETSIZE
+
+ #define MPEGTS_AFC_DISCONTINUITY_FLAG 0x80
+ #define MPEGTS_AFC_RANDOM_ACCESS_FLAG 0x40
+ #define MPEGTS_AFC_ELEMENTARY_STREAM_PRIORITY 0x20
+ #define MPEGTS_AFC_PCR_FLAG 0x10
+ #define MPEGTS_AFC_OPCR_FLAG 0x08
+ #define MPEGTS_AFC_SPLICING_POINT_FLAG 0x04
+ #define MPEGTS_AFC_TRANSPORT_PRIVATE_DATA_FLAG 0x02
+ #define MPEGTS_AFC_EXTENSION_FLAG 0x01
+
+ #define MAX_WINDOW 512
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_MPEGTS_PACKETIZER \
+ (mpegts_packetizer_get_type())
+ #define GST_MPEGTS_PACKETIZER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MPEGTS_PACKETIZER,MpegTSPacketizer2))
+ #define GST_MPEGTS_PACKETIZER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MPEGTS_PACKETIZER,MpegTSPacketizer2Class))
+ #define GST_IS_MPEGTS_PACKETIZER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MPEGTS_PACKETIZER))
+ #define GST_IS_MPEGTS_PACKETIZER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MPEGTS_PACKETIZER))
+
+ typedef struct _MpegTSPacketizer2 MpegTSPacketizer2;
+ typedef struct _MpegTSPacketizer2Class MpegTSPacketizer2Class;
+
+ typedef struct
+ {
+ guint16 pid;
+ guint continuity_counter;
+
+ /* Section data (always newly allocated) */
+ guint8 *section_data;
+ /* Current offset in section_data */
+ guint16 section_offset;
+
+ /* Values for pending section */
+ /* table_id of the pending section_data */
+ guint8 table_id;
+ guint section_length;
+ guint8 version_number;
+ guint16 subtable_extension;
+ guint8 section_number;
+ guint8 last_section_number;
+
+ GSList *subtables;
+
+ /* Upstream offset of the data contained in the section */
+ guint64 offset;
+ } MpegTSPacketizerStream;
+
+ /* Maximum number of MpegTSPcr
+ * 256 should be sufficient for most multiplexes */
+ #define MAX_PCR_OBS_CHANNELS 256
+
+ /* PCR/offset structure */
+ typedef struct _PCROffset
+ {
+ /* PCR value (units: 1/27MHz) */
+ guint64 pcr;
+
+ /* The offset (units: bytes) */
+ guint64 offset;
+ } PCROffset;
+
+ /* Flags used on groups */
+ enum
+ {
+ /* Closed groups: There is a contiguous next group */
+ PCR_GROUP_FLAG_CLOSED = 1 << 0,
+ /* estimated: the pcr_offset has been estimated and is not
+ * guaranteed to be 100% accurate */
+ PCR_GROUP_FLAG_ESTIMATED = 1 << 1,
+ /* reset: there is a pcr reset between the end of this
+ * group and the next one.
+ * This flag is exclusive with CLOSED. */
+ PCR_GROUP_FLAG_RESET = 1 << 2,
+ /* reset: there is a pcr wrapover between the end of this
+ * group and the next one.
+ * This flag is exclusive with CLOSED. */
+ PCR_GROUP_FLAG_WRAPOVER = 1 << 3
+ };
+
+
+
+ /* PCROffsetGroup: A group of PCR observations.
+ * All values in a group have got the same reference pcr and
+ * byte offset (first_pcr/first_offset).
+ */
+ #define DEFAULT_ALLOCATED_OFFSET 16
+ typedef struct _PCROffsetGroup
+ {
+ /* Flags (see PCR_GROUP_FLAG_* above) */
+ guint flags;
+
+ /* First raw PCR of this group. Units: 1/27MHz.
+ * All values[].pcr are differences against first_pcr */
+ guint64 first_pcr;
+ /* Offset of this group in bytes.
+ * All values[].offset are differences against first_offset */
+ guint64 first_offset;
+
+ /* Dynamically allocated table of PCROffset */
+ PCROffset *values;
+ /* number of PCROffset allocated in values */
+ guint nb_allocated;
+ /* number of *actual* PCROffset contained in values */
+ guint last_value;
+
+ /* Offset since the very first PCR value observed in the whole
+ * stream. Units: 1/27MHz.
+ * This will take into account gaps/wraparounds/resets/... and is
+ * used to determine running times.
+ * The value is only guaranteed to be 100% accurate if the group
+ * does not have the ESTIMATED flag.
+ * If the value is estimated, the pcr_offset shall be recalculated
+ * (based on previous groups) whenever it is accessed.
+ */
+ guint64 pcr_offset;
+
+ /* FIXME : Cache group bitrate ? */
+ } PCROffsetGroup;
+
+ /* Number of PCRs needed before bitrate estimation can start */
+ /* Note: the reason we use 10 is because PCR should normally be
+ * received at least every 100ms so this gives us close to
+ * a 1s moving window to calculate bitrate */
+ #define PCR_BITRATE_NEEDED 10
+
+ /* PCROffsetCurrent: The PCR/Offset window iterator
+ * This is used to estimate/observe incoming PCR/offset values
+ * Points to a group (which it is filling) */
+ typedef struct _PCROffsetCurrent
+ {
+ /* The PCROffsetGroup we are filling.
+ * If NULL, a group needs to be identified */
+ PCROffsetGroup *group;
+
+ /* Table of pending values we are iterating over */
+ PCROffset pending[PCR_BITRATE_NEEDED];
+
+ /* base offset/pcr from the group */
+ guint64 first_pcr;
+ guint64 first_offset;
+
+ /* The previous reference PCROffset
+ * This corresponds to the last entry of the group we are filling
+ * and is used to calculate prev_bitrate */
+ PCROffset prev;
+
+ /* The last PCROffset in pending[] */
+ PCROffset last_value;
+
+ /* Location of first pending PCR/offset observation in pending */
+ guint first;
+ /* Location of last pending PCR/offset observation in pending */
+ guint last;
+ /* Location of next write in pending */
+ guint write;
+
+ /* bitrate is always in bytes per second */
+
+ /* cur_bitrate is the bitrate of the pending values: d(last-first) */
+ guint64 cur_bitrate;
+
+ /* prev_bitrate is the bitrate between reference PCROffset
+ * and the first pending value. Used to detect changes
+ * in bitrate */
+ guint64 prev_bitrate;
+ } PCROffsetCurrent;
+
+ typedef struct _MpegTSPCR
+ {
+ guint16 pid;
+
+ /* Following variables are only active/used when
+ * calculate_skew is TRUE */
+ GstClockTime base_time;
+ GstClockTime base_pcrtime;
+ GstClockTime prev_out_time;
+ GstClockTime prev_in_time;
+ GstClockTime last_pcrtime;
+ gint64 window[MAX_WINDOW];
+ guint window_pos;
+ guint window_size;
+ gboolean window_filling;
+ gint64 window_min;
+ gint64 skew;
+ gint64 prev_send_diff;
+
+ /* Offset to apply to PCR to handle wraparounds */
+ guint64 pcroffset;
+
+ /* Used for bitrate calculation */
+ /* List of PCR/offset observations */
+ GList *groups;
+
+ /* Current PCR/offset observations (used to update pcroffsets) */
+ PCROffsetCurrent *current;
+ } MpegTSPCR;
+
+ struct _MpegTSPacketizer2 {
+ GObject parent;
+
+ GMutex group_lock;
+
+ GstAdapter *adapter;
+ /* streams hashed by pid */
+ /* FIXME : be more memory efficient (see how it's done in mpegtsbase) */
+ MpegTSPacketizerStream **streams;
+ gboolean disposed;
+ guint16 packet_size;
+
+ /* current offset of the tip of the adapter */
+ guint64 offset;
+ gboolean empty;
+
+ /* clock skew calculation */
+ gboolean calculate_skew;
+
+ /* offset/bitrate calculator */
+ gboolean calculate_offset;
+
+ /* Shortcuts for adapter usage */
+ guint8 *map_data;
+ gsize map_offset;
+ gsize map_size;
+ gboolean need_sync;
+
+ /* Reference offset */
+ guint64 refoffset;
+
+ /* Number of seen pcr/offset observations (FIXME : kill later) */
+ guint nb_seen_offsets;
+
+ /* Last inputted timestamp */
+ GstClockTime last_in_time;
+
++#ifdef TIZEN_FEATURE_TSDEMUX_MODIFICATION
++ gboolean is_live_stream;
++ gboolean need_pmt_update;
++#endif
++
+ /* offset to observations table */
+ guint8 pcrtablelut[0x2000];
+ MpegTSPCR *observations[MAX_PCR_OBS_CHANNELS];
+ guint8 lastobsid;
+ GstClockTime pcr_discont_threshold;
+
+ /* PTS/DTS of last buffer */
+ GstClockTime last_pts;
+ GstClockTime last_dts;
+ };
+
+ struct _MpegTSPacketizer2Class {
+ GObjectClass object_class;
+ };
+
+ #define FLAGS_SCRAMBLED(f) (f & 0xc0)
+ #define FLAGS_HAS_AFC(f) (f & 0x20)
+ #define FLAGS_HAS_PAYLOAD(f) (f & 0x10)
+ #define FLAGS_CONTINUITY_COUNTER(f) (f & 0x0f)
+
+ typedef struct
+ {
+ gint16 pid;
+ guint8 payload_unit_start_indicator;
+ guint8 scram_afc_cc;
+ guint8 *payload;
+
+ guint8 *data_start;
+ guint8 *data_end;
+ guint8 *data;
+
+ guint8 afc_flags;
+ guint64 pcr;
+ guint64 offset;
+ } MpegTSPacketizerPacket;
+
+ typedef struct
+ {
+ guint8 table_id;
++ /* the spec says sub_table_extension is the fourth and fifth byte of a
++ * section when the section_syntax_indicator is set to a value of "1". If
+ * section_syntax_indicator is 0, sub_table_extension will be set to 0 */
+ guint16 subtable_extension;
+ guint8 version_number;
+ guint8 last_section_number;
+ /* table of bits, whether the section was seen or not.
+ * Use MPEGTS_BIT_* macros to check */
+ /* Size is 32, because there's a maximum of 256 (32*8) section_number */
+ guint8 seen_section[32];
+ } MpegTSPacketizerStreamSubtable;
+
+ #define MPEGTS_BIT_SET(field, offs) ((field)[(offs) >> 3] |= (1 << ((offs) & 0x7)))
+ #define MPEGTS_BIT_UNSET(field, offs) ((field)[(offs) >> 3] &= ~(1 << ((offs) & 0x7)))
+ #define MPEGTS_BIT_IS_SET(field, offs) ((field)[(offs) >> 3] & (1 << ((offs) & 0x7)))
+
+ typedef enum {
+ PACKET_BAD = FALSE,
+ PACKET_OK = TRUE,
+ PACKET_NEED_MORE
+ } MpegTSPacketizerPacketReturn;
+
+ G_GNUC_INTERNAL GType mpegts_packetizer_get_type(void);
+
+ G_GNUC_INTERNAL MpegTSPacketizer2 *mpegts_packetizer_new (void);
+ G_GNUC_INTERNAL void mpegts_packetizer_clear (MpegTSPacketizer2 *packetizer);
+ G_GNUC_INTERNAL void mpegts_packetizer_flush (MpegTSPacketizer2 *packetizer, gboolean hard);
+ G_GNUC_INTERNAL void mpegts_packetizer_push (MpegTSPacketizer2 *packetizer, GstBuffer *buffer);
+ G_GNUC_INTERNAL gboolean mpegts_packetizer_has_packets (MpegTSPacketizer2 *packetizer);
+ G_GNUC_INTERNAL MpegTSPacketizerPacketReturn mpegts_packetizer_next_packet (MpegTSPacketizer2 *packetizer,
+ MpegTSPacketizerPacket *packet);
+ G_GNUC_INTERNAL MpegTSPacketizerPacketReturn
+ mpegts_packetizer_process_next_packet(MpegTSPacketizer2 * packetizer);
+ G_GNUC_INTERNAL void mpegts_packetizer_clear_packet (MpegTSPacketizer2 *packetizer,
+ MpegTSPacketizerPacket *packet);
+ G_GNUC_INTERNAL void mpegts_packetizer_remove_stream(MpegTSPacketizer2 *packetizer,
+ gint16 pid);
+
+ G_GNUC_INTERNAL GstMpegtsSection *mpegts_packetizer_push_section (MpegTSPacketizer2 *packetzer,
+ MpegTSPacketizerPacket *packet, GList **remaining);
+
+ /* Only valid if calculate_offset is TRUE */
+ G_GNUC_INTERNAL GstClockTime
+ mpegts_packetizer_offset_to_ts (MpegTSPacketizer2 * packetizer,
+ guint64 offset, guint16 pcr_pid);
+ G_GNUC_INTERNAL guint64
+ mpegts_packetizer_ts_to_offset (MpegTSPacketizer2 * packetizer,
+ GstClockTime ts, guint16 pcr_pid);
+ G_GNUC_INTERNAL GstClockTime
+ mpegts_packetizer_pts_to_ts (MpegTSPacketizer2 * packetizer,
+ GstClockTime pts, guint16 pcr_pid);
+ G_GNUC_INTERNAL GstClockTime
+ mpegts_packetizer_get_current_time (MpegTSPacketizer2 * packetizer,
+ guint16 pcr_pid);
+ G_GNUC_INTERNAL void
+ mpegts_packetizer_set_current_pcr_offset (MpegTSPacketizer2 * packetizer,
+ GstClockTime offset, guint16 pcr_pid);
+ G_GNUC_INTERNAL void
+ mpegts_packetizer_set_reference_offset (MpegTSPacketizer2 * packetizer,
+ guint64 refoffset);
+ G_GNUC_INTERNAL void
+ mpegts_packetizer_set_pcr_discont_threshold (MpegTSPacketizer2 * packetizer,
+ GstClockTime threshold);
++#ifdef TIZEN_FEATURE_TSDEMUX_INVALID_PCR_PID
++G_GNUC_INTERNAL GstClockTime
++mpegts_packetizer_get_pcr_base_time (MpegTSPacketizer2 * packetizer,
++ guint16 pcr_pid);
++#endif
+ G_END_DECLS
+
+ #endif /* GST_MPEGTS_PACKETIZER_H */
--- /dev/null
+ /*
+ * tsdemux.c
+ * Copyright (C) 2009 Zaheer Abbas Merali
+ * 2010 Edward Hervey
+ * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
+ * Author: Edward Hervey <bilboed@bilboed.com>, Collabora Ltd.
+ *
+ * Authors:
+ * Zaheer Abbas Merali <zaheerabbas at merali dot org>
+ * Edward Hervey <edward.hervey@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <stdlib.h>
+ #include <string.h>
+
+ #include <glib.h>
+ #include <gst/tag/tag.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/base/base.h>
+ #include <gst/audio/audio.h>
+
+ #include "mpegtsbase.h"
+ #include "tsdemux.h"
+ #include "gstmpegdesc.h"
+ #include "gstmpegdefs.h"
+ #include "mpegtspacketizer.h"
+ #include "pesparse.h"
+ #include <gst/codecparsers/gsth264parser.h>
+ #include <gst/codecparsers/gstmpegvideoparser.h>
+ #include <gst/video/video-color.h>
+
+ #include <math.h>
+
+ #define _gst_log2(x) (log(x)/log(2))
+
+ /*
+ * tsdemux
+ *
+ * See TODO for explanations on improvements needed
+ */
+
+ #define CONTINUITY_UNSET 255
+ #define MAX_CONTINUITY 15
+
+ /* Seeking/Scanning related variables */
+
+ /* seek to SEEK_TIMESTAMP_OFFSET before the desired offset and search then
+ * either accurately or for the next timestamp
+ */
+ #define SEEK_TIMESTAMP_OFFSET (2500 * GST_MSECOND)
+
+ #define GST_FLOW_REWINDING GST_FLOW_CUSTOM_ERROR
+
+ /* latency in msecs */
+ #define DEFAULT_LATENCY (700)
+
+ /* Limit PES packet collection to a maximum of 32MB
+ * which is more than large enough to support an H264 frame at
+ * maximum profile/level/bitrate at 30fps or above.
+ * PES bigger than this limit will be output in buffers of
+ * up to this size */
+ #define MAX_PES_PAYLOAD (32 * 1024 * 1024)
+
+ GST_DEBUG_CATEGORY_STATIC (ts_demux_debug);
+ #define GST_CAT_DEFAULT ts_demux_debug
+
+ #define ABSDIFF(a,b) (((a) > (b)) ? ((a) - (b)) : ((b) - (a)))
+
+ static GQuark QUARK_TSDEMUX;
+ static GQuark QUARK_PID;
+ static GQuark QUARK_PCR;
+ static GQuark QUARK_OPCR;
+ static GQuark QUARK_PTS;
+ static GQuark QUARK_DTS;
+ static GQuark QUARK_OFFSET;
++#ifdef TIZEN_FEATURE_HLS_WEBVTT
++static GQuark QUARK_BUFFER_PTS;
++#endif
+
+ /* Per-stream state machine for PES packet reconstruction
+  * (stored in TSDemuxStream.state). */
+ typedef enum
+ {
+ PENDING_PACKET_EMPTY = 0, /* No pending packet/buffer
+ * Push incoming buffers to the array */
+ PENDING_PACKET_HEADER, /* PES header needs to be parsed
+ * Push incoming buffers to the array */
+ PENDING_PACKET_BUFFER, /* Currently filling up output buffer
+ * Push incoming buffers to the bufferlist */
+ PENDING_PACKET_DISCONT /* Discontinuity in incoming packets
+ * Drop all incoming buffers */
+ } PendingPacketState;
+
+ /* Pending buffer: a reconstructed PES payload held back (e.g. until a
+  * valid timestamp is available) before being pushed downstream. */
+ typedef struct
+ {
+ /* The fully reconstructed buffer */
+ GstBuffer *buffer;
+
+ /* Raw PTS/DTS (in 90kHz units) */
+ guint64 pts, dts;
+ } PendingBuffer;
+
+ typedef struct _TSDemuxStream TSDemuxStream;
+
+ typedef struct _TSDemuxH264ParsingInfos TSDemuxH264ParsingInfos;
+ typedef struct _TSDemuxJP2KParsingInfos TSDemuxJP2KParsingInfos;
+ typedef struct _TSDemuxADTSParsingInfos TSDemuxADTSParsingInfos;
+
+ /* Returns TRUE if a keyframe was found */
+ typedef gboolean (*GstTsDemuxKeyFrameScanFunction) (TSDemuxStream * stream,
+ guint8 * data, const gsize data_size, const gsize max_frame_offset);
+
+ /* Plain heap-allocated byte blob; released with clear_simple_buffer(). */
+ typedef struct
+ {
+ guint8 *data;
+ gsize size;
+ } SimpleBuffer;
+
+ struct _TSDemuxH264ParsingInfos
+ {
+ /* H264 parsing data */
+ GstH264NalParser *parser;
+ GstByteWriter *sps;
+ GstByteWriter *pps;
+ GstByteWriter *sei;
+ /* Keyframe payload cached while rewinding (see scan_keyframe_h264) */
+ SimpleBuffer framedata;
+ };
+
+ struct _TSDemuxJP2KParsingInfos
+ {
+ /* J2K parsing data */
+ gboolean interlace;
+ };
+
+ struct _TSDemuxADTSParsingInfos
+ {
+ guint mpegversion;
+ };
+
+ /* Per-elementary-stream state.  Embeds MpegTSBaseStream as its first
+  * member so it can be used wherever mpegtsbase expects a stream
+  * (base->stream_size is set to sizeof (TSDemuxStream) in _init). */
+ struct _TSDemuxStream
+ {
+ MpegTSBaseStream stream;
+
+ GstPad *pad;
+
+ /* Whether the pad was added or not */
+ gboolean active;
+
+ /* Whether this is a sparse stream (subtitles or metadata) */
+ gboolean sparse;
+
+ /* TRUE if we are waiting for a valid timestamp */
+ gboolean pending_ts;
+
+ /* Output data */
+ PendingPacketState state;
+
+ /* Data being reconstructed (allocated) */
+ guint8 *data;
+
+ /* Size of data being reconstructed (if known, else 0) */
+ guint expected_size;
+
+ /* Amount of bytes in current ->data */
+ guint current_size;
+ /* Size of ->data */
+ guint allocated_size;
+
+ /* Current PTS/DTS for this stream (in running time) */
+ GstClockTime pts;
+ GstClockTime dts;
+
+ /* Reference PTS used to detect gaps */
+ GstClockTime gap_ref_pts;
+ /* Number of outputted buffers */
+ guint32 nb_out_buffers;
+ /* Reference number of buffers for gaps */
+ guint32 gap_ref_buffers;
+
+ /* Current PTS/DTS for this stream (in 90kHz unit) */
+ guint64 raw_pts, raw_dts;
+
+ /* Whether this stream needs to send a newsegment */
+ gboolean need_newsegment;
+
+ /* Whether the next output buffer should be DISCONT */
+ gboolean discont;
+
+ /* The value to use when calculating the newsegment */
+ GstClockTime first_pts;
+
+ GstTagList *taglist;
+
+ gint continuity_counter;
+
+ /* List of pending buffers */
+ GList *pending;
+
+ /* if != 0, output only PES from that substream */
+ guint8 target_pes_substream;
+ gboolean needs_keyframe;
+
+ /* PTS/DTS at the seek target (used when seeking accurately) */
+ GstClockTime seeked_pts, seeked_dts;
+
+ /* Codec-specific keyframe scanner used for accurate seeking */
+ GstTsDemuxKeyFrameScanFunction scan_function;
+ TSDemuxH264ParsingInfos h264infos;
+ TSDemuxJP2KParsingInfos jp2kInfos;
+ /* NOTE(review): 'atdsInfos' looks like a typo for 'adtsInfos'; renaming
+  * would touch every user, so it is only flagged here. */
+ TSDemuxADTSParsingInfos atdsInfos;
+ };
+
+ #define VIDEO_CAPS \
+ GST_STATIC_CAPS (\
+ "video/mpeg, " \
+ "mpegversion = (int) { 1, 2, 4 }, " \
+ "systemstream = (boolean) FALSE; " \
+ "video/x-h264,stream-format=(string)byte-stream;" \
+ "video/x-h265,stream-format=(string)byte-stream;" \
+ "video/x-dirac;" \
+ "video/x-cavs;" \
+ "video/x-wmv," \
+ "wmvversion = (int) 3, " \
+ "format = (string) WVC1;" \
+ "image/x-jpc;" \
+ )
+
+ #define AUDIO_CAPS \
+ GST_STATIC_CAPS ( \
+ "audio/mpeg, " \
+ "mpegversion = (int) 1;" \
+ "audio/mpeg, " \
+ "mpegversion = (int) { 2, 4 }, " \
+ "stream-format = (string) adts; " \
+ "audio/mpeg, " \
+ "mpegversion = (int) 4, " \
+ "stream-format = (string) loas; " \
+ "audio/x-lpcm, " \
+ "width = (int) { 16, 20, 24 }, " \
+ "rate = (int) { 48000, 96000 }, " \
+ "channels = (int) [ 1, 8 ], " \
+ "dynamic_range = (int) [ 0, 255 ], " \
+ "emphasis = (boolean) { FALSE, TRUE }, " \
+ "mute = (boolean) { FALSE, TRUE }; " \
+ "audio/x-ac3; audio/x-eac3;" \
+ "audio/x-ac4;" \
+ "audio/x-dts;" \
+ "audio/x-opus;" \
+ "audio/x-private-ts-lpcm" \
+ )
+
+ /* Can also use the subpicture pads for text subtitles? */
+ #define SUBPICTURE_CAPS \
+ GST_STATIC_CAPS ("subpicture/x-pgs; subpicture/x-dvd; subpicture/x-dvb")
+
+ static GstStaticPadTemplate video_template =
+ GST_STATIC_PAD_TEMPLATE ("video_%01x_%05x", GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ VIDEO_CAPS);
+
+ static GstStaticPadTemplate audio_template =
+ GST_STATIC_PAD_TEMPLATE ("audio_%01x_%05x",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ AUDIO_CAPS);
+
+ static GstStaticPadTemplate subpicture_template =
+ GST_STATIC_PAD_TEMPLATE ("subpicture_%01x_%05x",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ SUBPICTURE_CAPS);
+
+ static GstStaticPadTemplate private_template =
+ GST_STATIC_PAD_TEMPLATE ("private_%01x_%05x",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ enum
+ {
+ PROP_0,
+ PROP_PROGRAM_NUMBER,
+ PROP_EMIT_STATS,
+ PROP_LATENCY,
+ /* FILL ME */
+ };
+
+ /* Pad functions */
+
+
+ /* mpegtsbase methods */
+ static void
+ gst_ts_demux_update_program (MpegTSBase * base, MpegTSBaseProgram * program);
+ static void
+ gst_ts_demux_program_started (MpegTSBase * base, MpegTSBaseProgram * program);
+ static void
+ gst_ts_demux_program_stopped (MpegTSBase * base, MpegTSBaseProgram * program);
+ static gboolean
+ gst_ts_demux_can_remove_program (MpegTSBase * base,
+ MpegTSBaseProgram * program);
+ static void gst_ts_demux_reset (MpegTSBase * base);
+ static GstFlowReturn
+ gst_ts_demux_push (MpegTSBase * base, MpegTSPacketizerPacket * packet,
+ GstMpegtsSection * section);
+ static void gst_ts_demux_flush (MpegTSBase * base, gboolean hard);
+ static GstFlowReturn gst_ts_demux_drain (MpegTSBase * base);
+ static gboolean
+ gst_ts_demux_stream_added (MpegTSBase * base, MpegTSBaseStream * stream,
+ MpegTSBaseProgram * program);
+ static void
+ gst_ts_demux_stream_removed (MpegTSBase * base, MpegTSBaseStream * stream);
+ static GstFlowReturn gst_ts_demux_do_seek (MpegTSBase * base, GstEvent * event);
+ static void gst_ts_demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_ts_demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_ts_demux_flush_streams (GstTSDemux * tsdemux, gboolean hard);
+ static GstFlowReturn
+ gst_ts_demux_push_pending_data (GstTSDemux * demux, TSDemuxStream * stream,
+ MpegTSBaseProgram * program);
+ static void gst_ts_demux_stream_flush (TSDemuxStream * stream,
+ GstTSDemux * demux, gboolean hard);
+
+ static gboolean push_event (MpegTSBase * base, GstEvent * event);
+ static gboolean sink_query (MpegTSBase * base, GstQuery * query);
+ static void gst_ts_demux_check_and_sync_streams (GstTSDemux * demux,
+ GstClockTime time);
+
+ /* One-shot type-initialization hook (run through G_DEFINE_TYPE_WITH_CODE
+  * below): interns the GQuarks used as structure field names, presumably
+  * for the pcr/opcr/pts/dts statistics messages enabled by the
+  * "emit-stats" property — confirm against the message-posting code. */
+ static void
+ _extra_init (void)
+ {
+ QUARK_TSDEMUX = g_quark_from_string ("tsdemux");
+ QUARK_PID = g_quark_from_string ("pid");
+ QUARK_PCR = g_quark_from_string ("pcr");
+ QUARK_OPCR = g_quark_from_string ("opcr");
+ QUARK_PTS = g_quark_from_string ("pts");
+ QUARK_DTS = g_quark_from_string ("dts");
+ QUARK_OFFSET = g_quark_from_string ("offset");
++#ifdef TIZEN_FEATURE_HLS_WEBVTT
++ QUARK_BUFFER_PTS = g_quark_from_string ("buffer_pts");
++#endif
+ }
+
+ #define gst_ts_demux_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstTSDemux, gst_ts_demux, GST_TYPE_MPEGTS_BASE,
+ _extra_init ());
+ #define _do_element_init \
+ GST_DEBUG_CATEGORY_INIT (ts_demux_debug, "tsdemux", 0, \
+ "MPEG transport stream demuxer");\
+ init_pes_parser ();
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (tsdemux, "tsdemux",
+ GST_RANK_PRIMARY, GST_TYPE_TS_DEMUX, _do_element_init);
+
+ /* GObject dispose vmethod: releases the flow combiner and chains up.
+  *
+  * GObject may invoke dispose more than once on the same instance, so
+  * guard the free and clear the pointer to avoid a double free of the
+  * combiner on a repeated dispose. */
+ static void
+ gst_ts_demux_dispose (GObject * object)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX_CAST (object);
+
+ if (demux->flowcombiner) {
+ gst_flow_combiner_free (demux->flowcombiner);
+ demux->flowcombiner = NULL;
+ }
+
+ GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
+ }
+
+ /* Class initialization: installs properties, registers the four source
+  * pad templates, sets element metadata and wires up the MpegTSBase
+  * virtual methods implemented in this file. */
+ static void
+ gst_ts_demux_class_init (GstTSDemuxClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ MpegTSBaseClass *ts_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gobject_class->set_property = gst_ts_demux_set_property;
+ gobject_class->get_property = gst_ts_demux_get_property;
+ gobject_class->dispose = gst_ts_demux_dispose;
+
+ g_object_class_install_property (gobject_class, PROP_PROGRAM_NUMBER,
+ g_param_spec_int ("program-number", "Program number",
+ "Program Number to demux for (-1 to ignore)", -1, G_MAXINT,
+ -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_EMIT_STATS,
+ g_param_spec_boolean ("emit-stats", "Emit statistics",
+ "Emit messages for every pcr/opcr/pts/dts", FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_int ("latency", "Latency",
+ "Latency to add for smooth demuxing (in ms)", -1,
+ G_MAXINT, DEFAULT_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /* Sometimes-pads, one template per stream category */
+ element_class = GST_ELEMENT_CLASS (klass);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&video_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&audio_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&subpicture_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&private_template));
+
+ gst_element_class_set_static_metadata (element_class,
+ "MPEG transport stream demuxer",
+ "Codec/Demuxer",
+ "Demuxes MPEG2 transport streams",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>\n"
+ "Edward Hervey <edward.hervey@collabora.co.uk>");
+
+ /* MpegTSBase virtual methods implemented by tsdemux */
+ ts_class = GST_MPEGTS_BASE_CLASS (klass);
+ ts_class->reset = GST_DEBUG_FUNCPTR (gst_ts_demux_reset);
+ ts_class->push = GST_DEBUG_FUNCPTR (gst_ts_demux_push);
+ ts_class->push_event = GST_DEBUG_FUNCPTR (push_event);
+ ts_class->sink_query = GST_DEBUG_FUNCPTR (sink_query);
+ ts_class->program_started = GST_DEBUG_FUNCPTR (gst_ts_demux_program_started);
+ ts_class->program_stopped = GST_DEBUG_FUNCPTR (gst_ts_demux_program_stopped);
+ ts_class->update_program = GST_DEBUG_FUNCPTR (gst_ts_demux_update_program);
+ ts_class->can_remove_program = gst_ts_demux_can_remove_program;
+ ts_class->stream_added = gst_ts_demux_stream_added;
+ ts_class->stream_removed = gst_ts_demux_stream_removed;
+ ts_class->seek = GST_DEBUG_FUNCPTR (gst_ts_demux_do_seek);
+ ts_class->flush = GST_DEBUG_FUNCPTR (gst_ts_demux_flush);
+ ts_class->drain = GST_DEBUG_FUNCPTR (gst_ts_demux_drain);
+ }
+
+ /* MpegTSBase reset vmethod: drops all cached per-instance state (segment
+  * event, global tags, previous program, seek bookkeeping).  Also called
+  * from gst_ts_demux_init() to establish the initial state. */
+ static void
+ gst_ts_demux_reset (MpegTSBase * base)
+ {
+ GstTSDemux *demux = (GstTSDemux *) base;
+
+ demux->rate = 1.0;
+ if (demux->segment_event) {
+ gst_event_unref (demux->segment_event);
+ demux->segment_event = NULL;
+ }
+
+ if (demux->global_tags) {
+ gst_tag_list_unref (demux->global_tags);
+ demux->global_tags = NULL;
+ }
++#ifdef TIZEN_FEATURE_TSDEMUX_LANG_TAG
++ if (demux->pending_custom_event) {
++ gst_event_unref (demux->pending_custom_event);
++ demux->pending_custom_event = NULL;
++ }
++
++ if (demux->pending_language_tag) {
++ gst_tag_list_unref (demux->pending_language_tag);
++ demux->pending_language_tag = NULL;
++ }
++#endif
+
+ if (demux->previous_program) {
+ mpegts_base_deactivate_and_free_program (base, demux->previous_program);
+ demux->previous_program = NULL;
+ }
+
+ demux->have_group_id = FALSE;
+ demux->group_id = G_MAXUINT;
+
+ demux->last_seek_offset = -1;
+ demux->program_generation = 0;
+ }
+
+ /* Instance initialization: configures the mpegtsbase parent (per-stream
+  * struct size, section handling), creates the flow combiner, sets
+  * property defaults and resets all remaining state. */
+ static void
+ gst_ts_demux_init (GstTSDemux * demux)
+ {
+ MpegTSBase *base = (MpegTSBase *) demux;
+
+ base->stream_size = sizeof (TSDemuxStream);
+ base->parse_private_sections = TRUE;
+ /* We are not interested in sections (all handled by mpegtsbase) */
+ base->push_section = FALSE;
+
+ demux->flowcombiner = gst_flow_combiner_new ();
+ demux->requested_program_number = -1;
+ demux->program_number = -1;
+ demux->latency = DEFAULT_LATENCY;
+ gst_ts_demux_reset (base);
+ }
+
+
+ /* GObject set_property vmethod for program-number / emit-stats /
+  * latency. */
+ static void
+ gst_ts_demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_PROGRAM_NUMBER:
+ /* FIXME: do something if program is switched as opposed to set at
+ * beginning */
+ demux->requested_program_number = g_value_get_int (value);
+ break;
+ case PROP_EMIT_STATS:
+ demux->emit_statistics = g_value_get_boolean (value);
+ break;
+ case PROP_LATENCY:
+ demux->latency = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+ /* GObject get_property vmethod, mirror of set_property above. */
+ static void
+ gst_ts_demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_PROGRAM_NUMBER:
+ g_value_set_int (value, demux->requested_program_number);
+ break;
+ case PROP_EMIT_STATS:
+ g_value_set_boolean (value, demux->emit_statistics);
+ break;
+ case PROP_LATENCY:
+ g_value_set_int (value, demux->latency);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ }
+
+ /* Estimates the stream duration by querying upstream for the total size
+  * in bytes and converting it to time via the packetizer's PCR-based
+  * offset->timestamp mapping for the active program's PCR PID.
+  *
+  * Returns TRUE and fills @dur on success; FALSE when there is no active
+  * program, the byte query fails, or the conversion yields an invalid
+  * clock time. */
+ static gboolean
+ gst_ts_demux_get_duration (GstTSDemux * demux, GstClockTime * dur)
+ {
+ MpegTSBase *base = (MpegTSBase *) demux;
+ gboolean res = FALSE;
+ gint64 val;
+
+ if (!demux->program) {
+ GST_DEBUG_OBJECT (demux, "No active program yet, can't provide duration");
+ return FALSE;
+ }
+
+ /* Get total size in bytes */
+ if (gst_pad_peer_query_duration (base->sinkpad, GST_FORMAT_BYTES, &val)) {
+ /* Convert it to duration */
+ *dur =
+ mpegts_packetizer_offset_to_ts (base->packetizer, val,
+ demux->program->pcr_pid);
+ if (GST_CLOCK_TIME_IS_VALID (*dur))
+ res = TRUE;
+ }
+ return res;
+ }
+
+ /* Source-pad query handler.  Handles DURATION (upstream first, falling
+  * back to the PCR-based estimate), LATENCY (adds the configured demuxing
+  * latency on top of upstream's), SEEKING (upstream first, else local
+  * duration-bounded TIME seeking) and SEGMENT; everything else goes to
+  * the default handler. */
+ static gboolean
+ gst_ts_demux_srcpad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ gboolean res = TRUE;
+ GstFormat format;
+ GstTSDemux *demux;
+ MpegTSBase *base;
+
+ demux = GST_TS_DEMUX (parent);
+ base = GST_MPEGTS_BASE (demux);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:
+ {
+ GST_DEBUG ("query duration");
+ gst_query_parse_duration (query, &format, NULL);
+ if (format == GST_FORMAT_TIME) {
+ /* Prefer upstream's answer; only estimate ourselves if it fails */
+ if (!gst_pad_peer_query (base->sinkpad, query)) {
+ GstClockTime dur;
+ if (gst_ts_demux_get_duration (demux, &dur))
+ gst_query_set_duration (query, GST_FORMAT_TIME, dur);
+ else
+ res = FALSE;
+ }
+ } else {
+ GST_DEBUG_OBJECT (demux, "only query duration on TIME is supported");
+ res = FALSE;
+ }
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ {
+ GST_DEBUG ("query latency");
+ res = gst_pad_peer_query (base->sinkpad, query);
+ if (res) {
+ GstClockTime min_lat, max_lat;
+ gboolean live;
+ gint latency;
+
+ /* According to H.222.0
+ Annex D.0.3 (System Time Clock recovery in the decoder)
+ and D.0.2 (Audio and video presentation synchronization)
+
+ We can end up with an interval of up to 700ms between valid
+ PTS/DTS. We therefore allow a latency of 700ms for that.
+ */
+ latency = demux->latency;
+ if (latency < 0)
+ latency = 700;
+ gst_query_parse_latency (query, &live, &min_lat, &max_lat);
+ min_lat += latency * GST_MSECOND;
+ if (GST_CLOCK_TIME_IS_VALID (max_lat))
+ max_lat += latency * GST_MSECOND;
+ gst_query_set_latency (query, live, min_lat, max_lat);
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:
+ {
+ GST_DEBUG ("query seeking");
+ gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
+ GST_DEBUG ("asked for format %s", gst_format_get_name (format));
+ if (format == GST_FORMAT_TIME) {
+ gboolean seekable = FALSE;
+
+ if (gst_pad_peer_query (base->sinkpad, query))
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+
+ /* If upstream is not seekable in TIME format we use
+ * our own values here */
+ if (!seekable) {
+ GstClockTime dur;
+ if (gst_ts_demux_get_duration (demux, &dur)) {
+ gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, dur);
+ GST_DEBUG ("Gave duration: %" GST_TIME_FORMAT, GST_TIME_ARGS (dur));
+ }
+ }
+ } else {
+ GST_DEBUG_OBJECT (demux, "only TIME is supported for query seeking");
+ res = FALSE;
+ }
+ break;
+ }
+ case GST_QUERY_SEGMENT:{
+ GstFormat format;
+ gint64 start, stop;
+
+ format = base->out_segment.format;
+
+ start =
+ gst_segment_to_stream_time (&base->out_segment, format,
+ base->out_segment.start);
+ /* A stop of -1 means "until the end"; report the duration instead */
+ if ((stop = base->out_segment.stop) == -1)
+ stop = base->out_segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&base->out_segment, format, stop);
+
+ gst_query_set_segment (query, base->out_segment.rate, format, start,
+ stop);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ }
+
+ return res;
+
+ }
+
+ /* Releases the payload of @sbuf (if any) and resets it to the empty
+  * state so it can be reused. */
+ static void
+ clear_simple_buffer (SimpleBuffer * sbuf)
+ {
+ if (sbuf->data != NULL) {
+ g_free (sbuf->data);
+ sbuf->data = NULL;
+ sbuf->size = 0;
+ }
+ }
+
+ /* Keyframe scanner for H.264 byte-stream (GstTsDemuxKeyFrameScanFunction,
+  * used during accurate seeks).  Walks the NAL units in @data, collecting
+  * SPS / PPS / SEI into byte writers and looking for an I or SI slice
+  * whose first_mb_in_slice is 0 (i.e. the start of a keyframe).  State is
+  * accumulated in stream->h264infos across calls, so the caller can keep
+  * rewinding and re-invoking until everything needed has been seen.
+  *
+  * Once SPS, PPS and frame data are all available, stream->data is
+  * replaced with the concatenation SPS + PPS + [SEI] + keyframe payload
+  * and TRUE is returned; otherwise any keyframe found is cached in
+  * h264infos->framedata and FALSE is returned (keep scanning). */
+ static gboolean
+ scan_keyframe_h264 (TSDemuxStream * stream, const guint8 * data,
+ const gsize data_size, const gsize max_frame_offset)
+ {
+ gint offset = 0;
+ GstH264NalUnit unit, frame_unit = { 0, };
+ GstH264ParserResult res = GST_H264_PARSER_OK;
+ TSDemuxH264ParsingInfos *h264infos = &stream->h264infos;
+
+ GstH264NalParser *parser = h264infos->parser;
+
+ /* Lazily create the parser and the SPS/PPS/SEI accumulators */
+ if (G_UNLIKELY (parser == NULL)) {
+ parser = h264infos->parser = gst_h264_nal_parser_new ();
+ h264infos->sps = gst_byte_writer_new ();
+ h264infos->pps = gst_byte_writer_new ();
+ h264infos->sei = gst_byte_writer_new ();
+ }
+
+ while (res == GST_H264_PARSER_OK) {
+ res =
+ gst_h264_parser_identify_nalu (parser, data, offset, data_size, &unit);
+
+ if (res != GST_H264_PARSER_OK && res != GST_H264_PARSER_NO_NAL_END) {
+ GST_INFO_OBJECT (stream->pad, "Error identifying nalu: %i", res);
+ break;
+ }
+
+ res = gst_h264_parser_parse_nal (parser, &unit);
+ if (res != GST_H264_PARSER_OK) {
+ break;
+ }
+
+ switch (unit.type) {
+ case GST_H264_NAL_SEI:
+ /* Only collect parameter/SEI NALs seen before the keyframe */
+ if (frame_unit.size)
+ break;
+
+ if (gst_byte_writer_put_data (h264infos->sei,
+ unit.data + unit.sc_offset,
+ unit.size + unit.offset - unit.sc_offset)) {
+ GST_DEBUG ("adding SEI %u", unit.size + unit.offset - unit.sc_offset);
+ } else {
+ GST_WARNING ("Could not write SEI");
+ }
+ break;
+ case GST_H264_NAL_PPS:
+ if (frame_unit.size)
+ break;
+
+ if (gst_byte_writer_put_data (h264infos->pps,
+ unit.data + unit.sc_offset,
+ unit.size + unit.offset - unit.sc_offset)) {
+ GST_DEBUG ("adding PPS %u", unit.size + unit.offset - unit.sc_offset);
+ } else {
+ GST_WARNING ("Could not write PPS");
+ }
+ break;
+ case GST_H264_NAL_SPS:
+ if (frame_unit.size)
+ break;
+
+ if (gst_byte_writer_put_data (h264infos->sps,
+ unit.data + unit.sc_offset,
+ unit.size + unit.offset - unit.sc_offset)) {
+ GST_DEBUG ("adding SPS %u", unit.size + unit.offset - unit.sc_offset);
+ } else {
+ GST_WARNING ("Could not write SPS");
+ }
+ break;
+ /* these units are considered keyframes in h264parse */
+ case GST_H264_NAL_SLICE:
+ case GST_H264_NAL_SLICE_DPA:
+ case GST_H264_NAL_SLICE_DPB:
+ case GST_H264_NAL_SLICE_DPC:
+ case GST_H264_NAL_SLICE_IDR:
+ {
+ GstH264SliceHdr slice;
+
+ if (h264infos->framedata.size)
+ break;
+
+ res = gst_h264_parser_parse_slice_hdr (parser, &unit, &slice,
+ FALSE, FALSE);
+
+ if (GST_H264_IS_I_SLICE (&slice) || GST_H264_IS_SI_SLICE (&slice)) {
+ if (*(unit.data + unit.offset + 1) & 0x80) {
+ /* means first_mb_in_slice == 0 */
+ /* real frame data */
+ GST_DEBUG_OBJECT (stream->pad, "Found keyframe at: %u",
+ unit.sc_offset);
+ frame_unit = unit;
+ }
+ }
+
+ break;
+ }
+ default:
+ break;
+ }
+
+ /* No forward progress means we consumed everything available */
+ if (offset == unit.sc_offset + unit.size)
+ break;
+
+ offset = unit.sc_offset + unit.size;
+ }
+
+ /* We've got all the infos we need (SPS / PPS and a keyframe, plus
+ * and possibly SEI units. We can stop rewinding the stream
+ */
+ if (gst_byte_writer_get_size (h264infos->sps) &&
+ gst_byte_writer_get_size (h264infos->pps) &&
+ (h264infos->framedata.size || frame_unit.size)) {
+ guint8 *data = NULL;
+
+ gsize tmpsize = gst_byte_writer_get_size (h264infos->pps);
+
+ /* We know that the SPS is first so just put all our data in there */
+ data = gst_byte_writer_reset_and_get_data (h264infos->pps);
+ gst_byte_writer_put_data (h264infos->sps, data, tmpsize);
+ g_free (data);
+
+ tmpsize = gst_byte_writer_get_size (h264infos->sei);
+ if (tmpsize) {
+ GST_DEBUG ("Adding SEI");
+ data = gst_byte_writer_reset_and_get_data (h264infos->sei);
+ gst_byte_writer_put_data (h264infos->sps, data, tmpsize);
+ g_free (data);
+ }
+
+ if (frame_unit.size) { /* We found the everything in one go! */
+ GST_DEBUG ("Adding Keyframe");
+ gst_byte_writer_put_data (h264infos->sps,
+ frame_unit.data + frame_unit.sc_offset,
+ stream->current_size - frame_unit.sc_offset);
+ } else {
+ GST_DEBUG ("Adding Keyframe");
+ gst_byte_writer_put_data (h264infos->sps,
+ h264infos->framedata.data, h264infos->framedata.size);
+ clear_simple_buffer (&h264infos->framedata);
+ }
+
+ /* Replace the stream's pending data with the assembled keyframe */
+ g_free (stream->data);
+ stream->current_size = gst_byte_writer_get_size (h264infos->sps);
+ stream->data = gst_byte_writer_reset_and_get_data (h264infos->sps);
+ gst_byte_writer_init (h264infos->sps);
+ gst_byte_writer_init (h264infos->pps);
+ gst_byte_writer_init (h264infos->sei);
+
+ return TRUE;
+ }
+
+ if (frame_unit.size) {
+ GST_DEBUG_OBJECT (stream->pad, "Keep the keyframe as this is the one"
+ " we will push later");
+
+ h264infos->framedata.data =
+ g_memdup2 (frame_unit.data + frame_unit.sc_offset,
+ stream->current_size - frame_unit.sc_offset);
+ h264infos->framedata.size = stream->current_size - frame_unit.sc_offset;
+ }
+
+ return FALSE;
+ }
+
+ /* We merge data from TS packets so that the scanning methods get a continuous chunk,
+ however the scanning method will return keyframe offset which needs to be translated
+ back to actual offset in file */
+ typedef struct
+ {
+ gint64 real_offset; /* offset of TS packet */
+ gint merged_offset; /* offset of merged data in buffer */
+ } OffsetInfo;
+
+ /* Runs the stream's codec-specific keyframe scanner (if any) over the
+  * reconstructed data.  Returns TRUE when no scanning is required or the
+  * scanner reports a keyframe; FALSE means keep rewinding.
+  *
+  * NOTE: the previous `scan_pid != -1` guard was dead code — the PID is a
+  * 13-bit unsigned value widened to int, so it can never equal -1.  Call
+  * the scanner directly instead. */
+ static gboolean
+ gst_ts_demux_adjust_seek_offset_for_keyframe (TSDemuxStream * stream,
+ guint8 * data, guint64 size)
+ {
+ if (!stream->scan_function)
+ return TRUE;
+
+ return stream->scan_function (stream, data, size, size);
+ }
+
+ /* MpegTSBase seek vmethod.  Validates the seek (forward rates only, no
+  * segment seeks), applies it to a copy of the output segment and, if the
+  * position changed, converts the target time (minus a search margin) to
+  * a byte offset via the PCR mapping and resets the per-stream state so
+  * streaming can restart from there.  If the position did not change, the
+  * segment event is simply regenerated. */
+ static GstFlowReturn
+ gst_ts_demux_do_seek (MpegTSBase * base, GstEvent * event)
+ {
+ GList *tmp;
+
+ GstTSDemux *demux = (GstTSDemux *) base;
+ GstFlowReturn res = GST_FLOW_ERROR;
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ guint64 start_offset;
+ gboolean update = FALSE;
+ GstSegment seeksegment;
+
+ GST_DEBUG ("seek event, %" GST_PTR_FORMAT, event);
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (rate <= 0.0) {
+ GST_WARNING ("Negative rate not supported");
+ goto done;
+ }
+
+ if (flags & (GST_SEEK_FLAG_SEGMENT)) {
+ GST_WARNING ("seek flags 0x%x are not supported", (int) flags);
+ goto done;
+ }
+
+ /* configure the segment with the seek variables */
+ memcpy (&seeksegment, &base->out_segment, sizeof (GstSegment));
+ GST_LOG_OBJECT (demux, "Before seek, output segment %" GST_SEGMENT_FORMAT,
+ &seeksegment);
+
+ /* record offset and rate */
+ demux->rate = rate;
+ if (!gst_segment_do_seek (&seeksegment, rate, format, flags, start_type,
+ start, stop_type, stop, &update)) {
+ GST_DEBUG_OBJECT (demux, "Seek failed in gst_segment_do_seek()");
+ goto done;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "After seek, update %d output segment now %" GST_SEGMENT_FORMAT, update,
+ &seeksegment);
+
+ /* If the position actually changed, update == TRUE */
+ if (update) {
+ /* Back off by SEEK_TIMESTAMP_OFFSET so we can scan forward to the
+ * exact target (clamped at 0) */
+ GstClockTime target = seeksegment.start;
+ if (target >= SEEK_TIMESTAMP_OFFSET)
+ target -= SEEK_TIMESTAMP_OFFSET;
+ else
+ target = 0;
+
+ start_offset =
+ mpegts_packetizer_ts_to_offset (base->packetizer, target,
+ demux->program->pcr_pid);
+ if (G_UNLIKELY (start_offset == -1)) {
+ GST_WARNING ("Couldn't convert start position to an offset");
+ goto done;
+ }
+
+ base->seek_offset = start_offset;
+ demux->last_seek_offset = base->seek_offset;
+ /* Reset segment if we're not doing an accurate seek */
+ demux->reset_segment = (!(flags & GST_SEEK_FLAG_ACCURATE));
+
+ /* Clear any existing segment - it will be recalculated after streaming recommences */
+ gst_event_replace (&demux->segment_event, NULL);
+
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = tmp->data;
+
+ if (flags & GST_SEEK_FLAG_ACCURATE)
+ stream->needs_keyframe = TRUE;
+
+ stream->seeked_pts = GST_CLOCK_TIME_NONE;
+ stream->seeked_dts = GST_CLOCK_TIME_NONE;
+ stream->first_pts = GST_CLOCK_TIME_NONE;
+ stream->need_newsegment = TRUE;
+ }
+ } else {
+ /* Position didn't change, just update the output segment based on
+ * our new one */
+ gst_event_replace (&demux->segment_event, NULL);
+ demux->segment_event = gst_event_new_segment (&seeksegment);
+ if (base->last_seek_seqnum)
+ gst_event_set_seqnum (demux->segment_event, base->last_seek_seqnum);
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = tmp->data;
+ stream->need_newsegment = TRUE;
+ }
+ }
+
+ /* Commit the new segment */
+ memcpy (&base->out_segment, &seeksegment, sizeof (GstSegment));
+ res = GST_FLOW_OK;
+
+ done:
+ return res;
+ }
+
+ /* Source-pad event handler.  SEEK events are routed to mpegtsbase's seek
+  * machinery (and consumed here); everything else goes to the default
+  * handler. */
+ static gboolean
+ gst_ts_demux_srcpad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ gboolean res = TRUE;
+ GstTSDemux *demux = GST_TS_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (pad, "Got event %s",
+ gst_event_type_get_name (GST_EVENT_TYPE (event)));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ res = mpegts_base_handle_seek_event ((MpegTSBase *) demux, pad, event);
+ if (!res)
+ GST_WARNING ("seeking failed");
+ /* the seek event is consumed here, not forwarded */
+ gst_event_unref (event);
+ break;
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ }
+
+ return res;
+ }
+
+ /* Strips tags that describe the container itself (format/codec) from the
+  * global taglist forwarded to the elementary streams. */
+ static void
+ clean_global_taglist (GstTagList * taglist)
+ {
+ gst_tag_list_remove_tag (taglist, GST_TAG_CONTAINER_FORMAT);
+ gst_tag_list_remove_tag (taglist, GST_TAG_CODEC);
+ }
+
+ /* MpegTSBase push_event vmethod: forwards downstream events to every
+  * active stream pad of the current program.  SEGMENT events are dropped
+  * (tsdemux recreates its own), TAG events are merged into the global
+  * taglist first, and EOS triggers a flush of pending data per pad before
+  * being forwarded.  Takes ownership of @event. */
+ static gboolean
+ push_event (MpegTSBase * base, GstEvent * event)
+ {
+ GstTSDemux *demux = (GstTSDemux *) base;
+ GList *tmp;
+ gboolean early_ret = FALSE;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
+ GST_DEBUG_OBJECT (base, "Ignoring segment event (recreated later)");
+ gst_event_unref (event);
+ return TRUE;
+
+ } else if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
+ /* In case we receive tags before data, store them to send later
+ * If we already have the program, send it right away */
+ GstTagList *taglist;
+
+ gst_event_parse_tag (event, &taglist);
+
+ if (demux->global_tags == NULL) {
+ demux->global_tags = gst_tag_list_copy (taglist);
+
+ /* Tags that are stream specific for the container should be considered
+ * global for the container streams */
+ if (gst_tag_list_get_scope (taglist) == GST_TAG_SCOPE_STREAM) {
+ gst_tag_list_set_scope (demux->global_tags, GST_TAG_SCOPE_GLOBAL);
+ }
+ } else {
+ demux->global_tags = gst_tag_list_make_writable (demux->global_tags);
+ gst_tag_list_insert (demux->global_tags, taglist, GST_TAG_MERGE_REPLACE);
+ }
+ clean_global_taglist (demux->global_tags);
+
+ /* tags are stored to be used after if there are no streams yet,
+ * so we should never reject */
+ early_ret = TRUE;
+ }
+
+ /* No program yet: nothing to forward to.  The Tizen additions stash
+ * language tags and the HLS media event for later delivery. */
+ if (G_UNLIKELY (demux->program == NULL)) {
++#ifdef TIZEN_FEATURE_TSDEMUX_LANG_TAG
++ if (demux->global_tags) {
++ gchar *tag = NULL;
++ gst_tag_list_get_string (demux->global_tags, "language-code", &tag);
++ if (tag) {
++ demux->pending_language_tag =
++ gst_tag_list_new (GST_TAG_LANGUAGE_CODE, tag, NULL);
++ g_free (tag);
++ }
++ }
++
++ if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM) {
++ const GstStructure *st = gst_event_get_structure (event);
++ if (gst_structure_has_name (st, "GstHLSMedia")) {
++ if (demux->pending_custom_event)
++ gst_event_unref (demux->pending_custom_event);
++ demux->pending_custom_event = gst_event_ref (event);
++ }
++ }
++#endif
+ gst_event_unref (event);
+ return early_ret;
+ }
+
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) tmp->data;
+ if (stream->pad) {
+ /* If we are pushing out EOS, flush out pending data first */
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS &&
+ gst_pad_is_active (stream->pad))
+ gst_ts_demux_push_pending_data (demux, stream, NULL);
+
+ gst_event_ref (event);
+ gst_pad_push_event (stream->pad, event);
+ }
+ }
+
+ gst_event_unref (event);
+
+ return TRUE;
+ }
+
+ /* MpegTSBase sink_query vmethod.  Answers BITRATE queries by dividing
+  * upstream's byte length by the PCR-derived duration; everything else is
+  * chained up to the parent class. */
+ static gboolean
+ sink_query (MpegTSBase * base, GstQuery * query)
+ {
+ GstTSDemux *demux = (GstTSDemux *) base;
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:{
+ gint64 size_bytes;
+ GstClockTime duration;
+
+ if (gst_pad_peer_query_duration (base->sinkpad, GST_FORMAT_BYTES,
+ &size_bytes) && size_bytes > 0) {
+ if (gst_ts_demux_get_duration (demux, &duration) && duration > 0
+ && duration != GST_CLOCK_TIME_NONE) {
+ /* bits = 8 * bytes, scaled to one second */
+ guint bitrate =
+ gst_util_uint64_scale (8 * size_bytes, GST_SECOND, duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting in a bitrate of %u",
+ size_bytes, GST_TIME_ARGS (duration), bitrate);
+ gst_query_set_bitrate (query, bitrate);
+ res = TRUE;
+ }
+ }
+ break;
+ }
+ default:
+ res = GST_MPEGTS_BASE_CLASS (parent_class)->sink_query (base, query);
+ break;
+ }
+
+ return res;
+ }
+
+ /* Adds @lang_code to the stream's taglist as GST_TAG_LANGUAGE_CODE,
+  * converting the ISO 639-2 code from the descriptor to ISO 639-1 when a
+  * mapping exists.  Creates the taglist on first use. */
+ static inline void
+ add_iso639_language_to_tags (TSDemuxStream * stream, gchar * lang_code)
+ {
+ const gchar *lc;
+
+ GST_LOG ("Add language code for stream: '%s'", lang_code);
+
+ if (!stream->taglist)
+ stream->taglist = gst_tag_list_new_empty ();
+
+ /* descriptor contains ISO 639-2 code, we want the ISO 639-1 code */
+ lc = gst_tag_get_language_code (lang_code);
+
+ /* Only set tag if we have a valid one */
+ if (lc || (lang_code[0] && lang_code[1]))
+ gst_tag_list_add (stream->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, (lc) ? lc : lang_code, NULL);
+ }
+
+ static void
+ gst_ts_demux_create_tags (TSDemuxStream * stream)
+ {
+ MpegTSBaseStream *bstream = (MpegTSBaseStream *) stream;
+ const GstMpegtsDescriptor *desc = NULL;
+ int i, nb;
+
+ desc =
+ mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_ISO_639_LANGUAGE);
+ if (desc) {
+ gchar *lang_code;
+
+ nb = gst_mpegts_descriptor_parse_iso_639_language_nb (desc);
+
+ GST_DEBUG ("Found ISO 639 descriptor (%d entries)", nb);
+
+ for (i = 0; i < nb; i++)
+ if (gst_mpegts_descriptor_parse_iso_639_language_idx (desc, i, &lang_code,
+ NULL)) {
+ add_iso639_language_to_tags (stream, lang_code);
+ g_free (lang_code);
+ }
+
+ return;
+ }
+
+ desc =
+ mpegts_get_descriptor_from_stream (bstream, GST_MTS_DESC_DVB_SUBTITLING);
+
+ if (desc) {
+ gchar *lang_code;
+
+ nb = gst_mpegts_descriptor_parse_dvb_subtitling_nb (desc);
+
+ GST_DEBUG ("Found SUBTITLING descriptor (%d entries)", nb);
+
+ for (i = 0; i < nb; i++)
+ if (gst_mpegts_descriptor_parse_dvb_subtitling_idx (desc, i, &lang_code,
+ NULL, NULL, NULL)) {
+ add_iso639_language_to_tags (stream, lang_code);
+ g_free (lang_code);
+ }
+ }
+
+ if (bstream->stream_type == GST_MPEGTS_STREAM_TYPE_PRIVATE_PES_PACKETS) {
+ desc = mpegts_get_descriptor_from_stream_with_extension (bstream,
+ GST_MTS_DESC_DVB_EXTENSION, GST_MTS_DESC_EXT_DVB_AUDIO_PRESELECTION);
+
+ if (desc) {
+ GPtrArray *list;
+ GstMpegtsAudioPreselectionDescriptor *item;
+
+ if (gst_mpegts_descriptor_parse_audio_preselection_list (desc, &list)) {
+ GST_DEBUG ("Found AUDIO PRESELECTION descriptor (%d entries)",
+ list->len);
+
+ for (i = 0; i < list->len; i++) {
+ item = g_ptr_array_index (list, i);
+ gst_mpegts_descriptor_parse_audio_preselection_dump (item);
+
+ if (item->language_code_present) {
+ add_iso639_language_to_tags (stream, item->language_code);
+ break;
+ }
+ }
+ g_ptr_array_unref (list);
+ }
+ }
+ }
+ }
+
+ static GstPad *
+ create_pad_for_stream (MpegTSBase * base, MpegTSBaseStream * bstream,
+ MpegTSBaseProgram * program)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (base);
+ TSDemuxStream *stream = (TSDemuxStream *) bstream;
+ gchar *name = NULL;
+ GstCaps *caps = NULL;
+ GstPadTemplate *template = NULL;
+ const GstMpegtsDescriptor *desc = NULL;
+ GstPad *pad = NULL;
+ gboolean sparse = FALSE;
+ gboolean is_audio = FALSE, is_video = FALSE, is_subpicture = FALSE,
+ is_private = FALSE;
+
+ gst_ts_demux_create_tags (stream);
+
+ GST_LOG ("Attempting to create pad for stream 0x%04x with stream_type %d",
+ bstream->pid, bstream->stream_type);
+
+ /* First handle BluRay-specific stream types since there is some overlap
+ * between BluRay and non-BluRay stream type identifiers */
+ if (program->registration_id == DRF_ID_HDMV) {
+ switch (bstream->stream_type) {
+ case ST_BD_AUDIO_AC3:
+ {
+ const GstMpegtsDescriptor *ac3_desc;
+
+ /* ATSC ac3 audio descriptor */
+ ac3_desc =
+ mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_AC3_AUDIO_STREAM);
+ if (ac3_desc && DESC_AC_AUDIO_STREAM_bsid (ac3_desc->data) != 16) {
+ GST_LOG ("ac3 audio");
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac3");
+ } else {
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-eac3");
+ }
+ break;
+ }
+ case ST_BD_AUDIO_EAC3:
+ case ST_BD_AUDIO_AC3_PLUS:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-eac3");
+ break;
+ case ST_BD_AUDIO_AC4:
+ /* Opus also uses 0x06, and there are bad streams that have HDMV registration ID,
+ * but contain an Opus registration id, so check for it */
+ if (bstream->registration_id != DRF_ID_OPUS) {
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac4");
+ }
+ break;
+ case ST_BD_AUDIO_AC3_TRUE_HD:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-true-hd");
+ stream->target_pes_substream = 0x72;
+ break;
+ case ST_BD_AUDIO_LPCM:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-private-ts-lpcm");
+ break;
+ case ST_BD_PGS_SUBPICTURE:
+ is_subpicture = TRUE;
+ caps = gst_caps_new_empty_simple ("subpicture/x-pgs");
+ sparse = TRUE;
+ break;
+ case ST_BD_AUDIO_DTS_HD:
+ case ST_BD_AUDIO_DTS_HD_MASTER_AUDIO:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-dts");
+ stream->target_pes_substream = 0x71;
+ break;
+ }
+ }
+
+ if (caps)
+ goto done;
+
+ /* Handle non-BluRay stream types */
+ switch (bstream->stream_type) {
+ case GST_MPEGTS_STREAM_TYPE_VIDEO_MPEG1:
+ case GST_MPEGTS_STREAM_TYPE_VIDEO_MPEG2:
+ case ST_PS_VIDEO_MPEG2_DCII:
+ /* FIXME : Use DCII registration code (ETV1 ?) to handle that special
+ * Stream type (ST_PS_VIDEO_MPEG2_DCII) */
+ /* FIXME : Use video descriptor (0x1) to refine caps with:
+ * * frame_rate
+ * * profile_and_level
+ */
+ GST_LOG ("mpeg video");
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT,
+ bstream->stream_type == GST_MPEGTS_STREAM_TYPE_VIDEO_MPEG1 ? 1 : 2,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+
+ break;
+ case GST_MPEGTS_STREAM_TYPE_AUDIO_MPEG1:
+ case GST_MPEGTS_STREAM_TYPE_AUDIO_MPEG2:
+ GST_LOG ("mpeg audio");
+ is_audio = TRUE;
+ caps =
+ gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 1,
+ NULL);
+ /* HDV is always mpeg 1 audio layer 2 */
+ if (program->registration_id == DRF_ID_TSHV)
+ gst_caps_set_simple (caps, "layer", G_TYPE_INT, 2, NULL);
+ break;
+ case GST_MPEGTS_STREAM_TYPE_PRIVATE_PES_PACKETS:
+ GST_LOG ("private data");
+ /* FIXME: Move all of this into a common method (there might be other
+ * types also, depending on registration descriptors as well
+ */
+
+ desc = mpegts_get_descriptor_from_stream_with_extension (bstream,
+ GST_MTS_DESC_DVB_EXTENSION, GST_MTS_DESC_EXT_DVB_AC4);
+ if (desc) {
+ GST_LOG ("ac4 audio");
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac4");
+ break;
+ }
+
+ desc = mpegts_get_descriptor_from_stream (bstream, GST_MTS_DESC_DVB_AC3);
+ if (desc) {
+ GST_LOG ("ac3 audio");
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac3");
+ break;
+ }
+
+ desc =
+ mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_DVB_ENHANCED_AC3);
+ if (desc) {
+ GST_LOG ("ac3 audio");
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-eac3");
+ break;
+ }
+ desc =
+ mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_DVB_TELETEXT);
+ if (desc) {
+ GST_LOG ("teletext");
+ is_private = TRUE;
+ caps = gst_caps_new_empty_simple ("application/x-teletext");
+ sparse = TRUE;
+ break;
+ }
+ desc =
+ mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_DVB_SUBTITLING);
+ if (desc) {
+ GST_LOG ("subtitling");
+ is_subpicture = TRUE;
+ caps = gst_caps_new_empty_simple ("subpicture/x-dvb");
+ sparse = TRUE;
+ break;
+ }
+
+ switch (bstream->registration_id) {
+ case DRF_ID_DTS1:
+ case DRF_ID_DTS2:
+ case DRF_ID_DTS3:
+ /* SMPTE registered DTS */
+ is_private = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-dts");
+ break;
+ case DRF_ID_S302M:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-smpte-302m");
+ break;
+ case DRF_ID_OPUS:
+ desc = mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_DVB_EXTENSION);
+ if (desc != NULL && desc->tag_extension == 0x80 && desc->length >= 1) { /* User defined (provisional Opus) */
+ guint8 channel_config_code;
+ GstByteReader br;
+
+ /* skip tag, length and tag_extension */
+ gst_byte_reader_init (&br, desc->data + 3, desc->length - 1);
+ channel_config_code = gst_byte_reader_get_uint8_unchecked (&br);
+
+ if ((channel_config_code & 0x8f) <= 8) {
+ static const guint8 coupled_stream_counts[9] = {
+ 1, 0, 1, 1, 2, 2, 2, 3, 3
+ };
+ static const guint8 channel_map_a[8][8] = {
+ {0},
+ {0, 1},
+ {0, 2, 1},
+ {0, 1, 2, 3},
+ {0, 4, 1, 2, 3},
+ {0, 4, 1, 2, 3, 5},
+ {0, 4, 1, 2, 3, 5, 6},
+ {0, 6, 1, 2, 3, 4, 5, 7},
+ };
+ static const guint8 channel_map_b[8][8] = {
+ {0},
+ {0, 1},
+ {0, 1, 2},
+ {0, 1, 2, 3},
+ {0, 1, 2, 3, 4},
+ {0, 1, 2, 3, 4, 5},
+ {0, 1, 2, 3, 4, 5, 6},
+ {0, 1, 2, 3, 4, 5, 6, 7},
+ };
+
+ gint channels = -1, stream_count, coupled_count, mapping_family;
+ guint8 *channel_mapping = NULL;
+
+ channels = channel_config_code ? (channel_config_code & 0x0f) : 2;
+ if (channel_config_code == 0 || channel_config_code == 0x80) {
+ /* Dual Mono */
+ mapping_family = 255;
+ if (channel_config_code == 0) {
+ stream_count = 1;
+ coupled_count = 1;
+ } else {
+ stream_count = 2;
+ coupled_count = 0;
+ }
+ channel_mapping = g_new0 (guint8, channels);
+ memcpy (channel_mapping, &channel_map_a[1], channels);
+ } else if (channel_config_code <= 8) {
+ mapping_family = (channels > 2) ? 1 : 0;
+ stream_count =
+ channel_config_code -
+ coupled_stream_counts[channel_config_code];
+ coupled_count = coupled_stream_counts[channel_config_code];
+ if (mapping_family != 0) {
+ channel_mapping = g_new0 (guint8, channels);
+ memcpy (channel_mapping, &channel_map_a[channels - 1],
+ channels);
+ }
+ } else if (channel_config_code >= 0x82
+ && channel_config_code <= 0x88) {
+ mapping_family = 1;
+ stream_count = channels;
+ coupled_count = 0;
+ channel_mapping = g_new0 (guint8, channels);
+ memcpy (channel_mapping, &channel_map_b[channels - 1],
+ channels);
+ } else if (channel_config_code == 0x81) {
+ if (gst_byte_reader_get_remaining (&br) < 2) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid Opus descriptor with extended channel configuration");
+ channels = -1;
+ break;
+ }
+
+ channels = gst_byte_reader_get_uint8_unchecked (&br);
+ mapping_family = gst_byte_reader_get_uint8_unchecked (&br);
+
+ /* Overwrite values from above */
+ if (channels == 0) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid Opus descriptor with extended channel configuration");
+ channels = -1;
+ break;
+ }
+
+ if (mapping_family == 0 && channels <= 2) {
+ stream_count = channels - coupled_stream_counts[channels];
+ coupled_count = coupled_stream_counts[channels];
+ } else {
+ GstBitReader breader;
+ guint8 stream_count_minus_one, coupled_stream_count;
+ gint stream_count_minus_one_len, coupled_stream_count_len;
+ gint channel_mapping_len, i;
+
+ gst_bit_reader_init (&breader,
+ gst_byte_reader_get_data_unchecked
+ (&br, gst_byte_reader_get_remaining
+ (&br)), gst_byte_reader_get_remaining (&br));
+
+ stream_count_minus_one_len = ceil (_gst_log2 (channels));
+ if (!gst_bit_reader_get_bits_uint8 (&breader,
+ &stream_count_minus_one,
+ stream_count_minus_one_len)) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid Opus descriptor with extended channel configuration");
+ channels = -1;
+ break;
+ }
+
+ stream_count = stream_count_minus_one + 1;
+ coupled_stream_count_len =
+ ceil (_gst_log2 (stream_count_minus_one + 2));
+
+ if (!gst_bit_reader_get_bits_uint8 (&breader,
+ &coupled_stream_count, coupled_stream_count_len)) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid Opus descriptor with extended channel configuration");
+ channels = -1;
+ break;
+ }
+
+ coupled_count = coupled_stream_count;
+
+ channel_mapping_len =
+ ceil (_gst_log2 (stream_count_minus_one + 1 +
+ coupled_stream_count + 1));
+ channel_mapping = g_new0 (guint8, channels);
+ for (i = 0; i < channels; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&breader,
+ &channel_mapping[i], channel_mapping_len)) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid Opus descriptor with extended channel configuration");
+ break;
+ }
+ }
+
+ /* error above */
+ if (i != channels) {
+ channels = -1;
+ g_free (channel_mapping);
+ channel_mapping = NULL;
+ break;
+ }
+ }
+ } else {
+ g_assert_not_reached ();
+ }
+
+ if (channels != -1) {
+ is_audio = TRUE;
+ caps =
+ gst_codec_utils_opus_create_caps (48000, channels,
+ mapping_family, stream_count, coupled_count,
+ channel_mapping);
+
+ g_free (channel_mapping);
+ }
+ } else {
+ GST_WARNING_OBJECT (demux,
+ "unexpected channel config code 0x%02x", channel_config_code);
+ }
+ } else {
+ GST_WARNING_OBJECT (demux, "Opus, but no extension descriptor");
+ }
+ break;
+ case DRF_ID_HEVC:
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", NULL);
+ break;
+ case DRF_ID_KLVA:
+ sparse = TRUE;
+ is_private = TRUE;
+ caps = gst_caps_new_simple ("meta/x-klv",
+ "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case DRF_ID_AC4:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac4");
+ break;
+ }
+ if (caps)
+ break;
+
+ /* hack for itv hd (sid 10510, video pid 3401) */
+ if (program->program_number == 10510 && bstream->pid == 3401) {
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "byte-stream", NULL);
+ }
+ break;
+ case ST_HDV_AUX_V:
+ /* FIXME : Should only be used with specific PMT registration_descriptor */
+ /* We don't expose those streams since they're only helper streams */
+ /* template = gst_static_pad_template_get (&private_template); */
+ /* name = g_strdup_printf ("private_%04x", bstream->pid); */
+ /* caps = gst_caps_new_simple ("hdv/aux-v", NULL); */
+ break;
+ case ST_HDV_AUX_A:
+ /* FIXME : Should only be used with specific PMT registration_descriptor */
+ /* We don't expose those streams since they're only helper streams */
+ /* template = gst_static_pad_template_get (&private_template); */
+ /* name = g_strdup_printf ("private_%04x", bstream->pid); */
+ /* caps = gst_caps_new_simple ("hdv/aux-a", NULL); */
+ break;
+ case GST_MPEGTS_STREAM_TYPE_AUDIO_AAC_ADTS:
+ is_audio = TRUE;
+ /* prefer mpegversion 4 since it's the more commonly supported one */
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "stream-format", G_TYPE_STRING, "adts", NULL);
+ /* we will set caps later once parsing adts header is done */
+ stream->atdsInfos.mpegversion = 4;
+ break;
+ case GST_MPEGTS_STREAM_TYPE_AUDIO_AAC_LATM:
+ is_audio = TRUE;
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "stream-format", G_TYPE_STRING, "loas", NULL);
+ break;
+ case GST_MPEGTS_STREAM_TYPE_VIDEO_MPEG4:
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MPEGTS_STREAM_TYPE_VIDEO_H264:
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "byte-stream", NULL);
+ break;
+ case GST_MPEGTS_STREAM_TYPE_VIDEO_HEVC:
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", NULL);
+ break;
+ case GST_MPEGTS_STREAM_TYPE_VIDEO_JP2K:
+ is_video = TRUE;
+ desc =
+ mpegts_get_descriptor_from_stream (bstream, GST_MTS_DESC_J2K_VIDEO);
+ if (desc == NULL) {
+ caps = gst_caps_new_empty_simple ("image/x-jpc");
+ break;
+ } else {
+ GstByteReader br;
+ guint16 DEN_frame_rate = 0;
+ guint16 NUM_frame_rate = 0;
+ guint8 color_specification = 0;
+ guint8 remaining_8b = 0;
+ gboolean interlaced_video = 0;
+ const gchar *interlace_mode = NULL;
+ const gchar *colorspace = NULL;
+ const gchar *colorimetry_mode = NULL;
+ guint16 profile_and_level G_GNUC_UNUSED;
+ guint32 horizontal_size G_GNUC_UNUSED;
+ guint32 vertical_size G_GNUC_UNUSED;
+ guint32 max_bit_rate G_GNUC_UNUSED;
+ guint32 max_buffer_size G_GNUC_UNUSED;
+ const guint desc_min_length = 24;
+
+ if (desc->length < desc_min_length) {
+ GST_ERROR
+ ("GST_MPEGTS_STREAM_TYPE_VIDEO_JP2K: descriptor length %d too short",
+ desc->length);
+ return NULL;
+ }
+
+ /* Skip the descriptor tag and length */
+ gst_byte_reader_init (&br, desc->data + 2, desc->length);
+
+ profile_and_level = gst_byte_reader_get_uint16_be_unchecked (&br);
+ horizontal_size = gst_byte_reader_get_uint32_be_unchecked (&br);
+ vertical_size = gst_byte_reader_get_uint32_be_unchecked (&br);
+ max_bit_rate = gst_byte_reader_get_uint32_be_unchecked (&br);
+ max_buffer_size = gst_byte_reader_get_uint32_be_unchecked (&br);
+ DEN_frame_rate = gst_byte_reader_get_uint16_be_unchecked (&br);
+ NUM_frame_rate = gst_byte_reader_get_uint16_be_unchecked (&br);
+ color_specification = gst_byte_reader_get_uint8_unchecked (&br);
+ remaining_8b = gst_byte_reader_get_uint8_unchecked (&br);
+ interlaced_video = remaining_8b & 0x40;
+ /* we don't support demuxing interlaced at the moment */
+ if (interlaced_video) {
+ GST_ERROR
+ ("GST_MPEGTS_STREAM_TYPE_VIDEO_JP2K: interlaced video not supported");
+ return NULL;
+ } else {
+ interlace_mode = "progressive";
+ stream->jp2kInfos.interlace = FALSE;
+ }
+ switch (color_specification) {
+ case GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_SRGB:
+ colorspace = "sRGB";
+ colorimetry_mode = GST_VIDEO_COLORIMETRY_SRGB;
+ break;
+ case GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_REC601:
+ colorspace = "sYUV";
+ colorimetry_mode = GST_VIDEO_COLORIMETRY_BT601;
+ break;
+ case GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_REC709:
+ case GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_CIELUV:
+ colorspace = "sYUV";
+ colorimetry_mode = GST_VIDEO_COLORIMETRY_BT709;
+ break;
+ default:
+ break;
+ }
+ caps = gst_caps_new_simple ("image/x-jpc",
+ "framerate", GST_TYPE_FRACTION, NUM_frame_rate, DEN_frame_rate,
+ "interlace-mode", G_TYPE_STRING, interlace_mode,
+ "colorimetry", G_TYPE_STRING, colorimetry_mode,
+ "colorspace", G_TYPE_STRING, colorspace, NULL);
+ }
+ break;
+ case ST_VIDEO_DIRAC:
+ if (bstream->registration_id == 0x64726163) {
+ GST_LOG ("dirac");
+ /* dirac in hex */
+ is_video = TRUE;
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
+ }
+ break;
+ case ST_PRIVATE_EA: /* Try to detect a VC1 stream */
+ {
+ gboolean is_vc1 = FALSE;
+
+ /* Note/FIXME: RP-227 specifies that the registration descriptor
+ * for vc1 can also contain other information, such as profile,
+ * level, alignment, buffer_size, .... */
+ if (bstream->registration_id == DRF_ID_VC1)
+ is_vc1 = TRUE;
+ if (!is_vc1) {
+ GST_WARNING ("0xea private stream type found but no descriptor "
+ "for VC1. Assuming plain VC1.");
+ }
+
+ is_video = TRUE;
+ caps = gst_caps_new_simple ("video/x-wmv",
+ "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
+
+ break;
+ }
+ case ST_PS_AUDIO_AC3:
+ /* DVB_ENHANCED_AC3 */
+ desc =
+ mpegts_get_descriptor_from_stream (bstream,
+ GST_MTS_DESC_DVB_ENHANCED_AC3);
+ if (desc) {
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-eac3");
+ break;
+ }
+
+ /* If stream has ac3 descriptor
+ * OR program is ATSC (GA94)
+ * OR stream registration is AC-3
+ * then it's regular AC3 */
+ if (bstream->registration_id == DRF_ID_AC3 ||
+ program->registration_id == DRF_ID_GA94 ||
+ mpegts_get_descriptor_from_stream (bstream, GST_MTS_DESC_DVB_AC3)) {
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac3");
+ break;
+ }
+
+ GST_WARNING ("AC3 stream type found but no guaranteed "
+ "way found to differentiate between AC3 and EAC3. "
+ "Assuming plain AC3.");
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-ac3");
+ break;
+ case ST_PS_AUDIO_EAC3:
+ {
+ /* ATSC_ENHANCED_AC3 */
+ if (bstream->registration_id == DRF_ID_EAC3 ||
+ mpegts_get_descriptor_from_stream (bstream, GST_MTS_DESC_ATSC_EAC3)) {
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-eac3");
+ break;
+ }
+
+ GST_ELEMENT_WARNING (demux, STREAM, DEMUX,
+ ("Assuming ATSC E-AC3 audio stream."),
+ ("ATSC E-AC3 stream type found but no guarantee way found to "
+ "differentiate among other standards (DVB, ISDB and etc..)"));
+
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-eac3");
+ break;
+ }
+ case ST_PS_AUDIO_LPCM2:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-private2-lpcm");
+ break;
+ case ST_PS_AUDIO_DTS:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-dts");
+ break;
+ case ST_PS_AUDIO_LPCM:
+ is_audio = TRUE;
+ caps = gst_caps_new_empty_simple ("audio/x-lpcm");
+ break;
+ case ST_PS_DVD_SUBPICTURE:
+ is_subpicture = TRUE;
+ caps = gst_caps_new_empty_simple ("subpicture/x-dvd");
+ sparse = TRUE;
+ break;
+ case 0x42:
+ /* hack for Chinese AVS video streams which use 0x42 as stream_type
+ * NOTE: this is unofficial and within the ISO reserved range. */
+ is_video = TRUE;
+ caps = gst_caps_new_empty_simple ("video/x-cavs");
+ break;
+ default:
+ GST_DEBUG ("Non-media stream (stream_type:0x%x). Not creating pad",
+ bstream->stream_type);
+ break;
+ }
+
+ done:
+ if (caps) {
+ if (is_audio) {
+ template = gst_static_pad_template_get (&audio_template);
+ name =
+ g_strdup_printf ("audio_%01x_%04x", demux->program_generation,
+ bstream->pid);
+ gst_stream_set_stream_type (bstream->stream_object,
+ GST_STREAM_TYPE_AUDIO);
+ } else if (is_video) {
+ template = gst_static_pad_template_get (&video_template);
+ name =
+ g_strdup_printf ("video_%01x_%04x", demux->program_generation,
+ bstream->pid);
+ gst_stream_set_stream_type (bstream->stream_object,
+ GST_STREAM_TYPE_VIDEO);
+ } else if (is_private) {
+ template = gst_static_pad_template_get (&private_template);
+ name =
+ g_strdup_printf ("private_%01x_%04x", demux->program_generation,
+ bstream->pid);
+ } else if (is_subpicture) {
+ template = gst_static_pad_template_get (&subpicture_template);
+ name =
+ g_strdup_printf ("subpicture_%01x_%04x", demux->program_generation,
+ bstream->pid);
+ gst_stream_set_stream_type (bstream->stream_object, GST_STREAM_TYPE_TEXT);
+ } else
+ g_assert_not_reached ();
+
+ }
+
+ if (template && name && caps) {
+ GstEvent *event;
+ const gchar *stream_id;
+
+ GST_LOG ("stream:%p creating pad with name %s and caps %" GST_PTR_FORMAT,
+ stream, name, caps);
+ pad = gst_pad_new_from_template (template, name);
+ gst_pad_set_active (pad, TRUE);
+ gst_pad_use_fixed_caps (pad);
+ stream_id = gst_stream_get_stream_id (bstream->stream_object);
+
+ event = gst_pad_get_sticky_event (base->sinkpad, GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &demux->group_id))
+ demux->have_group_id = TRUE;
+ else
+ demux->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!demux->have_group_id) {
+ demux->have_group_id = TRUE;
+ demux->group_id = gst_util_group_id_next ();
+ }
+ event = gst_event_new_stream_start (stream_id);
+ gst_event_set_stream (event, bstream->stream_object);
+ if (demux->have_group_id)
+ gst_event_set_group_id (event, demux->group_id);
+ if (sparse) {
+ gst_event_set_stream_flags (event, GST_STREAM_FLAG_SPARSE);
+ gst_stream_set_stream_flags (bstream->stream_object,
+ GST_STREAM_FLAG_SPARSE);
+ }
+ stream->sparse = sparse;
+ gst_stream_set_caps (bstream->stream_object, caps);
+ if (!stream->taglist)
+ stream->taglist = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (stream->taglist, NULL,
+ caps);
+ gst_stream_set_tags (bstream->stream_object, stream->taglist);
+
+ gst_pad_push_event (pad, event);
+ gst_pad_set_caps (pad, caps);
+ gst_pad_set_query_function (pad, gst_ts_demux_srcpad_query);
+ gst_pad_set_event_function (pad, gst_ts_demux_srcpad_event);
+ }
+
+ g_free (name);
+ if (template)
+ gst_object_unref (template);
+ if (caps)
+ gst_caps_unref (caps);
+
+ return pad;
+ }
+
+ static gboolean
+ gst_ts_demux_stream_added (MpegTSBase * base, MpegTSBaseStream * bstream,
+ MpegTSBaseProgram * program)
+ {
+ GstTSDemux *demux = (GstTSDemux *) base;
+ TSDemuxStream *stream = (TSDemuxStream *) bstream;
+
+ if (!stream->pad) {
+ /* Create the pad */
+ if (bstream->stream_type != 0xff) {
+ stream->pad = create_pad_for_stream (base, bstream, program);
+ if (stream->pad)
+ gst_flow_combiner_add_pad (demux->flowcombiner, stream->pad);
+ }
+
+ if (base->mode != BASE_MODE_PUSHING
+ && bstream->stream_type == GST_MPEGTS_STREAM_TYPE_VIDEO_H264) {
+ stream->scan_function =
+ (GstTsDemuxKeyFrameScanFunction) scan_keyframe_h264;
+ } else {
+ stream->scan_function = NULL;
+ }
+
+ stream->active = FALSE;
+
+ stream->need_newsegment = TRUE;
+ /* Reset segment if we're not doing an accurate seek */
+ demux->reset_segment =
+ (!(base->out_segment.flags & GST_SEEK_FLAG_ACCURATE));
+ stream->needs_keyframe = FALSE;
+ stream->discont = TRUE;
+ stream->pts = GST_CLOCK_TIME_NONE;
+ stream->dts = GST_CLOCK_TIME_NONE;
+ stream->first_pts = GST_CLOCK_TIME_NONE;
+ stream->raw_pts = -1;
+ stream->raw_dts = -1;
+ stream->pending_ts = TRUE;
+ stream->nb_out_buffers = 0;
+ stream->gap_ref_buffers = 0;
+ stream->gap_ref_pts = GST_CLOCK_TIME_NONE;
+ /* Only wait for a valid timestamp if we have a PCR_PID */
+ stream->pending_ts = program->pcr_pid < 0x1fff;
+ stream->continuity_counter = CONTINUITY_UNSET;
+ }
+
+ return (stream->pad != NULL);
+ }
+
+ static void
+ tsdemux_h264_parsing_info_clear (TSDemuxH264ParsingInfos * h264infos)
+ {
+ clear_simple_buffer (&h264infos->framedata);
+
+ if (h264infos->parser) {
+ gst_h264_nal_parser_free (h264infos->parser);
+ gst_byte_writer_free (h264infos->sps);
+ gst_byte_writer_free (h264infos->pps);
+ gst_byte_writer_free (h264infos->sei);
+ }
+ }
+
+ static void
+ gst_ts_demux_stream_removed (MpegTSBase * base, MpegTSBaseStream * bstream)
+ {
+ TSDemuxStream *stream = (TSDemuxStream *) bstream;
+
+ if (stream->pad) {
+ gst_flow_combiner_remove_pad (GST_TS_DEMUX_CAST (base)->flowcombiner,
+ stream->pad);
+ if (stream->active) {
+
+ if (gst_pad_is_active (stream->pad)) {
+ /* Flush out all data */
+ GST_DEBUG_OBJECT (stream->pad, "Flushing out pending data");
+ gst_ts_demux_push_pending_data ((GstTSDemux *) base, stream, NULL);
+
+ GST_DEBUG_OBJECT (stream->pad, "Pushing out EOS");
+ gst_pad_push_event (stream->pad, gst_event_new_eos ());
+ gst_pad_set_active (stream->pad, FALSE);
+ }
+
+ GST_DEBUG_OBJECT (stream->pad, "Removing pad");
+ gst_element_remove_pad (GST_ELEMENT_CAST (base), stream->pad);
+ stream->active = FALSE;
+ } else {
+ gst_object_unref (stream->pad);
+ }
+ stream->pad = NULL;
+ }
+
+ gst_ts_demux_stream_flush (stream, GST_TS_DEMUX_CAST (base), TRUE);
+
+ if (stream->taglist != NULL) {
+ gst_tag_list_unref (stream->taglist);
+ stream->taglist = NULL;
+ }
+
+ tsdemux_h264_parsing_info_clear (&stream->h264infos);
+ }
+
+ static void
+ activate_pad_for_stream (GstTSDemux * tsdemux, TSDemuxStream * stream)
+ {
+ if (stream->pad) {
+ GST_DEBUG_OBJECT (tsdemux, "Activating pad %s:%s for stream %p",
+ GST_DEBUG_PAD_NAME (stream->pad), stream);
+ gst_element_add_pad ((GstElement *) tsdemux, stream->pad);
+ stream->active = TRUE;
+ GST_DEBUG_OBJECT (stream->pad, "done adding pad");
+ } else if (((MpegTSBaseStream *) stream)->stream_type != 0xff) {
+ GST_DEBUG_OBJECT (tsdemux,
+ "stream %p (pid 0x%04x, type:0x%02x) has no pad", stream,
+ ((MpegTSBaseStream *) stream)->pid,
+ ((MpegTSBaseStream *) stream)->stream_type);
+ }
+ }
+
+ static void
+ gst_ts_demux_stream_flush (TSDemuxStream * stream, GstTSDemux * tsdemux,
+ gboolean hard)
+ {
+ GST_DEBUG ("flushing stream %p", stream);
+
+ g_free (stream->data);
+ stream->data = NULL;
+ stream->state = PENDING_PACKET_EMPTY;
+ stream->expected_size = 0;
+ stream->allocated_size = 0;
+ stream->current_size = 0;
+ stream->discont = TRUE;
+ stream->pts = GST_CLOCK_TIME_NONE;
+ stream->dts = GST_CLOCK_TIME_NONE;
+ stream->raw_pts = -1;
+ stream->raw_dts = -1;
+ stream->pending_ts = TRUE;
+ stream->nb_out_buffers = 0;
+ stream->gap_ref_buffers = 0;
+ stream->gap_ref_pts = GST_CLOCK_TIME_NONE;
+ stream->continuity_counter = CONTINUITY_UNSET;
+
+ if (G_UNLIKELY (stream->pending)) {
+ GList *tmp;
+
+ GST_DEBUG ("clearing pending %p", stream);
+ for (tmp = stream->pending; tmp; tmp = tmp->next) {
+ PendingBuffer *pend = (PendingBuffer *) tmp->data;
+ gst_buffer_unref (pend->buffer);
+ g_slice_free (PendingBuffer, pend);
+ }
+ g_list_free (stream->pending);
+ stream->pending = NULL;
+ }
+
+ if (hard) {
+ stream->first_pts = GST_CLOCK_TIME_NONE;
+ stream->need_newsegment = TRUE;
+ }
+ }
+
+ static void
+ gst_ts_demux_flush_streams (GstTSDemux * demux, gboolean hard)
+ {
+ GList *walk;
+ if (!demux->program)
+ return;
+
+ for (walk = demux->program->stream_list; walk; walk = g_list_next (walk))
+ gst_ts_demux_stream_flush (walk->data, demux, hard);
+ }
+
+ static gboolean
+ gst_ts_demux_can_remove_program (MpegTSBase * base, MpegTSBaseProgram * program)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (base);
+
+ /* If it's our current active program, we return FALSE, we'll deactivate it
+ * ourselves when the next program gets activated */
+ if (demux->program == program) {
+ GST_DEBUG
+ ("Attempting to remove current program, delaying until new program gets activated");
+ demux->previous_program = program;
+ demux->program_number = -1;
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ static void
+ gst_ts_demux_update_program (MpegTSBase * base, MpegTSBaseProgram * program)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (base);
+ GList *tmp;
+
+ GST_DEBUG ("Updating program %d", program->program_number);
+ /* Emit collection message */
+ gst_element_post_message ((GstElement *) base,
+ gst_message_new_stream_collection ((GstObject *) base,
+ program->collection));
+
+ /* Add all streams, then fire no-more-pads */
+ for (tmp = program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) tmp->data;
++#ifdef TIZEN_FEATURE_TSDEMUX_UPDATE_STREAM
++ if (!stream->pad
++ || (stream->pad && !stream->active)) {
++#else
+ if (!stream->pad) {
++#endif
+ activate_pad_for_stream (demux, stream);
+ if (stream->sparse) {
+ /* force sending of pending sticky events which have been stored on the
+ * pad already and which otherwise would only be sent on the first buffer
+ * or serialized event (which means very late in case of subtitle streams),
+ * and playsink waits for stream-start or another serialized event */
+ GST_DEBUG_OBJECT (stream->pad, "sparse stream, pushing GAP event");
+ gst_pad_push_event (stream->pad, gst_event_new_gap (0, 0));
+ }
+ }
+ }
+ }
+
+ static void
+ gst_ts_demux_program_started (MpegTSBase * base, MpegTSBaseProgram * program)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (base);
+
+ GST_DEBUG ("Current program %d, new program %d requested program %d",
+ (gint) demux->program_number, program->program_number,
+ demux->requested_program_number);
+
+ if (demux->requested_program_number == program->program_number ||
+ (demux->requested_program_number == -1 && demux->program_number == -1)) {
+ GList *tmp;
+ gboolean have_pads = FALSE;
+
+ GST_LOG ("program %d started", program->program_number);
+ demux->program_number = program->program_number;
+ demux->program = program;
+
+ /* Increment the program_generation counter */
+ demux->program_generation = (demux->program_generation + 1) & 0xf;
+
+ /* Emit collection message */
+ gst_element_post_message ((GstElement *) base,
+ gst_message_new_stream_collection ((GstObject *) base,
+ program->collection));
+
+ /* If this is not the initial program, we need to calculate
+ * a new segment */
+ if (demux->segment_event) {
+ gst_event_unref (demux->segment_event);
+ demux->segment_event = NULL;
+ }
+
+ /* DRAIN ALL STREAMS FIRST ! */
+ if (demux->previous_program) {
+ GList *tmp;
+ GST_DEBUG_OBJECT (demux, "Draining previous program");
+ for (tmp = demux->previous_program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) tmp->data;
+ if (stream->pad)
+ gst_ts_demux_push_pending_data (demux, stream,
+ demux->previous_program);
+ }
+ }
+
+ /* Add all streams, then fire no-more-pads */
+ for (tmp = program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) tmp->data;
+ activate_pad_for_stream (demux, stream);
+ if (stream->pad)
+ have_pads = TRUE;
+ }
+
+ /* If there was a previous program, now is the time to deactivate it
+ * and remove old pads (including pushing EOS) */
+ if (demux->previous_program) {
+ GST_DEBUG ("Deactivating previous program");
+ mpegts_base_deactivate_and_free_program (base, demux->previous_program);
+ demux->previous_program = NULL;
+ }
+
+ if (!have_pads) {
+ /* If we had no pads, this stream is likely corrupted or unsupported and
+ * there's not much we can do at this point */
+ GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE,
+ ("This stream contains no valid or supported streams."),
+ ("activating program but got no pads"));
+ return;
+ }
+
+ /* If any of the stream is sparse, push a GAP event before anything else
+ * This is done here, and not in activate_pad_for_stream() because pushing
+ * a GAP event *is* considering data, and we want to ensure the (potential)
+ * old pads are all removed before we push any data on the new ones */
+ for (tmp = program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) tmp->data;
+ if (stream->sparse) {
+ /* force sending of pending sticky events which have been stored on the
+ * pad already and which otherwise would only be sent on the first buffer
+ * or serialized event (which means very late in case of subtitle streams),
+ * and playsink waits for stream-start or another serialized event */
+ GST_DEBUG_OBJECT (stream->pad, "sparse stream, pushing GAP event");
+ gst_pad_push_event (stream->pad, gst_event_new_gap (0, 0));
+ }
+ }
+
+ gst_element_no_more_pads ((GstElement *) demux);
+ }
+ }
+
+ static void
+ gst_ts_demux_program_stopped (MpegTSBase * base, MpegTSBaseProgram * program)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX (base);
+
+ if (demux->program == program) {
+ demux->program = NULL;
+ demux->program_number = -1;
+ }
+ }
+
+
/* Record the PTS seen for @stream at file offset @offset.
 *
 * Stores the raw 90kHz PTS and, when present, its conversion to
 * GstClockTime via the packetizer (relative to the program's PCR PID).
 * When statistics emission is enabled, also posts an element message
 * carrying pid/offset/pts. */
static inline void
gst_ts_demux_record_pts (GstTSDemux * demux, TSDemuxStream * stream,
    guint64 pts, guint64 offset)
{
  MpegTSBaseStream *bs = (MpegTSBaseStream *) stream;

  stream->raw_pts = pts;
  if (pts == -1) {
    /* No PTS in this PES header */
    stream->pts = GST_CLOCK_TIME_NONE;
    return;
  }

  GST_LOG ("pid 0x%04x raw pts:%" G_GUINT64_FORMAT " at offset %"
      G_GUINT64_FORMAT, bs->pid, pts, offset);

  /* Compute PTS in GstClockTime */
  stream->pts =
      mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
      MPEGTIME_TO_GSTTIME (pts), demux->program->pcr_pid);

  GST_LOG ("pid 0x%04x Stored PTS %" G_GUINT64_FORMAT, bs->pid, stream->pts);

  if (G_UNLIKELY (demux->emit_statistics)) {
    GstStructure *st;
    st = gst_structure_new_id_empty (QUARK_TSDEMUX);
    /* Tizen addition: also expose the converted buffer PTS for WebVTT */
    gst_structure_id_set (st,
        QUARK_PID, G_TYPE_UINT, bs->pid,
        QUARK_OFFSET, G_TYPE_UINT64, offset, QUARK_PTS, G_TYPE_UINT64, pts,
#ifdef TIZEN_FEATURE_HLS_WEBVTT
        QUARK_BUFFER_PTS, G_TYPE_UINT64, stream->pts,
#endif
        NULL);
    gst_element_post_message (GST_ELEMENT_CAST (demux),
        gst_message_new_element (GST_OBJECT (demux), st));
  }
}
+
+ static inline void
+ gst_ts_demux_record_dts (GstTSDemux * demux, TSDemuxStream * stream,
+ guint64 dts, guint64 offset)
+ {
+ MpegTSBaseStream *bs = (MpegTSBaseStream *) stream;
+
+ stream->raw_dts = dts;
+ if (dts == -1) {
+ stream->dts = GST_CLOCK_TIME_NONE;
+ return;
+ }
+
+ GST_LOG ("pid 0x%04x raw dts:%" G_GUINT64_FORMAT " at offset %"
+ G_GUINT64_FORMAT, bs->pid, dts, offset);
+
+ /* Compute DTS in GstClockTime */
+ stream->dts =
+ mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
+ MPEGTIME_TO_GSTTIME (dts), demux->program->pcr_pid);
+
+ GST_LOG ("pid 0x%04x Stored DTS %" G_GUINT64_FORMAT, bs->pid, stream->dts);
+
+ if (G_UNLIKELY (demux->emit_statistics)) {
+ GstStructure *st;
+ st = gst_structure_new_id_empty (QUARK_TSDEMUX);
+ gst_structure_id_set (st,
+ QUARK_PID, G_TYPE_UINT, bs->pid,
+ QUARK_OFFSET, G_TYPE_UINT64, offset, QUARK_DTS, G_TYPE_UINT64, dts,
+ NULL);
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_element (GST_OBJECT (demux), st));
+ }
+ }
+
+ /* This is called when we haven't got a valid initial PTS/DTS on all streams */
+ static gboolean
+ check_pending_buffers (GstTSDemux * demux)
+ {
+ gboolean have_observation = FALSE;
+ /* The biggest offset */
+ guint64 offset = 0;
+ GList *tmp;
+ gboolean have_only_sparse = TRUE;
+ gboolean exceeded_threshold = FALSE;
++#ifdef TIZEN_FEATURE_TSDEMUX_INVALID_PCR_PID
++ gboolean wrong_pcr_pid = FALSE;
++#endif
+
+ /* 0. Do we only have sparse stream */
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *tmpstream = (TSDemuxStream *) tmp->data;
+
+ if (!tmpstream->sparse) {
+ have_only_sparse = FALSE;
+ break;
+ }
+ }
+
+ /* 1. Go over all streams */
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *tmpstream = (TSDemuxStream *) tmp->data;
+ /* 1.1 check if at least one stream got a valid DTS */
+ if (have_only_sparse || !tmpstream->sparse) {
+ if ((tmpstream->raw_dts != -1 && tmpstream->dts != GST_CLOCK_TIME_NONE) ||
+ (tmpstream->raw_pts != -1 && tmpstream->pts != GST_CLOCK_TIME_NONE)) {
+ have_observation = TRUE;
+ break;
+ }
+ /* 1.2 Check if we exceeded the maximum threshold of pending data */
+ if (tmpstream->pending && (tmpstream->raw_dts != -1
+ || tmpstream->raw_pts != -1)) {
+ PendingBuffer *pend = tmpstream->pending->data;
+ guint64 lastval =
+ tmpstream->raw_dts != -1 ? tmpstream->raw_dts : tmpstream->raw_pts;
+ guint64 firstval = pend->dts != -1 ? pend->dts : pend->pts;
+ GstClockTime dur;
+ g_assert (firstval != -1);
+ dur = MPEGTIME_TO_GSTTIME (lastval - firstval);
+ GST_DEBUG_OBJECT (tmpstream->pad,
+ "Pending content duration: %" GST_TIME_FORMAT, GST_TIME_ARGS (dur));
+ if (dur > 500 * GST_MSECOND) {
+ exceeded_threshold = TRUE;
+ break;
+ }
+ }
+ }
+ }
+
+ if (have_observation == FALSE) {
+ /* 2. If we don't have a valid value yet, break out */
+ if (!exceeded_threshold)
+ return FALSE;
+
++#ifdef TIZEN_FEATURE_TSDEMUX_INVALID_PCR_PID
++ /* Checking PCR other pid */
++ for (int i = 0; i < demux->program->pmt->streams->len; ++i) {
++ GstMpegtsPMTStream *pmt_stream =
++ g_ptr_array_index (demux->program->pmt->streams, i);
++ if (pmt_stream->pid == demux->program->pcr_pid)
++ continue;
++ if (GST_CLOCK_TIME_IS_VALID (mpegts_packetizer_get_pcr_base_time
++ (MPEG_TS_BASE_PACKETIZER (demux), pmt_stream->pid))) {
++ GST_WARNING ("PCR_PID will update %x -> %x", demux->program->pcr_pid,
++ pmt_stream->pid);
++ demux->program->pcr_pid = pmt_stream->pid;
++ wrong_pcr_pid = TRUE;
++ break;
++ }
++ }
++ if (!wrong_pcr_pid)
++ return FALSE;
++#endif
+ /* Except if we've exceed the maximum amount of pending buffers, in which
+ * case we ignore PCR from now on */
+ GST_DEBUG_OBJECT (demux,
+ "Saw more than 500ms of data without PCR. Ignoring PCR from now on");
+ GST_MPEGTS_BASE (demux)->ignore_pcr = TRUE;
+ demux->program->pcr_pid = 0x1fff;
+ g_object_notify (G_OBJECT (demux), "ignore-pcr");
+ }
+
+ /* 3. Go over all streams that have current/pending data */
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *tmpstream = (TSDemuxStream *) tmp->data;
+ PendingBuffer *pend;
+ guint64 firstval, lastval, ts;
+
+ /* 3.1 Calculate the offset between current DTS and first DTS */
+ if (tmpstream->pending == NULL || tmpstream->state == PENDING_PACKET_EMPTY)
+ continue;
+ /* If we don't have any pending data, the offset is 0 for this stream */
+ if (tmpstream->pending == NULL)
+ break;
+ if (tmpstream->raw_dts != -1)
+ lastval = tmpstream->raw_dts;
+ else if (tmpstream->raw_pts != -1)
+ lastval = tmpstream->raw_pts;
+ else {
+ GST_WARNING ("Don't have a last DTS/PTS to use for offset recalculation");
+ continue;
+ }
+ pend = tmpstream->pending->data;
+ if (pend->dts != -1)
+ firstval = pend->dts;
+ else if (pend->pts != -1)
+ firstval = pend->pts;
+ else {
+ GST_WARNING
+ ("Don't have a first DTS/PTS to use for offset recalculation");
+ continue;
+ }
+ /* 3.2 Add to the offset the report TS for the current DTS */
+ ts = mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
+ MPEGTIME_TO_GSTTIME (lastval), demux->program->pcr_pid);
+ if (ts == GST_CLOCK_TIME_NONE) {
+ GST_WARNING ("THIS SHOULD NOT HAPPEN !");
+ continue;
+ }
+ ts += MPEGTIME_TO_GSTTIME (lastval - firstval);
+ /* 3.3 If that offset is bigger than the current offset, store it */
+ if (ts > offset)
+ offset = ts;
+ }
+
+ GST_DEBUG ("New initial pcr_offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (offset));
+
+ /* 4. Set the offset on the packetizer */
+ mpegts_packetizer_set_current_pcr_offset (MPEG_TS_BASE_PACKETIZER (demux),
+ offset, demux->program->pcr_pid);
+
+ /* 4. Go over all streams */
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) tmp->data;
+
+ stream->pending_ts = FALSE;
+ /* 4.1 Set pending_ts for FALSE */
+
+ /* 4.2 Recalculate PTS/DTS (in running time) for pending data */
+ if (stream->pending) {
+ GList *tmp2;
+ for (tmp2 = stream->pending; tmp2; tmp2 = tmp2->next) {
+ PendingBuffer *pend = (PendingBuffer *) tmp2->data;
+ if (pend->pts != -1)
+ GST_BUFFER_PTS (pend->buffer) =
+ mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
+ MPEGTIME_TO_GSTTIME (pend->pts), demux->program->pcr_pid);
+ if (pend->dts != -1)
+ GST_BUFFER_DTS (pend->buffer) =
+ mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
+ MPEGTIME_TO_GSTTIME (pend->dts), demux->program->pcr_pid);
+ /* 4.2.2 Set first_pts to TS of lowest PTS (for segment) */
+ if (stream->first_pts == GST_CLOCK_TIME_NONE) {
+ if (GST_BUFFER_PTS (pend->buffer) != GST_CLOCK_TIME_NONE)
+ stream->first_pts = GST_BUFFER_PTS (pend->buffer);
+ else if (GST_BUFFER_DTS (pend->buffer) != GST_CLOCK_TIME_NONE)
+ stream->first_pts = GST_BUFFER_DTS (pend->buffer);
+ }
+ }
+ }
+ /* Recalculate PTS/DTS (in running time) for current data */
+ if (stream->state != PENDING_PACKET_EMPTY) {
+ if (stream->raw_pts != -1) {
+ stream->pts =
+ mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
+ MPEGTIME_TO_GSTTIME (stream->raw_pts), demux->program->pcr_pid);
+ if (stream->first_pts == GST_CLOCK_TIME_NONE)
+ stream->first_pts = stream->pts;
+ }
+ if (stream->raw_dts != -1) {
+ stream->dts =
+ mpegts_packetizer_pts_to_ts (MPEG_TS_BASE_PACKETIZER (demux),
+ MPEGTIME_TO_GSTTIME (stream->raw_dts), demux->program->pcr_pid);
+ if (stream->first_pts == GST_CLOCK_TIME_NONE)
+ stream->first_pts = stream->dts;
+ }
+ }
+ }
+
+ return TRUE;
+ }
+
/* Parse the PES header contained in @data (@length bytes, read at file
 * offset @bufferoffset) and prepare @stream for payload accumulation.
 *
 * On success, allocates the accumulation buffer, copies the payload that
 * follows the header and switches the stream to PENDING_PACKET_BUFFER.
 * On any parse failure (or unwanted substream) the stream is switched to
 * PENDING_PACKET_DISCONT instead. */
static void
gst_ts_demux_parse_pes_header (GstTSDemux * demux, TSDemuxStream * stream,
    guint8 * data, guint32 length, guint64 bufferoffset)
{
  PESHeader header;
  PESParsingResult parseres;

  GST_MEMDUMP ("Header buffer", data, MIN (length, 32));

  parseres = mpegts_parse_pes_header (data, length, &header);
  if (G_UNLIKELY (parseres == PES_PARSING_NEED_MORE))
    goto discont;
  if (G_UNLIKELY (parseres == PES_PARSING_BAD)) {
    GST_WARNING ("Error parsing PES header. pid: 0x%x stream_type: 0x%x",
        stream->stream.pid, stream->stream.stream_type);
    goto discont;
  }

  /* Only keep the substream we were asked for (0 means "any") */
  if (stream->target_pes_substream != 0
      && header.stream_id_extension != stream->target_pes_substream) {
    GST_DEBUG ("Skipping unwanted substream");
    goto discont;
  }

  gst_ts_demux_record_dts (demux, stream, header.DTS, bufferoffset);
  gst_ts_demux_record_pts (demux, stream, header.PTS, bufferoffset);
  /* If this stream was still waiting for an initial timestamp and we just
   * got one, recheck whether all pending streams can now be timestamped */
  if (G_UNLIKELY (stream->pending_ts &&
          (stream->pts != GST_CLOCK_TIME_NONE
              || stream->dts != GST_CLOCK_TIME_NONE))) {
    GST_DEBUG ("Got pts/dts update, rechecking all streams");
    check_pending_buffers (demux);
  } else if (stream->first_pts == GST_CLOCK_TIME_NONE) {
    if (GST_CLOCK_TIME_IS_VALID (stream->pts))
      stream->first_pts = stream->pts;
    else if (GST_CLOCK_TIME_IS_VALID (stream->dts))
      stream->first_pts = stream->dts;
  }

  GST_DEBUG_OBJECT (demux,
      "stream PTS %" GST_TIME_FORMAT " DTS %" GST_TIME_FORMAT,
      GST_TIME_ARGS (stream->pts), GST_TIME_ARGS (stream->dts));

  /* Remove PES headers */
  GST_DEBUG ("Moving data forward by %d bytes (packet_size:%d, have:%d)",
      header.header_size, header.packet_length, length);
  stream->expected_size = header.packet_length;
  if (stream->expected_size) {
    if (G_LIKELY (stream->expected_size > header.header_size)) {
      stream->expected_size -= header.header_size;
    } else {
      /* next packet will have to complete this one */
      GST_WARNING ("invalid header and packet size combination, empty packet");
      stream->expected_size = 0;
    }
  }
  data += header.header_size;
  length -= header.header_size;

  /* Create the output buffer.
   * expected_size == 0 means an unbounded PES packet: start at 8192 and
   * grow on demand in gst_ts_demux_queue_data() */
  if (stream->expected_size)
    stream->allocated_size = MAX (stream->expected_size, length);
  else
    stream->allocated_size = MAX (8192, length);

  g_assert (stream->data == NULL);
  stream->data = g_malloc (stream->allocated_size);
  memcpy (stream->data, data, length);
  stream->current_size = length;

  stream->state = PENDING_PACKET_BUFFER;

  return;

discont:
  stream->state = PENDING_PACKET_DISCONT;
  return;
}
+
+ /* ONLY CALL THIS:
+ * * WITH packet->payload != NULL
+ * * WITH pending/current flushed out if beginning of new PES packet
+ */
/* Queue the payload of one TS @packet on @stream.
 *
 * Tracks the 4-bit continuity counter to detect dropped packets; on a
 * mismatch the current accumulation is either restarted (if the packet
 * begins a new PES) or discarded (PENDING_PACKET_DISCONT).  Depending on
 * the stream state, the payload is then parsed as a PES header or
 * appended to the accumulation buffer (grown by doubling as needed). */
static inline void
gst_ts_demux_queue_data (GstTSDemux * demux, TSDemuxStream * stream,
    MpegTSPacketizerPacket * packet)
{
  guint8 *data;
  guint size;
  guint8 cc = FLAGS_CONTINUITY_COUNTER (packet->scram_afc_cc);

  GST_LOG ("pid: 0x%04x state:%d", stream->stream.pid, stream->state);

  size = packet->data_end - packet->payload;
  data = packet->payload;

  /* Continuity counter check: counter wraps from MAX_CONTINUITY to 0 */
  if (stream->continuity_counter == CONTINUITY_UNSET) {
    GST_DEBUG ("CONTINUITY: Initialize to %d", cc);
  } else if ((cc == stream->continuity_counter + 1 ||
          (stream->continuity_counter == MAX_CONTINUITY && cc == 0))) {
    GST_LOG ("CONTINUITY: Got expected %d", cc);
  } else {
    if (stream->state != PENDING_PACKET_EMPTY) {
      if (packet->payload_unit_start_indicator) {
        /* A mismatch is fatal, except if this is the beginning of a new
         * frame (from which we can recover) */
        if (G_UNLIKELY (stream->data)) {
          g_free (stream->data);
          stream->data = NULL;
        }
        stream->state = PENDING_PACKET_HEADER;
      } else {
        GST_WARNING ("CONTINUITY: Mismatch packet %d, stream %d",
            cc, stream->continuity_counter);
        stream->state = PENDING_PACKET_DISCONT;
      }
    }
  }
  stream->continuity_counter = cc;

  /* A fresh stream can only start on a payload-unit-start packet */
  if (stream->state == PENDING_PACKET_EMPTY) {
    if (G_UNLIKELY (!packet->payload_unit_start_indicator)) {
      stream->state = PENDING_PACKET_DISCONT;
      GST_DEBUG ("Didn't get the first packet of this PES");
    } else {
      GST_LOG ("EMPTY=>HEADER");
      stream->state = PENDING_PACKET_HEADER;
    }
  }

  switch (stream->state) {
    case PENDING_PACKET_HEADER:
    {
      GST_LOG ("HEADER: Parsing PES header");

      /* parse the header */
      gst_ts_demux_parse_pes_header (demux, stream, data, size, packet->offset);
      break;
    }
    case PENDING_PACKET_BUFFER:
    {
      GST_LOG ("BUFFER: appending data");
      /* Grow the accumulation buffer by doubling until the payload fits */
      if (G_UNLIKELY (stream->current_size + size > stream->allocated_size)) {
        GST_LOG ("resizing buffer");
        do {
          stream->allocated_size = MAX (8192, 2 * stream->allocated_size);
        } while (stream->current_size + size > stream->allocated_size);
        stream->data = g_realloc (stream->data, stream->allocated_size);
      }
      memcpy (stream->data + stream->current_size, data, size);
      stream->current_size += size;
      break;
    }
    case PENDING_PACKET_DISCONT:
    {
      GST_LOG ("DISCONT: not storing/pushing");
      if (G_UNLIKELY (stream->data)) {
        g_free (stream->data);
        stream->data = NULL;
      }
      /* Restart continuity tracking from the next packet */
      stream->continuity_counter = CONTINUITY_UNSET;
      break;
    }
    default:
      break;
  }

  return;
}
+
/* Compute (if needed) and push the segment event for @stream, followed by
 * global/pending tags.  The segment start is derived from the lowest
 * first_pts across the target program's streams, unless a segment event
 * was already prepared.  Called with @target_program NULL to mean the
 * currently active program. */
static void
calculate_and_push_newsegment (GstTSDemux * demux, TSDemuxStream * stream,
    MpegTSBaseProgram * target_program)
{
  MpegTSBase *base = (MpegTSBase *) demux;
  GstClockTime lowest_pts = GST_CLOCK_TIME_NONE;
  GstClockTime firstts = 0;
  GList *tmp;

  GST_DEBUG ("Creating new newsegment for stream %p", stream);

  if (target_program == NULL)
    target_program = demux->program;

  /* Speedup : if we don't need to calculate anything, go straight to pushing */
  if (demux->segment_event)
    goto push_new_segment;

  /* Calculate the 'new_start' value, used for newsegment */
  for (tmp = target_program->stream_list; tmp; tmp = tmp->next) {
    TSDemuxStream *pstream = (TSDemuxStream *) tmp->data;

    if (GST_CLOCK_TIME_IS_VALID (pstream->first_pts)) {
      if (!GST_CLOCK_TIME_IS_VALID (lowest_pts)
          || pstream->first_pts < lowest_pts)
        lowest_pts = pstream->first_pts;
    }
  }
  if (GST_CLOCK_TIME_IS_VALID (lowest_pts))
    firstts = lowest_pts;
  GST_DEBUG ("lowest_pts %" G_GUINT64_FORMAT " => clocktime %" GST_TIME_FORMAT,
      lowest_pts, GST_TIME_ARGS (firstts));

  if (base->out_segment.format != GST_FORMAT_TIME || demux->reset_segment) {
    /* It will happen only if it's first program or after flushes. */
    GST_DEBUG ("Calculating actual segment");
    if (base->segment.format == GST_FORMAT_TIME) {
      /* Try to recover segment info from base if it's in TIME format */
      base->out_segment = base->segment;
    } else {
      /* Start from the first ts/pts.
       * NOTE(review): the local GstClockTime 'base' below shadows the
       * outer 'MpegTSBase *base' pointer for the rest of this scope —
       * intentional but easy to misread. */
      GstSegment *seg = &base->out_segment;
      GstClockTime base =
          seg->base + seg->position - (seg->start + seg->offset);
      GstClockTime stop = seg->stop;

      gst_segment_init (seg, GST_FORMAT_TIME);
      seg->start = firstts;
      seg->stop = MAX (seg->start, stop);
      seg->position = firstts;
      seg->time = firstts;
      seg->rate = demux->rate;
      seg->base = base;
    }
  } else if (base->out_segment.start < firstts) {
    /* Take into account the offset to the first buffer timestamp */
    if (base->out_segment.rate > 0) {
      if (GST_CLOCK_TIME_IS_VALID (base->out_segment.stop))
        base->out_segment.stop += firstts - base->out_segment.start;
      base->out_segment.start = firstts;
      base->out_segment.position = firstts;
    }
  }

  GST_LOG_OBJECT (demux, "Output segment now %" GST_SEGMENT_FORMAT,
      &base->out_segment);

  if (!demux->segment_event) {
    demux->segment_event = gst_event_new_segment (&base->out_segment);

    /* Keep the seek seqnum so downstream can correlate segment and seek */
    if (base->last_seek_seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (demux->segment_event, base->last_seek_seqnum);
  }

push_new_segment:
  for (tmp = target_program->stream_list; tmp; tmp = tmp->next) {
    stream = (TSDemuxStream *) tmp->data;
    if (stream->pad == NULL)
      continue;

    if (demux->segment_event) {
      GST_DEBUG_OBJECT (stream->pad, "Pushing newsegment event");

      gst_event_ref (demux->segment_event);
      gst_pad_push_event (stream->pad, demux->segment_event);
    }

    if (demux->global_tags) {
      gst_pad_push_event (stream->pad,
          gst_event_new_tag (gst_tag_list_ref (demux->global_tags)));
    }
#ifdef TIZEN_FEATURE_TSDEMUX_LANG_TAG
    /* Tizen addition: flush any queued custom event on the first pad */
    if (demux->pending_custom_event) {
      GST_DEBUG_OBJECT (stream->pad, "Pushing custom event");
      gst_pad_push_event (stream->pad, demux->pending_custom_event);
      demux->pending_custom_event = NULL;
    }
#endif

    /* Push pending tags */
    if (stream->taglist) {
#ifdef TIZEN_FEATURE_TSDEMUX_LANG_TAG
      /* Tizen addition: attach pending language tags to audio streams
       * (presence of GST_TAG_AUDIO_CODEC identifies an audio pad) */
      if (demux->pending_language_tag) {
        gchar *audio_codec = NULL;
        if (gst_tag_list_get_string (stream->taglist, GST_TAG_AUDIO_CODEC,
                &audio_codec)) {
          GST_DEBUG ("Sending pending language tags %" GST_PTR_FORMAT,
              demux->pending_language_tag);
          gst_pad_push_event (stream->pad,
              gst_event_new_tag (gst_tag_list_ref
                  (demux->pending_language_tag)));
          g_free (audio_codec);
          gst_tag_list_unref (demux->pending_language_tag);
          demux->pending_language_tag = NULL;
        }
      }
#endif
      GST_DEBUG_OBJECT (stream->pad, "Sending tags %" GST_PTR_FORMAT,
          stream->taglist);
      gst_pad_push_event (stream->pad, gst_event_new_tag (stream->taglist));
      stream->taglist = NULL;
    }

    stream->need_newsegment = FALSE;
  }
}
+
/* Bring every stream of the current program up to at least @time by
 * pushing GAP events on pads that have produced no buffers since the
 * last check. */
static void
gst_ts_demux_check_and_sync_streams (GstTSDemux * demux, GstClockTime time)
{
  GList *tmp;

  GST_DEBUG_OBJECT (demux,
      "Recheck streams and sync to at least: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (time));

  if (G_UNLIKELY (demux->program == NULL))
    return;

  /* Go over each stream and update it to at least 'time' time.
   * For each stream, the pad stores the buffer counter the last time
   * a gap check occurred (gap_ref_buffers) and a gap_ref_pts timestamp
   * that is either the PTS from the stream or the PCR the pad was updated
   * to.
   *
   * We can check nb_out_buffers to see if any buffers were pushed since then.
   * This means we can detect buffers passing without PTSes fine and still generate
   * gaps.
   *
   * If there haven't been any buffers pushed on this stream since the last
   * gap check, push a gap event updating to the indicated input PCR time
   * and update the pad's tracking.
   *
   * If there have been buffers pushed, update the reference buffer count
   * but don't push a gap event
   */
  for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
    TSDemuxStream *ps = (TSDemuxStream *) tmp->data;
    GST_DEBUG_OBJECT (ps->pad,
        "0x%04x, PTS:%" GST_TIME_FORMAT " REFPTS:%" GST_TIME_FORMAT " Gap:%"
        GST_TIME_FORMAT " nb_buffers: %d (ref:%d)",
        ((MpegTSBaseStream *) ps)->pid, GST_TIME_ARGS (ps->pts),
        GST_TIME_ARGS (ps->gap_ref_pts),
        GST_TIME_ARGS (ps->pts - ps->gap_ref_pts), ps->nb_out_buffers,
        ps->gap_ref_buffers);
    if (ps->pad == NULL)
      continue;

    if (ps->nb_out_buffers == ps->gap_ref_buffers && ps->gap_ref_pts != ps->pts) {
      /* Do initial setup of pad if needed - segment etc */
      GST_DEBUG_OBJECT (ps->pad,
          "Stream needs update. Pushing GAP event to TS %" GST_TIME_FORMAT,
          GST_TIME_ARGS (time));
      if (G_UNLIKELY (ps->need_newsegment))
        calculate_and_push_newsegment (demux, ps, NULL);

      /* Now send gap event */
      gst_pad_push_event (ps->pad, gst_event_new_gap (time, 0));
    }

    /* Update GAP tracking vars so we don't re-check this stream for a while */
    ps->gap_ref_pts = time;
    if (ps->pts != GST_CLOCK_TIME_NONE && ps->pts > time)
      ps->gap_ref_pts = ps->pts;
    ps->gap_ref_buffers = ps->nb_out_buffers;
  }
}
+
/* Split the accumulated PES payload of @stream into individual Opus
 * packets (TS encapsulation per ETSI EN 303 560 / Opus-in-TS draft:
 * control header with 11-bit 0x3ff prefix, variable-length size field,
 * optional start/end trim and control extension).
 *
 * Consumes and frees stream->data.  Returns a GstBufferList of the
 * packets (with clipping meta when trim flags were set), or NULL on a
 * malformed access unit. */
static GstBufferList *
parse_opus_access_unit (TSDemuxStream * stream)
{
  GstByteReader reader;
  GstBufferList *buffer_list = NULL;

  buffer_list = gst_buffer_list_new ();
  gst_byte_reader_init (&reader, stream->data, stream->current_size);

  do {
    GstBuffer *buffer;
    guint16 id;
    guint au_size = 0;
    guint8 b;
    gboolean start_trim_flag, end_trim_flag, control_extension_flag;
    guint16 start_trim = 0, end_trim = 0;
    guint8 *packet_data;
    guint packet_size;

    if (!gst_byte_reader_get_uint16_be (&reader, &id))
      goto error;

    /* No control header */
    if ((id >> 5) != 0x3ff)
      goto error;

    /* au_size is encoded as a run of 0xff bytes plus a terminator byte */
    do {
      if (!gst_byte_reader_get_uint8 (&reader, &b))
        goto error;
      au_size += b;
    } while (b == 0xff);

    start_trim_flag = (id >> 4) & 0x1;
    end_trim_flag = (id >> 3) & 0x1;
    control_extension_flag = (id >> 2) & 0x1;

    if (start_trim_flag) {
      if (!gst_byte_reader_get_uint16_be (&reader, &start_trim))
        goto error;
    }

    if (end_trim_flag) {
      if (!gst_byte_reader_get_uint16_be (&reader, &end_trim))
        goto error;
    }

    if (control_extension_flag) {
      /* one length byte, then that many bytes of extension data to skip */
      if (!gst_byte_reader_get_uint8 (&reader, &b))
        goto error;

      if (!gst_byte_reader_skip (&reader, b))
        goto error;
    }

    packet_size = au_size;

    /* FIXME: this should be
     * packet_size = au_size - gst_byte_reader_get_pos (&reader);
     * but ffmpeg and the only available sample stream from obe.tv
     * are not including the control header size in au_size
     */
    if (gst_byte_reader_get_remaining (&reader) < packet_size)
      goto error;
    if (!gst_byte_reader_dup_data (&reader, packet_size, &packet_data))
      goto error;

    buffer = gst_buffer_new_wrapped (packet_data, packet_size);

    if (start_trim != 0 || end_trim != 0) {
      gst_buffer_add_audio_clipping_meta (buffer, GST_FORMAT_DEFAULT,
          start_trim, end_trim);
    }

    gst_buffer_list_add (buffer_list, buffer);
  } while (gst_byte_reader_get_remaining (&reader) > 0);

  g_free (stream->data);
  stream->data = NULL;
  stream->current_size = 0;

  return buffer_list;

error:
  {
    GST_ERROR ("Failed to parse Opus access unit");
    g_free (stream->data);
    stream->data = NULL;
    stream->current_size = 0;
    if (buffer_list)
      gst_buffer_list_unref (buffer_list);
    return NULL;
  }
}
+
+ /* interlaced mode is disabled at the moment */
+ /*#define TSDEMUX_JP2K_SUPPORT_INTERLACE */
+ static GstBuffer *
+ parse_jp2k_access_unit (TSDemuxStream * stream)
+ {
+ GstByteReader reader;
+ /* header tag */
+ guint32 header_tag;
+ /* Framerate box */
+ guint16 den G_GNUC_UNUSED;
+ guint16 num G_GNUC_UNUSED;
+ /* Maximum bitrate box */
+ guint32 MaxBr G_GNUC_UNUSED;
+ guint32 AUF[2] = { 0, 0 };
+ #ifdef TSDEMUX_JP2K_SUPPORT_INTERLACE
+ /* Field Coding Box */
+ guint8 Fic G_GNUC_UNUSED = 1;
+ guint8 Fio G_GNUC_UNUSED = 0;
+ /* header size equals 38 for non-interlaced, and 48 for interlaced */
+ guint header_size = stream->jp2kInfos.interlace ? 48 : 38;
+ #else
+ /* header size equals 38 for non-interlaced, and 48 for interlaced */
+ guint header_size = 38;
+ #endif
+ /* Time Code box */
+ guint32 HHMMSSFF G_GNUC_UNUSED;
+ /* Broadcast color box */
+ guint8 CollC G_GNUC_UNUSED;
+ guint8 b G_GNUC_UNUSED;
+
+ guint data_location;
+ GstBuffer *retbuf = NULL;
+
+ if (stream->current_size < header_size) {
+ GST_ERROR_OBJECT (stream->pad, "Not enough data for header");
+ goto error;
+ }
+
+ gst_byte_reader_init (&reader, stream->data, stream->current_size);
+
+ /* Check for the location of the jp2k magic */
+ data_location =
+ gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff, 0xff4fff51, 0,
+ stream->current_size);
+ GST_DEBUG_OBJECT (stream->pad, "data location %d", data_location);
+ if (data_location == -1) {
+ GST_ERROR_OBJECT (stream->pad, "Stream does not contain jp2k magic header");
+ goto error;
+ }
+
+ /* Elementary stream header box 'elsm' == 0x656c736d */
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (header_tag != 0x656c736d) {
+ GST_ERROR_OBJECT (stream->pad, "Expected ELSM box but found box %x instead",
+ header_tag);
+ goto error;
+ }
+ /* Frame rate box 'frat' == 0x66726174 */
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (header_tag != 0x66726174) {
+ GST_ERROR_OBJECT (stream->pad,
+ "Expected frame rate box, but found box %x instead", header_tag);
+ goto error;
+
+ }
+ den = gst_byte_reader_get_uint16_be_unchecked (&reader);
+ num = gst_byte_reader_get_uint16_be_unchecked (&reader);
+ /* Maximum bit rate box 'brat' == 0x62726174 */
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (header_tag != 0x62726174) {
+ GST_ERROR_OBJECT (stream->pad, "Expected brat box but read box %x instead",
+ header_tag);
+ goto error;
+
+ }
+ MaxBr = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ AUF[0] = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (stream->jp2kInfos.interlace) {
+ #ifdef TSDEMUX_JP2K_SUPPORT_INTERLACE
+ AUF[1] = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ /* Field Coding Box 'fiel' == 0x6669656c */
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (header_tag != 0x6669656c) {
+ GST_ERROR_OBJECT (stream->pad,
+ "Expected Field Coding box but found box %x instead", header_tag);
+ goto error;
+ }
+ Fic = gst_byte_reader_get_uint8_unchecked (&reader);
+ Fio = gst_byte_reader_get_uint8_unchecked (&reader);
+ #else
+ GST_ERROR_OBJECT (stream->pad, "interlaced mode not supported");
+ goto error;
+ #endif
+ }
+
+ /* Time Code Box 'tcod' == 0x74636f64 */
+ /* Some progressive streams might have a AUF[1] of value 0 present */
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (header_tag == 0 && !stream->jp2kInfos.interlace) {
+ AUF[1] = header_tag;
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ /* Bump up header size and recheck */
+ header_size += 4;
+ if (stream->current_size < header_size) {
+ GST_ERROR_OBJECT (stream->pad, "Not enough data for header");
+ goto error;
+ }
+ }
+ if (header_tag != 0x74636f64) {
+ GST_ERROR_OBJECT (stream->pad,
+ "Expected Time code box but found %d box instead", header_tag);
+ goto error;
+ }
+ HHMMSSFF = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ /* Broadcast Color Box 'bcol' == 0x6263686c */
+ header_tag = gst_byte_reader_get_uint32_be_unchecked (&reader);
+ if (header_tag != 0x62636f6c) {
+ GST_ERROR_OBJECT (stream->pad,
+ "Expected Broadcast color box but found %x box instead", header_tag);
+ goto error;
+ }
+ CollC = gst_byte_reader_get_uint8_unchecked (&reader);
+ b = gst_byte_reader_get_uint8_unchecked (&reader);
+
+ /* Check if we have enough data to create a valid buffer */
+ if ((stream->current_size - data_location) < (AUF[0] + AUF[1])) {
+ GST_ERROR_OBJECT
+ (stream->pad,
+ "Required size (%d) greater than remaining size in buffer (%d)",
+ AUF[0] + AUF[1], (stream->current_size - data_location));
+ if (stream->expected_size && stream->current_size != stream->expected_size) {
+ /* warn if buffer is truncated due to draining */
+ GST_WARNING_OBJECT
+ (stream->pad,
+ "Truncated buffer: current size (%d) doesn't match expected size (%d)",
+ stream->current_size, stream->expected_size);
+ } else {
+ /* kill pipeline if either we don't know what expected size is, or
+ * we know the expected size, and thus are sure that the buffer is not
+ * truncated due to draining */
+ goto error;
+ }
+ }
+
+ retbuf = gst_buffer_new_wrapped_full (0, stream->data, stream->current_size,
+ data_location, stream->current_size - data_location,
+ stream->data, g_free);
+ stream->data = NULL;
+ stream->current_size = 0;
+ return retbuf;
+
+ error:
+ GST_ERROR ("Failed to parse JP2K access unit");
+ g_free (stream->data);
+ stream->data = NULL;
+ stream->current_size = 0;
+ return NULL;
+ }
+
/* Scan the accumulated payload of @stream for an ADTS syncword, sniff the
 * MPEG version bit of the first frame, and update the pad/stream caps when
 * the version changed.
 *
 * Always consumes stream->data by wrapping it into the returned buffer;
 * if no valid ADTS header is found, the default mpegversion (4) is used
 * for the caps comparison and the data is passed through unchanged. */
static GstBuffer *
parse_aac_adts_frame (TSDemuxStream * stream)
{
  gint data_location = -1;
  guint frame_len;
  guint crc_size;
  guint mpegversion = 4;
  gint i;

  if (stream->current_size < 6) {
    GST_DEBUG_OBJECT (stream->pad, "Not enough data for header");
    goto out;
  }

  /* check syncword (0xfff in the top 12 bits, layer bits == 0) */
  for (i = 0; i < stream->current_size - 2; i++) {
    if ((stream->data[i] == 0xff) && ((stream->data[i + 1] & 0xf6) == 0xf0)) {
      data_location = i;
      break;
    }
  }

  GST_TRACE_OBJECT (stream->pad, "data location %d", data_location);

  if (data_location == -1) {
    GST_DEBUG_OBJECT (stream->pad, "Stream does not contain adts syncword");
    goto out;
  }

  if (stream->current_size - data_location < 6) {
    GST_DEBUG_OBJECT (stream->pad, "Not enough data for header");
    goto out;
  }

  /* 13-bit aac_frame_length spanning bytes 3-5 of the ADTS header */
  frame_len = ((stream->data[data_location + 3] & 0x03) << 11) |
      (stream->data[data_location + 4] << 3) | ((stream->data[data_location +
              5] & 0xe0) >> 5);

  /* protection_absent bit: CRC adds 2 bytes when protection is present */
  crc_size = (stream->data[data_location + 1] & 0x01) ? 0 : 2;

  if (frame_len < 7 + crc_size) {
    GST_DEBUG_OBJECT (stream->pad, "Invalid frame len %d", frame_len);
    goto out;
  }

  /* this seems to be valid adts header, check mpeg version now
   *
   * TODO: check channels, rate, and profile and then update caps too?
   */
  mpegversion = (stream->data[data_location + 1] & 0x08) ? 2 : 4;

out:
  if (mpegversion != stream->atdsInfos.mpegversion) {
    GstCaps *caps;
    MpegTSBaseStream *bstream = (MpegTSBaseStream *) stream;

    GST_DEBUG_OBJECT (stream->pad, "Update mpegversion from %d to %d",
        stream->atdsInfos.mpegversion, mpegversion);
    stream->atdsInfos.mpegversion = mpegversion;

    caps = gst_stream_get_caps (bstream->stream_object);
    caps = gst_caps_make_writable (caps);

    gst_caps_set_simple (caps, "mpegversion", G_TYPE_INT, mpegversion, NULL);
    gst_stream_set_caps (bstream->stream_object, caps);
    gst_pad_set_caps (stream->pad, caps);
    gst_caps_unref (caps);
  }

  /* Ownership of stream->data transfers to the returned buffer */
  return gst_buffer_new_wrapped (stream->data, stream->current_size);
}
+
+
+ /* Push the PES payload collected in stream->data downstream on @stream's pad.
+ * Depending on the stream type the payload is wrapped as-is, or parsed into
+ * access units (Opus -> buffer list, JP2K / AAC ADTS -> single buffer).
+ * Ownership of stream->data is transferred to the produced buffer(s); on
+ * return the collection fields (data/allocated_size/current_size) are reset
+ * while the collection *state* is kept so the same PES can continue. */
+ static GstFlowReturn
+ gst_ts_demux_push_pending_data (GstTSDemux * demux, TSDemuxStream * stream,
+ MpegTSBaseProgram * target_program)
+ {
+ MpegTSBase *base = GST_MPEGTS_BASE (demux);
+ GstFlowReturn res = GST_FLOW_OK;
+ MpegTSBaseStream *bs = (MpegTSBaseStream *) stream;
+ GstBuffer *buffer = NULL;
+ GstBufferList *buffer_list = NULL;
+
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "stream:%p, pid:0x%04x stream_type:%d state:%d", stream, bs->pid,
+ bs->stream_type, stream->state);
+
+ /* Nothing collected, or collection not yet in the buffering state:
+ * there is nothing to push */
+ if (G_UNLIKELY (stream->data == NULL)) {
+ GST_LOG ("stream->data == NULL");
+ goto beach;
+ }
+
+ if (G_UNLIKELY (stream->state == PENDING_PACKET_EMPTY)) {
+ GST_LOG ("EMPTY: returning");
+ goto beach;
+ }
+
+ if (G_UNLIKELY (stream->state != PENDING_PACKET_BUFFER)) {
+ GST_LOG ("state:%d, returning", stream->state);
+ goto beach;
+ }
+
+ if (G_UNLIKELY (demux->program == NULL)) {
+ GST_LOG_OBJECT (demux, "No program");
+ g_free (stream->data);
+ goto beach;
+ }
+
+ /* Keyframe-seek mode: inspect this payload for a keyframe before
+ * letting any data through */
+ if (stream->needs_keyframe) {
+ MpegTSBase *base = (MpegTSBase *) demux;
+
+ if ((gst_ts_demux_adjust_seek_offset_for_keyframe (stream, stream->data,
+ stream->current_size)) || demux->last_seek_offset == 0) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "Got Keyframe, ready to go at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->pts));
+
+ if (bs->stream_type == GST_MPEGTS_STREAM_TYPE_PRIVATE_PES_PACKETS &&
+ bs->registration_id == DRF_ID_OPUS) {
+ buffer_list = parse_opus_access_unit (stream);
+ if (!buffer_list) {
+ res = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* A single-entry list is degraded to a plain buffer */
+ if (gst_buffer_list_length (buffer_list) == 1) {
+ buffer = gst_buffer_ref (gst_buffer_list_get (buffer_list, 0));
+ gst_buffer_list_unref (buffer_list);
+ buffer_list = NULL;
+ }
+ } else if (bs->stream_type == GST_MPEGTS_STREAM_TYPE_VIDEO_JP2K) {
+ buffer = parse_jp2k_access_unit (stream);
+ if (!buffer) {
+ res = GST_FLOW_ERROR;
+ goto beach;
+ }
+ } else {
+ buffer = gst_buffer_new_wrapped (stream->data, stream->current_size);
+ }
+
+ stream->seeked_pts = stream->pts;
+ stream->seeked_dts = stream->dts;
+ stream->needs_keyframe = FALSE;
+ } else {
+ /* No keyframe in this payload: rewind ~200 packets and retry the
+ * scan from an earlier offset */
+ GList *tmp;
+ GST_DEBUG_OBJECT (stream->pad, "Rewinding after keyframe seek failure");
+ base->seek_offset = demux->last_seek_offset - 200 * base->packetsize;
+ if (demux->last_seek_offset < 200 * base->packetsize)
+ base->seek_offset = 0;
+ demux->last_seek_offset = base->seek_offset;
+ mpegts_packetizer_flush (base->packetizer, FALSE);
+
+ /* Reset all streams accordingly */
+ for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
+ TSDemuxStream *cand = tmp->data;
+
+ GST_DEBUG_OBJECT (cand->pad, "Clearing stream");
+ cand->continuity_counter = CONTINUITY_UNSET;
+ cand->state = PENDING_PACKET_EMPTY;
+ if (cand->data)
+ g_free (cand->data);
+ cand->data = NULL;
+ cand->allocated_size = 0;
+ cand->current_size = 0;
+ }
+ base->mode = BASE_MODE_SEEKING;
+
+ res = GST_FLOW_REWINDING;
+ goto beach;
+ }
+ } else {
+ if (bs->stream_type == GST_MPEGTS_STREAM_TYPE_PRIVATE_PES_PACKETS &&
+ bs->registration_id == DRF_ID_OPUS) {
+ buffer_list = parse_opus_access_unit (stream);
+ if (!buffer_list) {
+ res = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ if (gst_buffer_list_length (buffer_list) == 1) {
+ buffer = gst_buffer_ref (gst_buffer_list_get (buffer_list, 0));
+ gst_buffer_list_unref (buffer_list);
+ buffer_list = NULL;
+ }
+ } else if (bs->stream_type == GST_MPEGTS_STREAM_TYPE_VIDEO_JP2K) {
+ buffer = parse_jp2k_access_unit (stream);
+ if (!buffer) {
+ res = GST_FLOW_ERROR;
+ goto beach;
+ }
+ } else if (bs->stream_type == GST_MPEGTS_STREAM_TYPE_AUDIO_AAC_ADTS) {
+ buffer = parse_aac_adts_frame (stream);
+ if (!buffer) {
+ res = GST_FLOW_ERROR;
+ goto beach;
+ }
+ } else {
+ buffer = gst_buffer_new_wrapped (stream->data, stream->current_size);
+ }
+
+ /* Not all streams have usable timestamps yet: park the produced
+ * buffer(s) on stream->pending instead of pushing */
+ if (G_UNLIKELY (stream->pending_ts && !check_pending_buffers (demux))) {
+ if (buffer) {
+ PendingBuffer *pend;
+ pend = g_slice_new0 (PendingBuffer);
+ pend->buffer = buffer;
+ pend->pts = stream->raw_pts;
+ pend->dts = stream->raw_dts;
+ stream->pending = g_list_append (stream->pending, pend);
+ } else {
+ guint i, n;
+
+ n = gst_buffer_list_length (buffer_list);
+ for (i = 0; i < n; i++) {
+ PendingBuffer *pend;
+ pend = g_slice_new0 (PendingBuffer);
+ pend->buffer = gst_buffer_ref (gst_buffer_list_get (buffer_list, i));
+ pend->pts = i == 0 ? stream->raw_pts : -1;
+ pend->dts = i == 0 ? stream->raw_dts : -1;
+ stream->pending = g_list_append (stream->pending, pend);
+ }
+ gst_buffer_list_unref (buffer_list);
+ }
+ GST_DEBUG ("Not enough information to push buffers yet, storing buffer");
+ goto beach;
+ }
+ }
+
+ if (G_UNLIKELY (stream->need_newsegment))
+ calculate_and_push_newsegment (demux, stream, target_program);
+
+ /* FIXME : Push pending buffers if any */
+ if (G_UNLIKELY (stream->pending)) {
+ GList *tmp;
+ for (tmp = stream->pending; tmp; tmp = tmp->next) {
+ PendingBuffer *pend = (PendingBuffer *) tmp->data;
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Pushing pending buffer PTS:%" GST_TIME_FORMAT " DTS:%"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (pend->buffer)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (pend->buffer)));
+
+ if (stream->discont)
+ GST_BUFFER_FLAG_SET (pend->buffer, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+
+ res = gst_pad_push (stream->pad, pend->buffer);
+ stream->nb_out_buffers += 1;
+ g_slice_free (PendingBuffer, pend);
+ }
+ g_list_free (stream->pending);
+ stream->pending = NULL;
+ }
+
+ /* NOTE(review): both branches below compare stream->pts — the second one
+ * against seeked_dts; confirm stream->dts was not intended there */
+ if ((GST_CLOCK_TIME_IS_VALID (stream->seeked_pts)
+ && stream->pts < stream->seeked_pts) ||
+ (GST_CLOCK_TIME_IS_VALID (stream->seeked_dts) &&
+ stream->pts < stream->seeked_dts)) {
+ GST_INFO_OBJECT (stream->pad,
+ "Droping with PTS: %" GST_TIME_FORMAT " DTS: %" GST_TIME_FORMAT
+ " after seeking as other stream needed to be seeked further"
+ "(seeked PTS: %" GST_TIME_FORMAT " DTS: %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (stream->pts), GST_TIME_ARGS (stream->dts),
+ GST_TIME_ARGS (stream->seeked_pts), GST_TIME_ARGS (stream->seeked_dts));
+ if (buffer)
+ gst_buffer_unref (buffer);
+ if (buffer_list)
+ gst_buffer_list_unref (buffer_list);
+ goto beach;
+ }
+
+ GST_DEBUG_OBJECT (stream->pad, "stream->pts %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->pts));
+
+ /* Decorate buffer or first buffer of the buffer list */
+ if (buffer_list)
+ buffer = gst_buffer_list_get (buffer_list, 0);
+
+ if (GST_CLOCK_TIME_IS_VALID (stream->pts))
+ GST_BUFFER_PTS (buffer) = GST_BUFFER_DTS (buffer) = stream->pts;
+ /* DTS = PTS by default, we override it if there's a real DTS */
+ if (GST_CLOCK_TIME_IS_VALID (stream->dts))
+ GST_BUFFER_DTS (buffer) = stream->dts;
+
+ if (stream->discont)
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+
+ /* buffer was only borrowed from the list for decoration; drop the
+ * alias so the push below uses the list */
+ if (buffer_list)
+ buffer = NULL;
+
+ GST_DEBUG_OBJECT (stream->pad,
+ "Pushing buffer%s with PTS: %" GST_TIME_FORMAT " , DTS: %"
+ GST_TIME_FORMAT, (buffer_list ? "list" : ""), GST_TIME_ARGS (stream->pts),
+ GST_TIME_ARGS (stream->dts));
+
+ /* Advance the output segment position (DTS preferred over PTS) */
+ if (GST_CLOCK_TIME_IS_VALID (stream->dts)) {
+ if (stream->dts > base->out_segment.position)
+ base->out_segment.position = stream->dts;
+ } else if (GST_CLOCK_TIME_IS_VALID (stream->pts)) {
+ if (stream->pts > base->out_segment.position)
+ base->out_segment.position = stream->pts;
+ }
+
+ if (buffer) {
+ res = gst_pad_push (stream->pad, buffer);
+ /* Record that a buffer was pushed */
+ stream->nb_out_buffers += 1;
+ } else {
+ guint n = gst_buffer_list_length (buffer_list);
+ res = gst_pad_push_list (stream->pad, buffer_list);
+ /* Record that a buffer was pushed */
+ stream->nb_out_buffers += n;
+ }
+ GST_DEBUG_OBJECT (stream->pad, "Returned %s", gst_flow_get_name (res));
+ res = gst_flow_combiner_update_flow (demux->flowcombiner, res);
+ GST_DEBUG_OBJECT (stream->pad, "combined %s", gst_flow_get_name (res));
+
+ /* GAP / sparse stream tracking */
+ if (G_UNLIKELY (stream->gap_ref_pts == GST_CLOCK_TIME_NONE))
+ stream->gap_ref_pts = stream->pts;
+ else {
+ /* Look if the stream PTS has advanced 2 seconds since the last
+ * gap check, and sync streams if it has. The first stream to
+ * hit this will trigger a gap check */
+ if (G_UNLIKELY (stream->pts != GST_CLOCK_TIME_NONE &&
+ stream->pts > stream->gap_ref_pts + 2 * GST_SECOND)) {
+ if (demux->program->pcr_pid != 0x1fff) {
+ GstClockTime curpcr =
+ mpegts_packetizer_get_current_time (MPEG_TS_BASE_PACKETIZER (demux),
+ demux->program->pcr_pid);
+ if (curpcr == GST_CLOCK_TIME_NONE || curpcr < 800 * GST_MSECOND)
+ goto beach;
+ curpcr -= 800 * GST_MSECOND;
+ /* Use the current PCR (with a safety margin) to sync against */
+ gst_ts_demux_check_and_sync_streams (demux, curpcr);
+ } else {
+ /* If we don't have a PCR track, just use the current stream PTS */
+ gst_ts_demux_check_and_sync_streams (demux, stream->pts);
+ }
+ }
+ }
+
+ beach:
+ /* Reset the PES payload collection, but don't clear the state,
+ * we might want to keep collecting this PES */
+ GST_LOG ("Cleared PES data. returning %s", gst_flow_get_name (res));
+ if (stream->expected_size) {
+ if (stream->current_size > stream->expected_size)
+ stream->expected_size = 0;
+ else
+ stream->expected_size -= stream->current_size;
+ }
+ stream->data = NULL;
+ stream->allocated_size = 0;
+ stream->current_size = 0;
+
+ return res;
+ }
+
+ /* Handle one TS packet for @stream: flush the previously collected PES
+ * payload when a new payload unit starts, queue this packet's payload,
+ * and push the collection once it is complete or exceeds MAX_PES_PAYLOAD. */
+ static GstFlowReturn
+ gst_ts_demux_handle_packet (GstTSDemux * demux, TSDemuxStream * stream,
+ MpegTSPacketizerPacket * packet, GstMpegtsSection * section)
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+
+ GST_LOG ("pid 0x%04x pusi:%d, afc:%d, cont:%d, payload:%p", packet->pid,
+ packet->payload_unit_start_indicator, packet->scram_afc_cc & 0x30,
+ FLAGS_CONTINUITY_COUNTER (packet->scram_afc_cc), packet->payload);
+
+ /* A payload-unit-start with payload means the previous PES is complete */
+ if (G_UNLIKELY (packet->payload_unit_start_indicator) &&
+ FLAGS_HAS_PAYLOAD (packet->scram_afc_cc)) {
+ /* Flush previous data */
+ res = gst_ts_demux_push_pending_data (demux, stream, NULL);
+ if (res != GST_FLOW_REWINDING) {
+ /* Tell the data collecting to expect this header. We don't do this when
+ * rewinding since the states will have been resetted accordingly */
+ stream->state = PENDING_PACKET_HEADER;
+ }
+ }
+
+ /* Only queue data for linked pads and healthy flow states */
+ if (packet->payload && (res == GST_FLOW_OK || res == GST_FLOW_NOT_LINKED)
+ && stream->pad) {
+ gst_ts_demux_queue_data (demux, stream, packet);
+ GST_LOG ("current_size:%d, expected_size:%d",
+ stream->current_size, stream->expected_size);
+ /* Finally check if the data we queued completes a packet, or got too
+ * large and needs output now */
+ if ((stream->expected_size && stream->current_size >= stream->expected_size)
+ || (stream->current_size >= MAX_PES_PAYLOAD)) {
+ GST_LOG ("pushing packet of size %u", stream->current_size);
+ res = gst_ts_demux_push_pending_data (demux, stream, NULL);
+ }
+ }
+
+ /* We are rewinding to find a keyframe,
+ * and didn't want the data to be queued
+ */
+ if (res == GST_FLOW_REWINDING)
+ res = GST_FLOW_OK;
+
+ return res;
+ }
+
+ /* Flush the demuxer: drop per-stream data, pending segment event and
+ * global tags; a hard flush additionally resets rate and output segment. */
+ static void
+ gst_ts_demux_flush (MpegTSBase * base, gboolean hard)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX_CAST (base);
+
+ gst_ts_demux_flush_streams (demux, hard);
+
+ if (demux->segment_event != NULL) {
+ gst_event_unref (demux->segment_event);
+ demux->segment_event = NULL;
+ }
+ if (demux->global_tags != NULL) {
+ gst_tag_list_unref (demux->global_tags);
+ demux->global_tags = NULL;
+ }
+
+ if (!hard)
+ return;
+
+ /* For pull mode seeks the current segment needs to be preserved */
+ demux->rate = 1.0;
+ gst_segment_init (&base->out_segment, GST_FORMAT_UNDEFINED);
+ }
+
+ /* Drain: push every stream's pending PES data downstream, stopping at
+ * the first non-OK flow return. */
+ static GstFlowReturn
+ gst_ts_demux_drain (MpegTSBase * base)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX_CAST (base);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *walk;
+
+ if (demux->program == NULL)
+ return ret;
+
+ for (walk = demux->program->stream_list; walk != NULL; walk = walk->next) {
+ TSDemuxStream *stream = (TSDemuxStream *) walk->data;
+
+ if (!stream->pad)
+ continue;
+
+ ret = gst_ts_demux_push_pending_data (demux, stream, NULL);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ break;
+ }
+
+ return ret;
+ }
+
+ /* Entry point from MpegTSBase: dispatch a packet to the stream that owns
+ * its PID, if a program is active and the PID is tracked. */
+ static GstFlowReturn
+ gst_ts_demux_push (MpegTSBase * base, MpegTSPacketizerPacket * packet,
+ GstMpegtsSection * section)
+ {
+ GstTSDemux *demux = GST_TS_DEMUX_CAST (base);
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (G_LIKELY (demux->program != NULL)) {
+ TSDemuxStream *stream =
+ (TSDemuxStream *) demux->program->streams[packet->pid];
+
+ if (stream != NULL)
+ ret = gst_ts_demux_handle_packet (demux, stream, packet, section);
+ }
+ return ret;
+ }
--- /dev/null
+ /*
+ * tsdemux - GStreamer MPEG transport stream demuxer
+ * Copyright (C) 2009 Zaheer Abbas Merali
+ * 2010 Edward Hervey
+ *
+ * Authors:
+ * Zaheer Abbas Merali <zaheerabbas at merali dot org>
+ * Edward Hervey <edward.hervey@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ #ifndef GST_TS_DEMUX_H
+ #define GST_TS_DEMUX_H
+
+ #include <gst/gst.h>
+ #include <gst/base/gstbytereader.h>
+ #include <gst/base/gstflowcombiner.h>
+ #include "mpegtsbase.h"
+ #include "mpegtspacketizer.h"
+
+ /* color specifications for JPEG 2000 stream over MPEG TS */
+ /* NOTE(review): presumably these mirror the colour specification field of
+ * the J2K video descriptor — confirm against the signalling spec */
+ typedef enum
+ {
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_UNKNOWN,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_SRGB,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_REC601,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_REC709,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_CIELUV,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_CIEXYZ,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_REC2020,
+ GST_MPEGTSDEMUX_JPEG2000_COLORSPEC_SMPTE2084
+ } GstMpegTsDemuxJpeg2000ColorSpec;
+
+
+ G_BEGIN_DECLS
+ #define GST_TYPE_TS_DEMUX \
+ (gst_ts_demux_get_type())
+ #define GST_TS_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_TS_DEMUX,GstTSDemux))
+ #define GST_TS_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_TS_DEMUX,GstTSDemuxClass))
+ #define GST_IS_TS_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_TS_DEMUX))
+ #define GST_IS_TS_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_TS_DEMUX))
+ #define GST_TS_DEMUX_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_TS_DEMUX, GstTSDemuxClass))
+ #define GST_TS_DEMUX_CAST(obj) ((GstTSDemux*) obj)
+ typedef struct _GstTSDemux GstTSDemux;
+ typedef struct _GstTSDemuxClass GstTSDemuxClass;
+
+ /* Instance structure of the MPEG-TS demuxer element */
+ struct _GstTSDemux
+ {
+ MpegTSBase parent;
+
+ /* GST_GROUP_ID for stream-start events */
+ gboolean have_group_id;
+ guint group_id;
+
+ /* the following vars must be protected with the OBJECT_LOCK as they can be
+ * accessed from the application thread and the streaming thread */
+ gint requested_program_number; /* Required program number (ignore:-1) */
+ guint program_number;
+ gboolean emit_statistics;
+ gint latency; /* latency in ms */
+
+ /*< private >*/
+ gint program_generation; /* Incremented each time we switch program 0..15 */
+ MpegTSBaseProgram *program; /* Current program */
+ MpegTSBaseProgram *previous_program; /* Previous program, to deactivate once
+ * the new program becomes active */
+
+ /* segments to be sent */
+ GstEvent *segment_event;
+ gboolean reset_segment;
+
+ /* global taglist */
+ GstTagList *global_tags;
+
++#ifdef TIZEN_FEATURE_TSDEMUX_LANG_TAG
++ /* Tizen extension: presumably queued until they can be sent on pads —
++ * TODO confirm against the Tizen tsdemux.c changes */
++ GstTagList *pending_language_tag;
++ GstEvent *pending_custom_event;
++#endif
++
+ /* Full stream duration */
+ GstClockTime duration;
+
+ /* Pending seek rate (default 1.0) */
+ gdouble rate;
+
+ /* Combines per-pad flow returns into one element-level return */
+ GstFlowCombiner *flowcombiner;
+
+ /* Used when seeking for a keyframe to go backward in the stream */
+ guint64 last_seek_offset;
+ };
+
+ /* Class structure: no additions over MpegTSBaseClass */
+ struct _GstTSDemuxClass
+ {
+ MpegTSBaseClass parent_class;
+ };
+
+ G_GNUC_INTERNAL GType gst_ts_demux_get_type (void);
+ GST_ELEMENT_REGISTER_DECLARE (tsdemux);
+
+ G_END_DECLS
+ #endif /* GST_TS_DEMUX_H */
--- /dev/null
- gst_caps_from_string (ALAW_CAPS));
+ /* GStreamer
+ * Copyright (C) 2008-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* Implementation of SMPTE 388M - Mapping A-Law coded audio into the MXF
+ * Generic Container
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <string.h>
+
+ #include "mxfalaw.h"
+ #include "mxfessence.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (mxf_debug);
+ #define GST_CAT_DEFAULT mxf_debug
+
+ /* Returns TRUE if any of @track's file descriptors carries an A-Law
+ * essence container label (SMPTE 388M 6.1). */
+ static gboolean
+ mxf_is_alaw_essence_track (const MXFMetadataTimelineTrack * track)
+ {
+ guint n;
+
+ g_return_val_if_fail (track != NULL, FALSE);
+
+ if (track->parent.descriptor == NULL)
+ return FALSE;
+
+ for (n = 0; n < track->parent.n_descriptor; n++) {
+ MXFMetadataFileDescriptor *desc = track->parent.descriptor[n];
+ MXFUL *ul;
+
+ if (desc == NULL)
+ continue;
+
+ ul = &desc->essence_container;
+ /* SMPTE 388M 6.1 */
+ if (!mxf_is_generic_container_essence_container_label (ul))
+ continue;
+ if (ul->u[12] != 0x02 || ul->u[13] != 0x0a)
+ continue;
+ if (ul->u[14] == 0x01 || ul->u[14] == 0x02 || ul->u[14] == 0x03)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ /* Validate an A-Law essence element key and pass the buffer through
+ * unchanged. */
+ static GstFlowReturn
+ mxf_alaw_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
+ GstCaps * caps,
+ MXFMetadataTimelineTrack * track,
+ gpointer mapping_data, GstBuffer ** outbuf)
+ {
+ gboolean valid_key;
+
+ *outbuf = buffer;
+
+ /* SMPTE 388M 5.1 */
+ valid_key = (key->u[12] == 0x16) &&
+ (key->u[14] == 0x08 || key->u[14] == 0x09 || key->u[14] == 0x0a);
+
+ if (!valid_key) {
+ GST_ERROR ("Invalid A-Law essence element");
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ /* Determine the essence wrapping from the first generic sound essence
+ * descriptor; defaults to custom wrapping when nothing matches. */
+ static MXFEssenceWrapping
+ mxf_alaw_get_track_wrapping (const MXFMetadataTimelineTrack * track)
+ {
+ guint n;
+
+ g_return_val_if_fail (track != NULL, MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ for (n = 0; n < track->parent.n_descriptor; n++) {
+ guint8 wrapping_byte;
+
+ if (!track->parent.descriptor[n])
+ continue;
+
+ if (!MXF_IS_METADATA_GENERIC_SOUND_ESSENCE_DESCRIPTOR (track->parent.
+ descriptor[n]))
+ continue;
+
+ /* byte 14 of the essence container label selects the wrapping */
+ wrapping_byte = track->parent.descriptor[n]->essence_container.u[14];
+ if (wrapping_byte == 0x01)
+ return MXF_ESSENCE_WRAPPING_FRAME_WRAPPING;
+ if (wrapping_byte == 0x02)
+ return MXF_ESSENCE_WRAPPING_CLIP_WRAPPING;
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ /* Build audio/x-alaw caps for @track from its generic sound essence
+ * descriptor, install the essence element handler and an audio-codec tag.
+ * Returns NULL (no caps) when no usable descriptor is found. */
+ static GstCaps *
+ mxf_alaw_create_caps (MXFMetadataTimelineTrack * track, GstTagList ** tags,
+ gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
+ gpointer * mapping_data)
+ {
+ MXFMetadataGenericSoundEssenceDescriptor *s = NULL;
+ guint i;
+ GstCaps *caps = NULL;
+
+ g_return_val_if_fail (track != NULL, NULL);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return NULL;
+ }
+
+ /* Use the first generic sound essence descriptor of the track */
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (!track->parent.descriptor[i])
+ continue;
+
+ if (MXF_IS_METADATA_GENERIC_SOUND_ESSENCE_DESCRIPTOR (track->parent.
+ descriptor[i])) {
+ s = (MXFMetadataGenericSoundEssenceDescriptor *) track->parent.
+ descriptor[i];
+ break;
+ }
+ }
+
+ if (!s) {
+ GST_ERROR ("No generic sound essence descriptor found for this track");
+ return NULL;
+ }
+
+ *handler = mxf_alaw_handle_essence_element;
+
+ /* s is guaranteed non-NULL here (early return above), so only its
+ * fields need validating */
+ if (s->audio_sampling_rate.n != 0 && s->audio_sampling_rate.d != 0 &&
+ s->channel_count != 0) {
+
+ caps = gst_caps_new_empty_simple ("audio/x-alaw");
+ mxf_metadata_generic_sound_essence_descriptor_set_caps (s, caps);
+
+ /* TODO: Handle channel layout somehow?
+ * Or is alaw limited to two channels? */
+ if (!*tags)
+ *tags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (*tags, GST_TAG_MERGE_APPEND, GST_TAG_AUDIO_CODEC,
+ "A-law encoded audio", NULL);
+
+ }
+
+ *intra_only = TRUE;
+
+ return caps;
+ }
+
+ static const MXFEssenceElementHandler mxf_alaw_essence_element_handler = {
+ mxf_is_alaw_essence_track,
+ mxf_alaw_get_track_wrapping,
+ mxf_alaw_create_caps
+ };
+
+ typedef struct
+ {
+ guint64 error;
+ gint rate, channels;
+ MXFFraction edit_rate;
+ } ALawMappingData;
+
+ /* Muxer write function: accumulate incoming A-Law data in @adapter and
+ * emit one edit unit's worth of bytes per call. Returns
+ * GST_FLOW_CUSTOM_SUCCESS when another full edit unit is still buffered
+ * (i.e. the caller should invoke this function again), GST_FLOW_OK
+ * otherwise. */
+ static GstFlowReturn
+ mxf_alaw_write_func (GstBuffer * buffer, gpointer mapping_data,
+ GstAdapter * adapter, GstBuffer ** outbuf, gboolean flush)
+ {
+ ALawMappingData *md = mapping_data;
+ guint bytes;
+ /* speu = samples per edit unit (rate / edit_rate) */
+ guint64 speu =
+ gst_util_uint64_scale (md->rate, md->edit_rate.d, md->edit_rate.n);
+
+ /* Accumulate the fractional remainder of rate/edit_rate and add one
+ * extra sample once it overflows, so no samples are lost over time */
+ md->error += (md->edit_rate.d * md->rate) % (md->edit_rate.n);
+ if (md->error >= md->edit_rate.n) {
+ md->error = 0;
+ speu += 1;
+ }
+
+ bytes = speu * md->channels;
+
+ if (buffer)
+ gst_adapter_push (adapter, buffer);
+
+ if (gst_adapter_available (adapter) == 0)
+ return GST_FLOW_OK;
+
+ /* On flush, emit whatever is left even if it is a short edit unit */
+ if (flush)
+ bytes = MIN (gst_adapter_available (adapter), bytes);
+
+ if (gst_adapter_available (adapter) >= bytes) {
+ *outbuf = gst_adapter_take_buffer (adapter, bytes);
+ }
+
+ /* Re-checked AFTER taking the buffer: true only if yet another full
+ * edit unit remains in the adapter */
+ if (gst_adapter_available (adapter) >= bytes)
+ return GST_FLOW_CUSTOM_SUCCESS;
+ else
+ return GST_FLOW_OK;
+ }
+
+ /* Essence container UL copied into the descriptor's essence_container
+ * field (see mxf_alaw_get_descriptor); per SMPTE 388M — TODO confirm */
+ static const guint8 alaw_essence_container_ul[] = {
+ 0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x03,
+ 0x0d, 0x01, 0x03, 0x01, 0x02, 0x0a, 0x01, 0x00
+ };
+
+ /* UL written to sound_essence_compression to mark A-Law coding */
+ static const MXFUL mxf_sound_essence_compression_alaw =
+ { {0x06, 0x0E, 0x2B, 0x34, 0x04, 0x01, 0x01, 0x03, 0x04, 0x02, 0x02, 0x02,
+ 0x03, 0x01, 0x01, 0x00}
+ };
+
+ /* Muxer side: build a generic sound essence descriptor for audio/x-alaw
+ * caps, install the write function and allocate the per-track mapping
+ * data (rate/channels). Returns NULL on invalid caps. Caller owns the
+ * returned descriptor and the mapping data. */
+ static MXFMetadataFileDescriptor *
+ mxf_alaw_get_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
+ MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
+ {
+ MXFMetadataGenericSoundEssenceDescriptor *ret;
+ GstStructure *s;
+ ALawMappingData *md;
+ gint rate, channels;
+
+ /* Caps must be audio/x-alaw with explicit rate and channels */
+ s = gst_caps_get_structure (caps, 0);
+ if (strcmp (gst_structure_get_name (s), "audio/x-alaw") != 0 ||
+ !gst_structure_get_int (s, "rate", &rate) ||
+ !gst_structure_get_int (s, "channels", &channels)) {
+ GST_ERROR ("Invalid caps %" GST_PTR_FORMAT, caps);
+ return NULL;
+ }
+
+ ret = (MXFMetadataGenericSoundEssenceDescriptor *)
+ g_object_new (MXF_TYPE_METADATA_GENERIC_SOUND_ESSENCE_DESCRIPTOR, NULL);
+
+ /* Stamp the A-Law container and compression ULs into the descriptor */
+ memcpy (&ret->parent.essence_container, &alaw_essence_container_ul, 16);
+ memcpy (&ret->sound_essence_compression, &mxf_sound_essence_compression_alaw,
+ 16);
+
+ if (!mxf_metadata_generic_sound_essence_descriptor_from_caps (ret, caps)) {
+ g_object_unref (ret);
+ return NULL;
+ }
+
+ *handler = mxf_alaw_write_func;
+
+ /* Mapping data carries rate/channels to the write function; its
+ * edit_rate is filled in later by mxf_alaw_get_edit_rate() */
+ md = g_new0 (ALawMappingData, 1);
+ md->rate = rate;
+ md->channels = channels;
+ *mapping_data = md;
+
+ return (MXFMetadataFileDescriptor *) ret;
+ }
+
+ /* Nothing in the A-Law descriptor depends on the written data, so there
+ * is nothing to update. */
+ static void
+ mxf_alaw_update_descriptor (MXFMetadataFileDescriptor * d, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf)
+ {
+ }
+
+ /* Pick the edit rate for an A-Law track: the slowest edit rate among the
+ * package's other timeline tracks, or 10/1 (100ms edit units) when there
+ * is none. The chosen rate is also stored in the mapping data for
+ * mxf_alaw_write_func(). */
+ static void
+ mxf_alaw_get_edit_rate (MXFMetadataFileDescriptor * a, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf, MXFMetadataSourcePackage * package,
+ MXFMetadataTimelineTrack * track, MXFFraction * edit_rate)
+ {
+ guint i;
+ gdouble min = G_MAXDOUBLE;
+ ALawMappingData *md = mapping_data;
+
+ /* Find the minimum edit rate of all other timeline tracks */
+ for (i = 0; i < package->parent.n_tracks; i++) {
+ MXFMetadataTimelineTrack *tmp;
+
+ if (!MXF_IS_METADATA_TIMELINE_TRACK (package->parent.tracks[i]) ||
+ package->parent.tracks[i] == (MXFMetadataTrack *) track)
+ continue;
+
+ tmp = MXF_METADATA_TIMELINE_TRACK (package->parent.tracks[i]);
+ if (((gdouble) tmp->edit_rate.n) / ((gdouble) tmp->edit_rate.d) < min) {
+ min = ((gdouble) tmp->edit_rate.n) / ((gdouble) tmp->edit_rate.d);
+ memcpy (edit_rate, &tmp->edit_rate, sizeof (MXFFraction));
+ }
+ }
+
+ if (min == G_MAXDOUBLE) {
+ /* 100ms edit units */
+ edit_rate->n = 10;
+ edit_rate->d = 1;
+ }
+
+ memcpy (&md->edit_rate, edit_rate, sizeof (MXFFraction));
+ }
+
+ /* Track number template 0x16xx08xx — presumably sound item type 0x16
+ * with A-Law element type 0x08 per SMPTE 388M; TODO confirm */
+ static guint32
+ mxf_alaw_get_track_number_template (MXFMetadataFileDescriptor * a,
+ GstCaps * caps, gpointer mapping_data)
+ {
+ return (0x16 << 24) | (0x08 << 8);
+ }
+
+ static MXFEssenceElementWriter mxf_alaw_essence_element_writer = {
+ mxf_alaw_get_descriptor,
+ mxf_alaw_update_descriptor,
+ mxf_alaw_get_edit_rate,
+ mxf_alaw_get_track_number_template,
+ NULL,
+ {{0,}}
+ };
+
+ #define ALAW_CAPS \
+ "audio/x-alaw, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) [ 1, 2 ]"
+
+ /* Register the A-Law demux handler and mux writer. */
+ void
+ mxf_alaw_init (void)
+ {
++ GstCaps *tmp = NULL;
+ mxf_essence_element_handler_register (&mxf_alaw_essence_element_handler);
+
+ mxf_alaw_essence_element_writer.pad_template =
+ gst_pad_template_new ("alaw_audio_sink_%u", GST_PAD_SINK, GST_PAD_REQUEST,
++ tmp = gst_caps_from_string (ALAW_CAPS));
++ /* drop our caps ref after the template is created — NOTE(review):
++ * assumes gst_pad_template_new() keeps its own reference to the caps;
++ * confirm against the GStreamer API docs */
++ gst_caps_unref (tmp);
+ memcpy (&mxf_alaw_essence_element_writer.data_definition,
+ mxf_metadata_track_identifier_get (MXF_METADATA_TRACK_SOUND_ESSENCE), 16);
+ mxf_essence_element_writer_register (&mxf_alaw_essence_element_writer);
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2008-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-mxfdemux
+ * @title: mxfdemux
+ *
+ * mxfdemux demuxes an MXF file into the different contained streams.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/mxf ! mxfdemux ! audioconvert ! autoaudiosink
+ * ]| This pipeline demuxes an MXF file and outputs one of the contained raw audio streams.
+ *
+ */
+
+ /* TODO:
+ * - Handle timecode tracks correctly (where is this documented?)
+ * - Handle drop-frame field of timecode tracks
+ * - Handle Generic container system items
+ * - Post structural metadata and descriptive metadata trees as a message on the bus
+ * and send them downstream as event.
+ * - Multichannel audio needs channel layouts, define them (SMPTE S320M?).
+ * - Correctly handle the different rectangles and aspect-ratio for video
+ * - Add more support for non-standard MXF used by Avid (bug #561922).
+ * - Fix frame layout stuff, i.e. interlaced/progressive
+ * - In pull mode first find the first buffer for every pad before pushing
+ * to prevent jumpy playback in the beginning due to resynchronization.
+ *
+ * - Implement SMPTE D11 essence and the digital cinema/MXF specs
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstmxfelements.h"
+ #include "mxfdemux.h"
+ #include "mxfessence.h"
+
+ #include <string.h>
+
+ static GstStaticPadTemplate mxf_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/mxf")
+ );
+
+ static GstStaticPadTemplate mxf_src_template =
+ GST_STATIC_PAD_TEMPLATE ("track_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ GST_DEBUG_CATEGORY_STATIC (mxfdemux_debug);
+ #define GST_CAT_DEFAULT mxfdemux_debug
+
+ /* Fill klv for the given offset, does not download the data */
+ static GstFlowReturn
+ gst_mxf_demux_peek_klv_packet (GstMXFDemux * demux, guint64 offset,
+ GstMXFKLV * klv);
+
+ /* Ensures the klv data is present. Pulls it if needed */
+ static GstFlowReturn
+ gst_mxf_demux_fill_klv (GstMXFDemux * demux, GstMXFKLV * klv);
+
+ /* Call when done with a klv. Will release the buffer (if any) and will update
+ * the demuxer offset position */
+ static void gst_mxf_demux_consume_klv (GstMXFDemux * demux, GstMXFKLV * klv);
+
+ static GstFlowReturn
+ gst_mxf_demux_handle_index_table_segment (GstMXFDemux * demux, GstMXFKLV * klv);
+
+ static void collect_index_table_segments (GstMXFDemux * demux);
+ static gboolean find_entry_for_offset (GstMXFDemux * demux,
+ GstMXFDemuxEssenceTrack * etrack, guint64 offset,
+ GstMXFDemuxIndex * retentry);
+
+ GType gst_mxf_demux_pad_get_type (void);
+ G_DEFINE_TYPE (GstMXFDemuxPad, gst_mxf_demux_pad, GST_TYPE_PAD);
+
+ /* GObject finalize: release the pad's tag list, then chain up. */
+ static void
+ gst_mxf_demux_pad_finalize (GObject * object)
+ {
+ GstMXFDemuxPad *self = GST_MXF_DEMUX_PAD (object);
+
+ if (self->tags != NULL) {
+ gst_tag_list_unref (self->tags);
+ self->tags = NULL;
+ }
+
+ G_OBJECT_CLASS (gst_mxf_demux_pad_parent_class)->finalize (object);
+ }
+
+ /* Class init: hook up our finalize implementation. */
+ static void
+ gst_mxf_demux_pad_class_init (GstMXFDemuxPadClass * klass)
+ {
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = gst_mxf_demux_pad_finalize;
+ }
+
+ /* Instance init: both playback positions start at zero. */
+ static void
+ gst_mxf_demux_pad_init (GstMXFDemuxPad * pad)
+ {
+ pad->current_material_track_position = 0;
+ pad->position = 0;
+ }
+
+ #define DEFAULT_MAX_DRIFT 100 * GST_MSECOND
+
+ enum
+ {
+ PROP_0,
+ PROP_PACKAGE,
+ PROP_MAX_DRIFT,
+ PROP_STRUCTURE
+ };
+
+ static gboolean gst_mxf_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_mxf_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_mxf_demux_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+ #define gst_mxf_demux_parent_class parent_class
+ G_DEFINE_TYPE (GstMXFDemux, gst_mxf_demux, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (mxfdemux, "mxfdemux", GST_RANK_PRIMARY,
+ GST_TYPE_MXF_DEMUX, mxf_element_init (plugin));
+
+ /* Detach one source pad from the flow combiner and from the element. */
+ static void
+ gst_mxf_demux_remove_pad (GstMXFDemuxPad * pad, GstMXFDemux * demux)
+ {
+ GstPad *gpad = GST_PAD_CAST (pad);
+
+ gst_flow_combiner_remove_pad (demux->flowcombiner, gpad);
+ gst_element_remove_pad (GST_ELEMENT (demux), gpad);
+ }
+
+ /* Remove all source pads: first detach each pad from combiner/element,
+ * then drop the array's reference on each, then empty the array. */
+ static void
+ gst_mxf_demux_remove_pads (GstMXFDemux * demux)
+ {
+ g_ptr_array_foreach (demux->src, (GFunc) gst_mxf_demux_remove_pad, demux);
+ g_ptr_array_foreach (demux->src, (GFunc) gst_object_unref, NULL);
+ g_ptr_array_set_size (demux->src, 0);
+ }
+
+ /* Free one partition entry: reset its packs, then the struct itself. */
+ static void
+ gst_mxf_demux_partition_free (GstMXFDemuxPartition * p)
+ {
+ mxf_partition_pack_reset (&p->partition);
+ mxf_primer_pack_reset (&p->primer);
+ g_free (p);
+ }
+
+ /* Drop all parsed MXF container state: the partition list and the
+ * per-essence-track resources (offsets, mapping data, tags, caps). */
+ static void
+ gst_mxf_demux_reset_mxf_state (GstMXFDemux * demux)
+ {
+ guint i;
+
+ GST_DEBUG_OBJECT (demux, "Resetting MXF state");
+
+ g_list_foreach (demux->partitions, (GFunc) gst_mxf_demux_partition_free,
+ NULL);
+ g_list_free (demux->partitions);
+ demux->partitions = NULL;
+
+ demux->current_partition = NULL;
+
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *t =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
+
+ if (t->offsets)
+ g_array_free (t->offsets, TRUE);
+
+ g_free (t->mapping_data);
+
+ if (t->tags)
+ gst_tag_list_unref (t->tags);
+
+ if (t->caps)
+ gst_caps_unref (t->caps);
+ }
+ /* Truncating the array discards the (now freed) track entries */
+ g_array_set_size (demux->essence_tracks, 0);
+ }
+
+ /* Clear every pointer into the metadata tree held by pads and essence
+ * tracks; the pointed-to objects are owned by demux->metadata, so only
+ * the links are dropped here, not the objects. */
+ static void
+ gst_mxf_demux_reset_linked_metadata (GstMXFDemux * demux)
+ {
+ guint i;
+
+ for (i = 0; i < demux->src->len; i++) {
+ GstMXFDemuxPad *pad = g_ptr_array_index (demux->src, i);
+
+ pad->material_track = NULL;
+ pad->material_package = NULL;
+ pad->current_component = NULL;
+ }
+
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *track =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
+
+ track->source_package = NULL;
+ track->delta_id = -1;
+ track->source_track = NULL;
+ }
+
+ demux->current_package = NULL;
+ }
+
+ /* Reset all structural metadata under the metadata writer lock: drop the
+ * links into the tree, destroy and recreate the metadata hash table, and
+ * clear the demuxer tag list. */
+ static void
+ gst_mxf_demux_reset_metadata (GstMXFDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "Resetting metadata");
+
+ g_rw_lock_writer_lock (&demux->metadata_lock);
+
+ demux->update_metadata = TRUE;
+ demux->metadata_resolved = FALSE;
+
+ /* Unlink pads/tracks from the tree before destroying it */
+ gst_mxf_demux_reset_linked_metadata (demux);
+
+ demux->preface = NULL;
+
+ if (demux->metadata) {
+ g_hash_table_destroy (demux->metadata);
+ }
+ demux->metadata = mxf_metadata_hash_table_new ();
+
+ if (demux->tags) {
+ gst_tag_list_unref (demux->tags);
+ demux->tags = NULL;
+ }
+
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+ }
+
+ /* Full demuxer reset: returns the element to its pristine state between
+  * streams. Tears down pads, adapters, partitions/metadata state, the
+  * random index pack and all index tables. */
+ static void
+ gst_mxf_demux_reset (GstMXFDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "cleaning up MXF demuxer");
+ 
+ demux->flushing = FALSE;
+ 
+ demux->state = GST_MXF_DEMUX_STATE_UNKNOWN;
+ 
+ demux->footer_partition_pack_offset = 0;
+ demux->offset = 0;
+ 
+ demux->pull_footer_metadata = TRUE;
+ 
+ /* -1 == run-in length not determined yet */
+ demux->run_in = -1;
+ 
+ memset (&demux->current_package_uid, 0, sizeof (MXFUMID));
+ 
+ gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+ 
+ if (demux->close_seg_event) {
+ gst_event_unref (demux->close_seg_event);
+ demux->close_seg_event = NULL;
+ }
+ 
+ gst_adapter_clear (demux->adapter);
+ 
+ gst_mxf_demux_remove_pads (demux);
+ 
+ if (demux->random_index_pack) {
+ g_array_free (demux->random_index_pack, TRUE);
+ demux->random_index_pack = NULL;
+ }
+ 
+ /* Index table segments that arrived before their index table existed */
+ if (demux->pending_index_table_segments) {
+ GList *l;
+ 
+ for (l = demux->pending_index_table_segments; l; l = l->next) {
+ MXFIndexTableSegment *s = l->data;
+ mxf_index_table_segment_reset (s);
+ g_free (s);
+ }
+ g_list_free (demux->pending_index_table_segments);
+ demux->pending_index_table_segments = NULL;
+ }
+ 
+ if (demux->index_tables) {
+ GList *l;
+ 
+ for (l = demux->index_tables; l; l = l->next) {
+ GstMXFDemuxIndexTable *t = l->data;
+ g_array_free (t->segments, TRUE);
+ g_array_free (t->reverse_temporal_offsets, TRUE);
+ g_free (t);
+ }
+ g_list_free (demux->index_tables);
+ demux->index_tables = NULL;
+ }
+ 
+ demux->index_table_segments_collected = FALSE;
+ 
+ /* Partition/essence-track state, then metadata (in that order: the
+  * metadata reset takes the metadata lock and clears linked pointers) */
+ gst_mxf_demux_reset_mxf_state (demux);
+ gst_mxf_demux_reset_metadata (demux);
+ 
+ demux->have_group_id = FALSE;
+ demux->group_id = G_MAXUINT;
+ }
+
+ /* Pull exactly @size bytes at @offset from upstream into *@buffer.
+  * On any failure — including a short read, which is treated as EOS —
+  * *@buffer is set to NULL and a non-OK flow return is given back. */
+ static GstFlowReturn
+ gst_mxf_demux_pull_range (GstMXFDemux * demux, guint64 offset,
+     guint size, GstBuffer ** buffer)
+ {
+   GstFlowReturn flow;
+ 
+   flow = gst_pad_pull_range (demux->sinkpad, offset, size, buffer);
+   if (G_UNLIKELY (flow != GST_FLOW_OK)) {
+     GST_WARNING_OBJECT (demux,
+         "failed when pulling %u bytes from offset %" G_GUINT64_FORMAT ": %s",
+         size, offset, gst_flow_get_name (flow));
+     *buffer = NULL;
+     return flow;
+   }
+ 
+   /* A short read means we ran past the end of the stream */
+   if (G_UNLIKELY (*buffer && gst_buffer_get_size (*buffer) != size)) {
+     GST_WARNING_OBJECT (demux,
+         "partial pull got %" G_GSIZE_FORMAT " when expecting %u from offset %"
+         G_GUINT64_FORMAT, gst_buffer_get_size (*buffer), size, offset);
+     gst_buffer_unref (*buffer);
+     *buffer = NULL;
+     return GST_FLOW_EOS;
+   }
+ 
+   return GST_FLOW_OK;
+ }
+
+ /* Push @event on every source pad (skipping already-EOS pads for EOS
+  * events). Consumes the caller's reference on @event.
+  * Returns TRUE only if the event was accepted on all pads it was sent to. */
+ static gboolean
+ gst_mxf_demux_push_src_event (GstMXFDemux * demux, GstEvent * event)
+ {
+   gboolean ret = TRUE;
+   guint i;
+ 
+   GST_DEBUG_OBJECT (demux, "Pushing '%s' event downstream",
+       GST_EVENT_TYPE_NAME (event));
+ 
+   for (i = 0; i < demux->src->len; i++) {
+     GstMXFDemuxPad *pad = GST_MXF_DEMUX_PAD (g_ptr_array_index (demux->src, i));
+ 
+     /* Don't send EOS twice on pads that are already EOS */
+     if (pad->eos && GST_EVENT_TYPE (event) == GST_EVENT_EOS)
+       continue;
+ 
+     /* BUGFIX: the accumulator was initialized to TRUE and combined with
+      * |=, so the function unconditionally returned TRUE and per-pad push
+      * failures were silently ignored. AND the results instead so a
+      * failure on any pad is reported to the caller. */
+     ret &= gst_pad_push_event (GST_PAD_CAST (pad), gst_event_ref (event));
+   }
+ 
+   /* Drop the reference passed in by the caller */
+   gst_event_unref (event);
+ 
+   return ret;
+ }
+
+ /* Return the non-EOS source pad with the smallest stream position, or
+  * NULL when every pad is EOS (or no position beats GST_CLOCK_TIME_NONE). */
+ static GstMXFDemuxPad *
+ gst_mxf_demux_get_earliest_pad (GstMXFDemux * demux)
+ {
+   GstMXFDemuxPad *best = NULL;
+   GstClockTime best_position = GST_CLOCK_TIME_NONE;
+   guint idx;
+ 
+   for (idx = 0; idx < demux->src->len; idx++) {
+     GstMXFDemuxPad *candidate = g_ptr_array_index (demux->src, idx);
+ 
+     if (candidate->eos)
+       continue;
+ 
+     if (candidate->position < best_position) {
+       best_position = candidate->position;
+       best = candidate;
+     }
+   }
+ 
+   return best;
+ }
+
+ /* GCompareFunc ordering partitions by their byte offset in the file. */
+ static gint
+ gst_mxf_demux_partition_compare (GstMXFDemuxPartition * a,
+     GstMXFDemuxPartition * b)
+ {
+   guint64 pos_a = a->partition.this_partition;
+   guint64 pos_b = b->partition.this_partition;
+ 
+   if (pos_a < pos_b)
+     return -1;
+   return (pos_a > pos_b) ? 1 : 0;
+ }
+
+ /* Final checks and variable calculation for tracks and partition. This function
+ * can be called repeatedly without any side-effect.
+ */
+ static void
+ gst_mxf_demux_partition_postcheck (GstMXFDemux * demux,
+ GstMXFDemuxPartition * partition)
+ {
+ guint i;
+ /* Saved so current_partition can be temporarily switched below */
+ GstMXFDemuxPartition *old_partition = demux->current_partition;
+ 
+ /* If we already handled this partition or it doesn't contain any essence, skip */
+ if (partition->single_track || !partition->partition.body_sid)
+ return;
+ 
+ /* Look for the (single, non-interleaved) essence track carried by this
+  * partition's body_sid */
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *cand =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
+ 
+ if (cand->body_sid != partition->partition.body_sid)
+ continue;
+ 
+ if (!cand->source_package->is_interleaved) {
+ GST_DEBUG_OBJECT (demux,
+ "Assigning single track %d (0x%08x) to partition at offset %"
+ G_GUINT64_FORMAT, cand->track_id, cand->track_number,
+ partition->partition.this_partition);
+ 
+ partition->single_track = cand;
+ 
+ /* Only for non-frame wrapping with a known essence start */
+ if (partition->essence_container_offset != 0
+ && cand->wrapping != MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ GstMXFKLV essence_klv;
+ GstMXFDemuxIndex entry;
+ /* Update the essence container offset for the fact that the index
+ * stream offset is relative to the essence position and not to the
+ * KLV position */
+ if (gst_mxf_demux_peek_klv_packet (demux,
+ partition->partition.this_partition +
+ partition->essence_container_offset,
+ &essence_klv) == GST_FLOW_OK) {
+ partition->essence_container_offset += essence_klv.data_offset;
+ /* And keep a copy of the clip/custom klv for this partition */
+ partition->clip_klv = essence_klv;
+ GST_DEBUG_OBJECT (demux,
+ "Non-frame wrapping, updated essence_container_offset to %"
+ G_GUINT64_FORMAT, partition->essence_container_offset);
+ 
+ /* And match it against index table, this will also update the track delta_id (if needed) */
+ /* find_entry_for_offset() works on current_partition, so switch it
+  * temporarily and restore the previous one afterwards */
+ demux->current_partition = partition;
+ find_entry_for_offset (demux, cand,
+ essence_klv.offset + essence_klv.data_offset, &entry);
+ demux->current_partition = old_partition;
+ }
+ }
+ 
+ break;
+ }
+ }
+ }
+
+ /* Parse a partition pack KLV at the current demux offset and make it the
+  * current partition. Re-uses an already-parsed partition when possible,
+  * keeps the partition list sorted by offset and fixes up the
+  * prev_partition links. */
+ static GstFlowReturn
+ gst_mxf_demux_handle_partition_pack (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+ MXFPartitionPack partition;
+ GList *l;
+ GstMXFDemuxPartition *p = NULL;
+ GstMapInfo map;
+ gboolean ret;
+ GstFlowReturn flowret;
+ 
+ GST_DEBUG_OBJECT (demux,
+ "Handling partition pack of size %" G_GSIZE_FORMAT " at offset %"
+ G_GUINT64_FORMAT, klv->length, klv->offset);
+ 
+ /* Fast path: a partition at this offset was already parsed (only trusted
+  * when major_version is the expected 0x0001) — don't re-parse it */
+ for (l = demux->partitions; l; l = l->next) {
+ GstMXFDemuxPartition *tmp = l->data;
+ 
+ if (tmp->partition.this_partition + demux->run_in == demux->offset &&
+ tmp->partition.major_version == 0x0001) {
+ GST_DEBUG_OBJECT (demux, "Partition already parsed");
+ p = tmp;
+ goto out;
+ }
+ }
+ 
+ /* Make sure the full KLV payload is available, then parse it */
+ flowret = gst_mxf_demux_fill_klv (demux, klv);
+ if (flowret != GST_FLOW_OK)
+ return flowret;
+ 
+ gst_buffer_map (klv->data, &map, GST_MAP_READ);
+ ret = mxf_partition_pack_parse (&klv->key, &partition, map.data, map.size);
+ gst_buffer_unmap (klv->data, &map);
+ if (!ret) {
+ GST_ERROR_OBJECT (demux, "Parsing partition pack failed");
+ return GST_FLOW_ERROR;
+ }
+ 
+ /* Trust the file position over the value stored in the pack */
+ if (partition.this_partition != demux->offset + demux->run_in) {
+ GST_WARNING_OBJECT (demux,
+ "Partition with incorrect offset (this %" G_GUINT64_FORMAT
+ " demux offset %" G_GUINT64_FORMAT " run_in:%" G_GUINT64_FORMAT ")",
+ partition.this_partition, demux->offset, demux->run_in);
+ partition.this_partition = demux->offset + demux->run_in;
+ }
+ 
+ if (partition.type == MXF_PARTITION_PACK_HEADER)
+ demux->footer_partition_pack_offset = partition.footer_partition;
+ 
+ /* A stale entry for the same offset may exist (e.g. bad major_version
+  * above) — replace its contents instead of inserting a duplicate */
+ for (l = demux->partitions; l; l = l->next) {
+ GstMXFDemuxPartition *tmp = l->data;
+ 
+ if (tmp->partition.this_partition + demux->run_in == demux->offset) {
+ p = tmp;
+ break;
+ }
+ }
+ 
+ if (p) {
+ mxf_partition_pack_reset (&p->partition);
+ memcpy (&p->partition, &partition, sizeof (MXFPartitionPack));
+ } else {
+ p = g_new0 (GstMXFDemuxPartition, 1);
+ memcpy (&p->partition, &partition, sizeof (MXFPartitionPack));
+ demux->partitions =
+ g_list_insert_sorted (demux->partitions, p,
+ (GCompareFunc) gst_mxf_demux_partition_compare);
+ }
+ 
+ gst_mxf_demux_partition_postcheck (demux, p);
+ 
+ /* Rewrite prev_partition links from the (sorted) list so they are
+  * consistent even when packs carried wrong values */
+ for (l = demux->partitions; l; l = l->next) {
+ GstMXFDemuxPartition *a, *b;
+ 
+ if (l->next == NULL)
+ break;
+ 
+ a = l->data;
+ b = l->next->data;
+ 
+ b->partition.prev_partition = a->partition.this_partition;
+ }
+ 
+ out:
+ GST_DEBUG_OBJECT (demux,
+ "Current partition now %p (body_sid:%d index_sid:%d this_partition:%"
+ G_GUINT64_FORMAT ")", p, p->partition.body_sid, p->partition.index_sid,
+ p->partition.this_partition);
+ demux->current_partition = p;
+ 
+ return GST_FLOW_OK;
+ }
+
+ /* Parse a primer pack KLV into the current partition's primer. Requires a
+  * current partition; a second primer pack for the same partition is
+  * ignored. */
+ static GstFlowReturn
+ gst_mxf_demux_handle_primer_pack (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+   GstFlowReturn flow;
+   GstMapInfo info;
+   gboolean parsed;
+ 
+   GST_DEBUG_OBJECT (demux,
+       "Handling primer pack of size %" G_GSIZE_FORMAT " at offset %"
+       G_GUINT64_FORMAT, klv->length, klv->offset);
+ 
+   /* A primer pack is only meaningful inside a partition */
+   if (G_UNLIKELY (demux->current_partition == NULL)) {
+     GST_ERROR_OBJECT (demux, "Primer pack before partition pack");
+     return GST_FLOW_ERROR;
+   }
+ 
+   /* Only the first primer pack of a partition is used */
+   if (G_UNLIKELY (demux->current_partition->primer.mappings != NULL)) {
+     GST_DEBUG_OBJECT (demux, "Primer pack already exists");
+     return GST_FLOW_OK;
+   }
+ 
+   flow = gst_mxf_demux_fill_klv (demux, klv);
+   if (flow != GST_FLOW_OK)
+     return flow;
+ 
+   gst_buffer_map (klv->data, &info, GST_MAP_READ);
+   parsed = mxf_primer_pack_parse (&klv->key, &demux->current_partition->primer,
+       info.data, info.size);
+   gst_buffer_unmap (klv->data, &info);
+ 
+   if (!parsed) {
+     GST_ERROR_OBJECT (demux, "Parsing primer pack failed");
+     return GST_FLOW_ERROR;
+   }
+ 
+   demux->current_partition->primer.offset = demux->offset;
+ 
+   return GST_FLOW_OK;
+ }
+
+ /* Resolve all cross-references between the parsed metadata sets (under the
+  * metadata writer lock), publish the resulting structure as a tag and
+  * detect writer-specific quirks. Returns GST_FLOW_ERROR when the preface
+  * cannot be resolved. */
+ static GstFlowReturn
+ gst_mxf_demux_resolve_references (GstMXFDemux * demux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GHashTableIter iter;
+ MXFMetadataBase *m = NULL;
+ GstStructure *structure;
+ guint i;
+ 
+ g_rw_lock_writer_lock (&demux->metadata_lock);
+ 
+ GST_DEBUG_OBJECT (demux, "Resolve metadata references");
+ demux->update_metadata = FALSE;
+ 
+ if (!demux->metadata) {
+ GST_ERROR_OBJECT (demux, "No metadata yet");
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+ return GST_FLOW_ERROR;
+ }
+ 
+ /* First pass: mark everything unresolved so a re-run starts clean */
+ g_hash_table_iter_init (&iter, demux->metadata);
+ while (g_hash_table_iter_next (&iter, NULL, (gpointer) & m)) {
+ m->resolved = MXF_METADATA_BASE_RESOLVE_STATE_NONE;
+ }
+ 
+ /* Second pass: actually resolve */
+ g_hash_table_iter_init (&iter, demux->metadata);
+ while (g_hash_table_iter_next (&iter, NULL, (gpointer) & m)) {
+ gboolean resolved;
+ 
+ resolved = mxf_metadata_base_resolve (m, demux->metadata);
+ 
+ /* Resolving can fail for anything but the preface, as the preface
+ * will resolve everything required */
+ if (!resolved && MXF_IS_METADATA_PREFACE (m)) {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+ 
+ demux->metadata_resolved = TRUE;
+ 
+ /* Expose the whole metadata tree as a GstStructure tag */
+ structure =
+ mxf_metadata_base_to_structure (MXF_METADATA_BASE (demux->preface));
+ if (!demux->tags)
+ demux->tags = gst_tag_list_new_empty ();
+ 
+ gst_tag_list_add (demux->tags, GST_TAG_MERGE_REPLACE, GST_TAG_MXF_STRUCTURE,
+ structure, NULL);
+ 
+ gst_structure_free (structure);
+ 
+ /* Check for quirks */
+ for (i = 0; i < demux->preface->n_identifications; i++) {
+ MXFMetadataIdentification *identification =
+ demux->preface->identifications[i];
+ 
+ GST_DEBUG_OBJECT (demux, "product:'%s' company:'%s'",
+ identification->product_name, identification->company_name);
+ /* Old OpenCube MXFTk muxers misuse the temporal_order field */
+ if (!g_strcmp0 (identification->product_name, "MXFTk Advanced") &&
+ !g_strcmp0 (identification->company_name, "OpenCube") &&
+ identification->product_version.major <= 2 &&
+ identification->product_version.minor <= 0) {
+ GST_WARNING_OBJECT (demux,
+ "Setting up quirk for misuse of temporal_order field");
+ demux->temporal_order_misuse = TRUE;
+ }
+ }
+ 
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+ 
+ return ret;
+ 
+ error:
+ demux->metadata_resolved = FALSE;
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+ 
+ return ret;
+ }
+
+ /* Look up a package by UMID in the preface's content storage.
+  * Returns NULL when not found or when the storage is unavailable. */
+ static MXFMetadataGenericPackage *
+ gst_mxf_demux_find_package (GstMXFDemux * demux, const MXFUMID * umid)
+ {
+   MXFMetadataContentStorage *storage = demux->preface->content_storage;
+   guint idx;
+ 
+   if (storage == NULL || storage->packages == NULL)
+     return NULL;
+ 
+   for (idx = 0; idx < storage->n_packages; idx++) {
+     MXFMetadataGenericPackage *package = storage->packages[idx];
+ 
+     if (package && mxf_umid_is_equal (&package->package_uid, umid))
+       return package;
+   }
+ 
+   return NULL;
+ }
+
+ /* Choose the package to play. Selection order:
+  *  1. the package explicitly requested via the "package" property,
+  *  2. the previously selected package (current_package_uid),
+  *  3. the preface's primary package,
+  *  4. the first material package in the content storage.
+  * Only material packages and top-level source packages are accepted.
+  * When the selected package differs from the previous one, the source
+  * pads are removed and the UMID/property/tags are updated. */
+ static MXFMetadataGenericPackage *
+ gst_mxf_demux_choose_package (GstMXFDemux * demux)
+ {
+   MXFMetadataGenericPackage *ret = NULL;
+   guint i;
+ 
+   if (demux->requested_package_string) {
+     MXFUMID umid = { {0,}
+     };
+ 
+     if (!mxf_umid_from_string (demux->requested_package_string, &umid)) {
+       GST_ERROR_OBJECT (demux, "Invalid requested package");
+     }
+     g_free (demux->requested_package_string);
+     demux->requested_package_string = NULL;
+ 
+     ret = gst_mxf_demux_find_package (demux, &umid);
+   }
+ 
+   if (!ret && !mxf_umid_is_zero (&demux->current_package_uid))
+     ret = gst_mxf_demux_find_package (demux, &demux->current_package_uid);
+ 
+   if (ret && (MXF_IS_METADATA_MATERIAL_PACKAGE (ret)
+           || (MXF_IS_METADATA_SOURCE_PACKAGE (ret)
+               && MXF_METADATA_SOURCE_PACKAGE (ret)->top_level)))
+     goto done;
+   else if (ret)
+     GST_WARNING_OBJECT (demux,
+         "Current package is not a material package or top-level source package, choosing the first best");
+   else if (!mxf_umid_is_zero (&demux->current_package_uid))
+     GST_WARNING_OBJECT (demux,
+         "Current package not found, choosing the first best");
+ 
+   ret = demux->preface->primary_package;
+   if (ret && (MXF_IS_METADATA_MATERIAL_PACKAGE (ret)
+           || (MXF_IS_METADATA_SOURCE_PACKAGE (ret)
+               && MXF_METADATA_SOURCE_PACKAGE (ret)->top_level)))
+     goto done;
+   ret = NULL;
+ 
+   for (i = 0; i < demux->preface->content_storage->n_packages; i++) {
+     if (demux->preface->content_storage->packages[i] &&
+         MXF_IS_METADATA_MATERIAL_PACKAGE (demux->preface->content_storage->
+             packages[i])) {
+       ret =
+           MXF_METADATA_GENERIC_PACKAGE (demux->preface->content_storage->
+           packages[i]);
+       break;
+     }
+   }
+ 
+   if (!ret) {
+     GST_ERROR_OBJECT (demux, "No material package");
+     return NULL;
+   }
+ 
+ done:
+   /* BUGFIX: this condition was inverted (missing '!') — pads were torn
+    * down when the package had NOT changed, and an actual package switch
+    * never updated the UID, the property notification or the tags. Only
+    * react when the UMID actually differs. */
+   if (!mxf_umid_is_equal (&ret->package_uid, &demux->current_package_uid)) {
+     gchar current_package_string[96];
+ 
+     gst_mxf_demux_remove_pads (demux);
+     memcpy (&demux->current_package_uid, &ret->package_uid, 32);
+ 
+     mxf_umid_to_string (&ret->package_uid, current_package_string);
+     /* Don't leak the previous package string (g_free(NULL) is a no-op) */
+     g_free (demux->current_package_string);
+     demux->current_package_string = g_strdup (current_package_string);
+     g_object_notify (G_OBJECT (demux), "package");
+ 
+     if (!demux->tags)
+       demux->tags = gst_tag_list_new_empty ();
+     gst_tag_list_add (demux->tags, GST_TAG_MERGE_REPLACE, GST_TAG_MXF_UMID,
+         demux->current_package_string, NULL);
+   }
+   demux->current_package = ret;
+ 
+   return ret;
+ }
+
+ /* (Re)build the demuxer's essence-track array from the resolved metadata:
+  * walks every essence container / top-level file package / timeline
+  * essence track, creates or updates the matching GstMXFDemuxEssenceTrack
+  * (caps, handler, tags, wrapping, delta_id, min_edit_units).
+  * Returns GST_FLOW_ERROR when no valid essence track remains or an
+  * interleaving constraint is violated. Safe to call repeatedly. */
+ static GstFlowReturn
+ gst_mxf_demux_update_essence_tracks (GstMXFDemux * demux)
+ {
+ guint i, j, k;
+ 
+ g_return_val_if_fail (demux->preface->content_storage, GST_FLOW_ERROR);
+ g_return_val_if_fail (demux->preface->content_storage->essence_container_data,
+ GST_FLOW_ERROR);
+ 
+ for (i = 0; i < demux->preface->content_storage->n_essence_container_data;
+ i++) {
+ MXFMetadataEssenceContainerData *edata;
+ MXFMetadataSourcePackage *package;
+ MXFFraction common_rate = { 0, 0 };
+ 
+ if (demux->preface->content_storage->essence_container_data[i] == NULL)
+ continue;
+ 
+ edata = demux->preface->content_storage->essence_container_data[i];
+ 
+ if (!edata->linked_package) {
+ GST_WARNING_OBJECT (demux, "Linked package not resolved");
+ continue;
+ }
+ 
+ package = edata->linked_package;
+ 
+ if (!package->parent.tracks) {
+ GST_WARNING_OBJECT (demux, "Linked package with no resolved tracks");
+ continue;
+ }
+ 
+ for (j = 0; j < package->parent.n_tracks; j++) {
+ MXFMetadataTimelineTrack *track;
+ GstMXFDemuxEssenceTrack *etrack = NULL;
+ GstCaps *caps = NULL;
+ gboolean new = FALSE;
+ 
+ if (!package->parent.tracks[j]
+ || !MXF_IS_METADATA_TIMELINE_TRACK (package->parent.tracks[j])) {
+ GST_DEBUG_OBJECT (demux,
+ "Skipping non-timeline track (id:%d number:0x%08x)",
+ package->parent.tracks[j]->track_id,
+ package->parent.tracks[j]->track_number);
+ continue;
+ }
+ 
+ track = MXF_METADATA_TIMELINE_TRACK (package->parent.tracks[j]);
+ /* Only essence tracks (track type 0x3X) are of interest here */
+ if ((track->parent.type & 0xf0) != 0x30) {
+ GST_DEBUG_OBJECT (demux,
+ "Skipping track of type 0x%02x (id:%d number:0x%08x)",
+ track->parent.type, track->parent.track_id,
+ track->parent.track_number);
+ continue;
+ }
+ 
+ if (track->edit_rate.n <= 0 || track->edit_rate.d <= 0) {
+ GST_WARNING_OBJECT (demux, "Invalid edit rate");
+ continue;
+ }
+ 
+ if (package->is_interleaved) {
+ /*
+ * S377-1:2019 "9.4.2 The MXF timing model"
+ *
+ * The value of Edit Rate shall be identical for every timeline Essence
+ * Track of the Top-Level File Package.
+ *
+ * The value of Edit Rate of the timeline Essence Tracks of one
+ * Top-Level File Package need not match the Edit Rate of the Essence
+ * Tracks of the other Top-Level File Packages.
+ *
+ * S377-1:2019 "9.5.5 Top-Level File Packages"
+ *
+ *12. All Essence Tracks of a Top-Level File Package **shall** have the
+ * same value of Edit Rate. All other Tracks of a Top-Level File
+ * Package **should** have the same value of Edit Rate as the
+ * Essence Tracks.
+ */
+ if (common_rate.n == 0 && common_rate.d == 0) {
+ common_rate = track->edit_rate;
+ } else if (common_rate.n * track->edit_rate.d !=
+ common_rate.d * track->edit_rate.n) {
+ GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE, (NULL),
+ ("Interleaved File Package doesn't have identical edit rate on all tracks."));
+ return GST_FLOW_ERROR;
+ }
+ }
+ 
+ /* Try to find an existing essence track for this (track_number,
+  * body_sid) pair so re-runs update in place */
+ for (k = 0; k < demux->essence_tracks->len; k++) {
+ GstMXFDemuxEssenceTrack *tmp =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack,
+ k);
+ 
+ if (tmp->track_number == track->parent.track_number &&
+ tmp->body_sid == edata->body_sid) {
+ if (tmp->track_id != track->parent.track_id ||
+ !mxf_umid_is_equal (&tmp->source_package_uid,
+ &package->parent.package_uid)) {
+ GST_ERROR_OBJECT (demux, "There already exists a different track "
+ "with this track number and body sid but a different source "
+ "or source track id -- ignoring");
+ continue;
+ }
+ etrack = tmp;
+ break;
+ }
+ }
+ 
+ /* No match: append a fresh essence track entry */
+ if (!etrack) {
+ GstMXFDemuxEssenceTrack tmp;
+ 
+ memset (&tmp, 0, sizeof (tmp));
+ tmp.body_sid = edata->body_sid;
+ tmp.index_sid = edata->index_sid;
+ tmp.track_number = track->parent.track_number;
+ tmp.track_id = track->parent.track_id;
+ memcpy (&tmp.source_package_uid, &package->parent.package_uid, 32);
+ 
+ /* Position is only known when the current partition starts this
+  * track's essence container at body offset 0 */
+ if (demux->current_partition->partition.body_sid == edata->body_sid &&
+ demux->current_partition->partition.body_offset == 0)
+ tmp.position = 0;
+ else
+ tmp.position = -1;
+ 
+ g_array_append_val (demux->essence_tracks, tmp);
+ etrack =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack,
+ demux->essence_tracks->len - 1);
+ new = TRUE;
+ }
+ 
+ /* Reset links into metadata; they are re-established below */
+ etrack->source_package = NULL;
+ etrack->source_track = NULL;
+ etrack->delta_id = -1;
+ 
+ if (!track->parent.sequence) {
+ GST_WARNING_OBJECT (demux, "Source track has no sequence");
+ goto next;
+ }
+ 
+ if (track->parent.n_descriptor == 0) {
+ GST_WARNING_OBJECT (demux, "Source track has no descriptors");
+ goto next;
+ }
+ 
+ if (track->parent.sequence->duration > etrack->duration)
+ etrack->duration = track->parent.sequence->duration;
+ 
+ /* Drop state from a previous run before re-deriving it */
+ g_free (etrack->mapping_data);
+ etrack->mapping_data = NULL;
+ etrack->handler = NULL;
+ etrack->handle_func = NULL;
+ if (etrack->tags)
+ gst_tag_list_unref (etrack->tags);
+ etrack->tags = NULL;
+ 
+ etrack->handler = mxf_essence_element_handler_find (track);
+ if (!etrack->handler) {
+ /* No handler: synthesize generic caps from the essence container
+  * and compression ULs so the stream is still identifiable */
+ gchar essence_container[48];
+ gchar essence_compression[48];
+ gchar *name;
+ 
+ GST_WARNING_OBJECT (demux,
+ "No essence element handler for track %u found", i);
+ 
+ mxf_ul_to_string (&track->parent.descriptor[0]->essence_container,
+ essence_container);
+ 
+ if (track->parent.type == MXF_METADATA_TRACK_PICTURE_ESSENCE) {
+ if (MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->parent.
+ descriptor[0]))
+ mxf_ul_to_string (&MXF_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR
+ (track->parent.descriptor[0])->picture_essence_coding,
+ essence_compression);
+ 
+ name =
+ g_strdup_printf ("video/x-mxf-%s-%s", essence_container,
+ essence_compression);
+ } else if (track->parent.type == MXF_METADATA_TRACK_SOUND_ESSENCE) {
+ if (MXF_IS_METADATA_GENERIC_SOUND_ESSENCE_DESCRIPTOR (track->parent.
+ descriptor[0]))
+ mxf_ul_to_string (&MXF_METADATA_GENERIC_SOUND_ESSENCE_DESCRIPTOR
+ (track->parent.descriptor[0])->sound_essence_compression,
+ essence_compression);
+ 
+ name =
+ g_strdup_printf ("audio/x-mxf-%s-%s", essence_container,
+ essence_compression);
+ } else if (track->parent.type == MXF_METADATA_TRACK_DATA_ESSENCE) {
+ if (MXF_IS_METADATA_GENERIC_DATA_ESSENCE_DESCRIPTOR (track->parent.
+ descriptor[0]))
+ mxf_ul_to_string (&MXF_METADATA_GENERIC_DATA_ESSENCE_DESCRIPTOR
+ (track->parent.descriptor[0])->data_essence_coding,
+ essence_compression);
+ 
+ name =
+ g_strdup_printf ("application/x-mxf-%s-%s", essence_container,
+ essence_compression);
+ } else {
+ name = NULL;
+ g_assert_not_reached ();
+ }
+ 
+ caps = gst_caps_new_empty_simple (name);
+ g_free (name);
+ etrack->intra_only = FALSE;
+ } else {
+ caps =
+ etrack->handler->create_caps (track, &etrack->tags,
+ &etrack->intra_only, &etrack->handle_func, &etrack->mapping_data);
+ }
+ 
+ GST_DEBUG_OBJECT (demux, "Created caps %" GST_PTR_FORMAT, caps);
+ 
+ /* Keep existing caps when an update produced none; replace only when
+  * the new caps actually differ */
+ if (!caps && new) {
+ GST_WARNING_OBJECT (demux, "No caps created, ignoring stream");
+ g_free (etrack->mapping_data);
+ etrack->mapping_data = NULL;
+ if (etrack->tags)
+ gst_tag_list_unref (etrack->tags);
+ etrack->tags = NULL;
+ goto next;
+ } else if (!caps) {
+ GST_WARNING_OBJECT (demux, "Couldn't create updated caps for stream");
+ } else if (!etrack->caps || !gst_caps_is_equal (etrack->caps, caps)) {
+ if (etrack->caps)
+ gst_caps_unref (etrack->caps);
+ etrack->caps = caps;
+ } else {
+ gst_caps_unref (caps);
+ caps = NULL;
+ }
+ 
+ etrack->min_edit_units = 1;
+ /* Ensure we don't output one buffer per sample for audio */
+ if (gst_util_uint64_scale (GST_SECOND, track->edit_rate.d,
+ track->edit_rate.n) < 10 * GST_MSECOND) {
+ GstStructure *s = gst_caps_get_structure (etrack->caps, 0);
+ const gchar *name = gst_structure_get_name (s);
+ if (g_str_has_prefix (name, "audio/x-raw")) {
+ etrack->min_edit_units =
+ gst_util_uint64_scale (25 * GST_MSECOND, track->edit_rate.n,
+ track->edit_rate.d * GST_SECOND);
+ GST_DEBUG_OBJECT (demux, "Seting miminum number of edit units to %u",
+ etrack->min_edit_units);
+ }
+ }
+ 
+ /* FIXME : We really should just abort/ignore the stream completely if we
+ * don't have a handler for it */
+ if (etrack->handler != NULL)
+ etrack->wrapping = etrack->handler->get_track_wrapping (track);
+ else
+ etrack->wrapping = MXF_ESSENCE_WRAPPING_UNKNOWN_WRAPPING;
+ 
+ if (package->is_interleaved) {
+ GST_DEBUG_OBJECT (demux,
+ "track comes from interleaved source package with %d track(s), setting delta_id to -1",
+ package->parent.n_tracks);
+ if (etrack->wrapping != MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE, (NULL),
+ ("Non-frame-wrapping is not allowed in interleaved File Package."));
+ return GST_FLOW_ERROR;
+ }
+ etrack->delta_id = MXF_INDEX_DELTA_ID_UNKNOWN;
+ } else {
+ etrack->delta_id = MXF_INDEX_DELTA_ID_UNKNOWN;
+ }
+ etrack->source_package = package;
+ etrack->source_track = track;
+ continue;
+ 
+ next:
+ /* A freshly appended entry that failed validation is removed again */
+ if (new) {
+ g_free (etrack->mapping_data);
+ if (etrack->tags)
+ gst_tag_list_unref (etrack->tags);
+ if (etrack->caps)
+ gst_caps_unref (etrack->caps);
+ 
+ g_array_remove_index (demux->essence_tracks,
+ demux->essence_tracks->len - 1);
+ }
+ }
+ }
+ 
+ if (demux->essence_tracks->len == 0) {
+ GST_ERROR_OBJECT (demux, "No valid essence tracks in this file");
+ return GST_FLOW_ERROR;
+ }
+ 
+ /* Final sanity pass: every remaining track must be fully linked */
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *etrack =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
+ 
+ if (!etrack->source_package || !etrack->source_track || !etrack->caps) {
+ GST_ERROR_OBJECT (demux, "Failed to update essence track %u", i);
+ return GST_FLOW_ERROR;
+ }
+ 
+ }
+ 
+ return GST_FLOW_OK;
+ }
+
+ /* Return the essence container data entry linked to @package, or NULL if
+  * no entry in @storage references it. */
+ static MXFMetadataEssenceContainerData *
+ essence_container_for_source_package (MXFMetadataContentStorage * storage,
+     MXFMetadataSourcePackage * package)
+ {
+   guint idx;
+ 
+   for (idx = 0; idx < storage->n_essence_container_data; idx++) {
+     MXFMetadataEssenceContainerData *candidate =
+         storage->essence_container_data[idx];
+ 
+     if (candidate == NULL)
+       continue;
+     if (candidate->linked_package == package)
+       return candidate;
+   }
+ 
+   return NULL;
+ }
+
+ /* Debug-log the file topology starting from the preface: first all
+  * material packages (outputs), then all source/file packages (inputs),
+  * with their tracks, descriptors and sequence components. Debug-only;
+  * has no effect on demuxing state. */
+ static void
+ gst_mxf_demux_show_topology (GstMXFDemux * demux)
+ {
+   GList *material_packages = NULL;
+   GList *file_packages = NULL;
+   GList *tmp;
+   MXFMetadataContentStorage *storage = demux->preface->content_storage;
+   guint i;
+   gchar str[96];
+ 
+   /* Show the topology starting from the preface */
+   GST_DEBUG_OBJECT (demux, "Topology");
+ 
+   for (i = 0; i < storage->n_packages; i++) {
+     MXFMetadataGenericPackage *pack = storage->packages[i];
+     if (MXF_IS_METADATA_MATERIAL_PACKAGE (pack))
+       material_packages = g_list_append (material_packages, pack);
+     else if (MXF_IS_METADATA_SOURCE_PACKAGE (pack))
+       file_packages = g_list_append (file_packages, pack);
+     else
+       GST_DEBUG_OBJECT (demux, "Unknown package type");
+   }
+ 
+   GST_DEBUG_OBJECT (demux, "Number of Material Package (i.e. output) : %d",
+       g_list_length (material_packages));
+   for (tmp = material_packages; tmp; tmp = tmp->next) {
+     MXFMetadataMaterialPackage *pack = (MXFMetadataMaterialPackage *) tmp->data;
+     GST_DEBUG_OBJECT (demux, "  Package with %d tracks , UID:%s",
+         pack->n_tracks, mxf_umid_to_string (&pack->package_uid, str));
+     for (i = 0; i < pack->n_tracks; i++) {
+       MXFMetadataTrack *track = pack->tracks[i];
+       if (track == NULL) {
+         GST_DEBUG_OBJECT (demux, "    Unknown/Unhandled track UUID %s",
+             mxf_uuid_to_string (&pack->tracks_uids[i], str));
+       } else if (MXF_IS_METADATA_TIMELINE_TRACK (track)) {
+         MXFMetadataTimelineTrack *mtrack = (MXFMetadataTimelineTrack *) track;
+         GST_DEBUG_OBJECT (demux,
+             "    Timeline Track id:%d number:0x%08x name:`%s` edit_rate:%d/%d origin:%"
+             G_GINT64_FORMAT, track->track_id, track->track_number,
+             track->track_name, mtrack->edit_rate.n, mtrack->edit_rate.d,
+             mtrack->origin);
+       } else {
+         GST_DEBUG_OBJECT (demux,
+             "    Non-Timeline-Track id:%d number:0x%08x name:`%s`",
+             track->track_id, track->track_number, track->track_name);
+       }
+       if (track) {
+         MXFMetadataSequence *sequence = track->sequence;
+         guint si;
+         GST_DEBUG_OBJECT (demux,
+             "      Sequence duration:%" G_GINT64_FORMAT
+             " n_structural_components:%d", sequence->duration,
+             sequence->n_structural_components);
+         for (si = 0; si < sequence->n_structural_components; si++) {
+           MXFMetadataStructuralComponent *comp =
+               sequence->structural_components[si];
+           GST_DEBUG_OBJECT (demux,
+               "        Component #%d duration:%" G_GINT64_FORMAT, si,
+               comp->duration);
+           if (MXF_IS_METADATA_SOURCE_CLIP (comp)) {
+             MXFMetadataSourceClip *clip = (MXFMetadataSourceClip *) comp;
+             GST_DEBUG_OBJECT (demux,
+                 "          Clip start_position:%" G_GINT64_FORMAT
+                 " source_track_id:%d source_package_id:%s",
+                 clip->start_position, clip->source_track_id,
+                 mxf_umid_to_string (&clip->source_package_id, str));
+           }
+         }
+ 
+       }
+     }
+   }
+ 
+   GST_DEBUG_OBJECT (demux, "Number of File Packages (i.e. input) : %d",
+       g_list_length (file_packages));
+   for (tmp = file_packages; tmp; tmp = tmp->next) {
+     MXFMetadataMaterialPackage *pack = (MXFMetadataMaterialPackage *) tmp->data;
+     MXFMetadataSourcePackage *src = (MXFMetadataSourcePackage *) pack;
+     MXFMetadataEssenceContainerData *econt =
+         essence_container_for_source_package (storage, src);
+     /* BUGFIX: econt can be NULL when no essence container data links to
+      * this package — don't dereference it unconditionally. */
+     GST_DEBUG_OBJECT (demux,
+         "  Package (body_sid:%d index_sid:%d top_level:%d) with %d tracks , UID:%s",
+         econt ? econt->body_sid : 0, econt ? econt->index_sid : 0,
+         src->top_level, pack->n_tracks,
+         mxf_umid_to_string (&pack->package_uid, str));
+     /* BUGFIX: the descriptor may not be resolved (NULL) */
+     GST_DEBUG_OBJECT (demux, "  Package descriptor : %s",
+         src->descriptor ? g_type_name (G_OBJECT_TYPE (src->descriptor)) :
+         "NULL");
+     for (i = 0; i < pack->n_tracks; i++) {
+       MXFMetadataTrack *track = pack->tracks[i];
+       MXFMetadataSequence *sequence;
+       guint di, si;
+ 
+       /* BUGFIX: unresolved tracks are NULL (the material-package loop above
+        * already guards for this); skip them instead of crashing. */
+       if (track == NULL) {
+         GST_DEBUG_OBJECT (demux, "    Unknown/Unhandled track UUID %s",
+             mxf_uuid_to_string (&pack->tracks_uids[i], str));
+         continue;
+       }
+       sequence = track->sequence;
+       if (MXF_IS_METADATA_TIMELINE_TRACK (track)) {
+         MXFMetadataTimelineTrack *mtrack = (MXFMetadataTimelineTrack *) track;
+         GST_DEBUG_OBJECT (demux,
+             "    Timeline Track id:%d number:0x%08x name:`%s` edit_rate:%d/%d origin:%"
+             G_GINT64_FORMAT, track->track_id, track->track_number,
+             track->track_name, mtrack->edit_rate.n, mtrack->edit_rate.d,
+             mtrack->origin);
+       } else {
+         GST_DEBUG_OBJECT (demux,
+             "    Non-Timeline-Track id:%d number:0x%08x name:`%s` type:0x%x",
+             track->track_id, track->track_number, track->track_name,
+             track->type);
+       }
+       for (di = 0; di < track->n_descriptor; di++) {
+         MXFMetadataFileDescriptor *desc = track->descriptor[di];
+         GST_DEBUG_OBJECT (demux, "      Descriptor %s %s",
+             g_type_name (G_OBJECT_TYPE (desc)),
+             mxf_ul_to_string (&desc->essence_container, str));
+       }
+       /* BUGFIX: sequence can be NULL for unresolved tracks */
+       if (sequence) {
+         GST_DEBUG_OBJECT (demux,
+             "      Sequence duration:%" G_GINT64_FORMAT
+             " n_structural_components:%d", sequence->duration,
+             sequence->n_structural_components);
+         for (si = 0; si < sequence->n_structural_components; si++) {
+           MXFMetadataStructuralComponent *comp =
+               sequence->structural_components[si];
+           GST_DEBUG_OBJECT (demux,
+               "        Component #%d duration:%" G_GINT64_FORMAT, si,
+               comp->duration);
+         }
+       }
+     }
+   }
+ 
+   g_list_free (material_packages);
+   g_list_free (file_packages);
+ }
+
+ static GstFlowReturn
+ gst_mxf_demux_update_tracks (GstMXFDemux * demux)
+ {
+ MXFMetadataGenericPackage *current_package = NULL;
+ guint i, j, k;
+ gboolean first_run;
+ guint component_index;
+ GstFlowReturn ret;
+ GList *pads = NULL, *l;
+ GstVideoTimeCode start_timecode = GST_VIDEO_TIME_CODE_INIT;
+
+ g_rw_lock_writer_lock (&demux->metadata_lock);
+ GST_DEBUG_OBJECT (demux, "Updating tracks");
+
+ gst_mxf_demux_show_topology (demux);
+
+ if ((ret = gst_mxf_demux_update_essence_tracks (demux)) != GST_FLOW_OK) {
+ goto error;
+ }
+
+ current_package = gst_mxf_demux_choose_package (demux);
+
+ if (!current_package) {
+ GST_ERROR_OBJECT (demux, "Unable to find current package");
+ ret = GST_FLOW_ERROR;
+ goto error;
+ } else if (!current_package->tracks) {
+ GST_ERROR_OBJECT (demux, "Current package has no (resolved) tracks");
+ ret = GST_FLOW_ERROR;
+ goto error;
+ } else if (!current_package->n_essence_tracks) {
+ GST_ERROR_OBJECT (demux, "Current package has no essence tracks");
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+
+ first_run = (demux->src->len == 0);
+
+ /* For material packages, there must be one timecode track with one
+ * continuous timecode. For source packages there might be multiple,
+ * discontinuous timecode components.
+ * TODO: Support multiple timecode components
+ */
+ for (i = 0; i < current_package->n_tracks; i++) {
+ MXFMetadataTimelineTrack *track = NULL;
+ MXFMetadataSequence *sequence = NULL;
+ MXFMetadataTimecodeComponent *component = NULL;
+
+ if (!current_package->tracks[i]) {
+ GST_WARNING_OBJECT (demux, "Unresolved track");
+ continue;
+ }
+
+ if (!MXF_IS_METADATA_TIMELINE_TRACK (current_package->tracks[i])) {
+ GST_DEBUG_OBJECT (demux, "Skipping Non-timeline track");
+ continue;
+ }
+
+
+ track = MXF_METADATA_TIMELINE_TRACK (current_package->tracks[i]);
+
+ if (!track->parent.sequence)
+ continue;
+ sequence = track->parent.sequence;
+ if (sequence->n_structural_components != 1 ||
+ !sequence->structural_components[0]
+ ||
+ !MXF_IS_METADATA_TIMECODE_COMPONENT (sequence->structural_components
+ [0]))
+ continue;
+
+ component =
+ MXF_METADATA_TIMECODE_COMPONENT (sequence->structural_components[0]);
+
+ /* Not a timecode track */
+ if (track->parent.type && (track->parent.type & 0xf0) != 0x10)
+ continue;
+
+ /* Main timecode track must have id 1, all others must be 0 */
+ if (track->parent.track_id != 1)
+ continue;
+
+ gst_video_time_code_init (&start_timecode, track->edit_rate.n,
+ track->edit_rate.d, NULL, (component->drop_frame
+ ?
+ GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME
+ : GST_VIDEO_TIME_CODE_FLAGS_NONE), 0, 0, 0, 0, 0);
+ gst_video_time_code_add_frames (&start_timecode, track->origin);
+ gst_video_time_code_add_frames (&start_timecode, component->start_timecode);
+ break;
+ }
+
+ for (i = 0; i < current_package->n_tracks; i++) {
+ MXFMetadataTimelineTrack *track = NULL;
+ MXFMetadataSequence *sequence;
+ MXFMetadataSourceClip *component = NULL;
+ MXFMetadataSourcePackage *source_package = NULL;
+ MXFMetadataTimelineTrack *source_track = NULL;
+ GstMXFDemuxEssenceTrack *etrack = NULL;
+ GstMXFDemuxPad *pad = NULL;
+ GstCaps *pad_caps;
+
+ GST_DEBUG_OBJECT (demux, "Handling track %u", i);
+
+ if (!current_package->tracks[i]) {
+ GST_WARNING_OBJECT (demux, "Unresolved track");
+ continue;
+ }
+
+ if (!MXF_IS_METADATA_TIMELINE_TRACK (current_package->tracks[i])) {
+ GST_DEBUG_OBJECT (demux, "No timeline track");
+ continue;
+ }
+
+ track = MXF_METADATA_TIMELINE_TRACK (current_package->tracks[i]);
+
+ if (!first_run) {
+ /* Find pad from track_id */
+ for (j = 0; j < demux->src->len; j++) {
+ GstMXFDemuxPad *tmp = g_ptr_array_index (demux->src, j);
+
+ if (tmp->track_id == track->parent.track_id) {
+ pad = tmp;
+ break;
+ }
+ }
+ }
+
+ if (pad)
+ component_index = pad->current_component_index;
+ else
+ component_index = 0;
+
+ if (!track->parent.sequence) {
+ GST_WARNING_OBJECT (demux, "Track with no sequence");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ sequence = track->parent.sequence;
+
+ if (MXF_IS_METADATA_SOURCE_PACKAGE (current_package)) {
+ GST_DEBUG_OBJECT (demux, "Playing source package");
+
+ component = NULL;
+ source_package = MXF_METADATA_SOURCE_PACKAGE (current_package);
+ source_track = track;
+ } else if (sequence->structural_components
+ &&
+ MXF_IS_METADATA_SOURCE_CLIP (sequence->structural_components
+ [component_index])) {
+ GST_DEBUG_OBJECT (demux, "Playing material package");
+
+ component =
+ MXF_METADATA_SOURCE_CLIP (sequence->structural_components
+ [component_index]);
+ if (!component) {
+ GST_WARNING_OBJECT (demux, "NULL component in non-source package");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (component->source_package && component->source_package->top_level &&
+ MXF_METADATA_GENERIC_PACKAGE (component->source_package)->tracks) {
+ MXFMetadataGenericPackage *tmp_pkg =
+ MXF_METADATA_GENERIC_PACKAGE (component->source_package);
+
+ source_package = component->source_package;
+
+ for (k = 0; k < tmp_pkg->n_tracks; k++) {
+ MXFMetadataTrack *tmp = tmp_pkg->tracks[k];
+
+ if (tmp->track_id == component->source_track_id) {
+ source_track = MXF_METADATA_TIMELINE_TRACK (tmp);
+ break;
+ }
+ }
+ }
+ }
+
+ if (track->parent.type && (track->parent.type & 0xf0) != 0x30) {
+ GST_DEBUG_OBJECT (demux,
+ "No essence track. type:0x%02x track_id:%d track_number:0x%08x",
+ track->parent.type, track->parent.track_id,
+ track->parent.track_number);
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (!source_package || track->parent.type == MXF_METADATA_TRACK_UNKNOWN
+ || !source_track) {
+ GST_WARNING_OBJECT (demux,
+ "No source package or track type for track found");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ for (k = 0; k < demux->essence_tracks->len; k++) {
+ GstMXFDemuxEssenceTrack *tmp =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, k);
+
+ if (tmp->source_package == source_package &&
+ tmp->source_track == source_track) {
+ etrack = tmp;
+ break;
+ }
+ }
+
+ if (!etrack) {
+ GST_WARNING_OBJECT (demux, "No essence track for this track found");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (track->edit_rate.n <= 0 || track->edit_rate.d <= 0 ||
+ source_track->edit_rate.n <= 0 || source_track->edit_rate.d <= 0) {
+ GST_WARNING_OBJECT (demux, "Track has an invalid edit rate");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (MXF_IS_METADATA_MATERIAL_PACKAGE (current_package) && !component) {
+ GST_WARNING_OBJECT (demux,
+ "Playing material package but found no component for track");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (!source_package->descriptor) {
+ GST_WARNING_OBJECT (demux, "Source package has no descriptors");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (!source_track->parent.descriptor) {
+ GST_WARNING_OBJECT (demux, "No descriptor found for track");
+ if (!pad) {
+ continue;
+ } else {
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+
+ if (!pad && first_run) {
+ GstPadTemplate *templ;
+ gchar *pad_name;
+
+ templ =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (demux),
+ "track_%u");
+ pad_name = g_strdup_printf ("track_%u", track->parent.track_id);
+
+ g_assert (templ != NULL);
+
+ /* Create pad */
+ pad = (GstMXFDemuxPad *) g_object_new (GST_TYPE_MXF_DEMUX_PAD,
+ "name", pad_name, "direction", GST_PAD_SRC, "template", templ, NULL);
+ pad->need_segment = TRUE;
+ pad->eos = FALSE;
+ g_free (pad_name);
+
+ if (demux->tags)
+ pad->tags = gst_tag_list_copy (demux->tags);
+ }
+
+ if (!pad) {
+ GST_WARNING_OBJECT (demux,
+ "Not the first pad addition run, ignoring new track");
+ continue;
+ }
+
+ /* Update pad */
+ pad->track_id = track->parent.track_id;
+
+ pad->material_package = current_package;
+ pad->material_track = track;
+
+ pad->start_timecode = start_timecode;
+
+ /* If we just added the pad initialize for the current component */
+ if (first_run && MXF_IS_METADATA_MATERIAL_PACKAGE (current_package)) {
+ pad->current_component_index = 0;
+ pad->current_component_start = source_track->origin;
+ pad->current_component_start_position = 0;
+
+ if (component->parent.duration >= -1)
+ pad->current_component_duration = component->parent.duration;
+ else
+ pad->current_component_duration = -1;
+
+ if (track->edit_rate.n != source_track->edit_rate.n ||
+ track->edit_rate.d != source_track->edit_rate.d) {
+ pad->current_component_start +=
+ gst_util_uint64_scale (component->start_position,
+ source_track->edit_rate.n * track->edit_rate.d,
+ source_track->edit_rate.d * track->edit_rate.n);
+
+ if (pad->current_component_duration != -1)
+ pad->current_component_duration =
+ gst_util_uint64_scale (pad->current_component_duration,
+ source_track->edit_rate.n * track->edit_rate.d,
+ source_track->edit_rate.d * track->edit_rate.n);
+ } else {
+ pad->current_component_start += component->start_position;
+ }
+ pad->current_essence_track_position = pad->current_component_start;
+ }
+
+ /* NULL iff playing a source package */
+ pad->current_component = component;
+
+ pad->current_essence_track = etrack;
+
+ if (etrack->tags) {
+ if (pad->tags)
+ gst_tag_list_insert (pad->tags, etrack->tags, GST_TAG_MERGE_REPLACE);
+ else
+ pad->tags = gst_tag_list_copy (etrack->tags);
+ }
+
+ pad_caps = gst_pad_get_current_caps (GST_PAD_CAST (pad));
+ if (pad_caps && !gst_caps_is_equal (pad_caps, etrack->caps)) {
+ gst_pad_set_caps (GST_PAD_CAST (pad), etrack->caps);
+ } else if (!pad_caps) {
+ GstEvent *event;
+ gchar *stream_id;
+
+ gst_pad_set_event_function (GST_PAD_CAST (pad),
+ GST_DEBUG_FUNCPTR (gst_mxf_demux_src_event));
+
+ gst_pad_set_query_function (GST_PAD_CAST (pad),
+ GST_DEBUG_FUNCPTR (gst_mxf_demux_src_query));
+
+ gst_pad_use_fixed_caps (GST_PAD_CAST (pad));
+ gst_pad_set_active (GST_PAD_CAST (pad), TRUE);
+
+ stream_id =
+ gst_pad_create_stream_id_printf (GST_PAD_CAST (pad),
+ GST_ELEMENT_CAST (demux), "%03u", pad->track_id);
+
+ event =
+ gst_pad_get_sticky_event (demux->sinkpad, GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &demux->group_id))
+ demux->have_group_id = TRUE;
+ else
+ demux->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!demux->have_group_id) {
+ demux->have_group_id = TRUE;
+ demux->group_id = gst_util_group_id_next ();
+ }
+ event = gst_event_new_stream_start (stream_id);
+ if (demux->have_group_id)
+ gst_event_set_group_id (event, demux->group_id);
+
+ gst_pad_push_event (GST_PAD_CAST (pad), event);
+ g_free (stream_id);
+
+ gst_pad_set_caps (GST_PAD_CAST (pad), etrack->caps);
+
+ pads = g_list_prepend (pads, gst_object_ref (pad));
+
+ g_ptr_array_add (demux->src, pad);
+ pad->discont = TRUE;
+ }
+ if (pad_caps)
+ gst_caps_unref (pad_caps);
+ }
+
+ if (demux->src->len > 0) {
+ for (i = 0; i < demux->src->len; i++) {
+ GstMXFDemuxPad *pad = g_ptr_array_index (demux->src, i);
+
+ if (!pad->material_track || !pad->material_package) {
+ GST_ERROR_OBJECT (demux, "Unable to update existing pad");
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+ }
+ } else {
+ GST_ERROR_OBJECT (demux, "Couldn't create any streams");
+ ret = GST_FLOW_ERROR;
+ goto error;
+ }
+
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+
+ for (l = pads; l; l = l->next) {
+ gst_flow_combiner_add_pad (demux->flowcombiner, l->data);
+ gst_element_add_pad (GST_ELEMENT_CAST (demux), l->data);
+ }
+ g_list_free (pads);
+
+ if (first_run)
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+
+ /* Re-check all existing partitions for source package linking in case the
+ * header partition contains data (allowed in early MXF versions) */
+ for (l = demux->partitions; l; l = l->next)
+ gst_mxf_demux_partition_postcheck (demux, (GstMXFDemuxPartition *) l->data);
+
+ return GST_FLOW_OK;
+
+ error:
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+ return ret;
+ }
+
+ /* Parses one structural-metadata KLV packet and registers the resulting
+  * MXFMetadata object in demux->metadata, keyed by its instance UID.
+  * Requires a current partition whose primer pack has been parsed.
+  * Returns GST_FLOW_OK for benign skips (already parsed, empty value,
+  * unknown type, older duplicate) and GST_FLOW_ERROR on hard failures. */
+ static GstFlowReturn
+ gst_mxf_demux_handle_metadata (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+ guint16 type;
+ MXFMetadata *metadata = NULL, *old = NULL;
+ GstMapInfo map;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* The metadata type is encoded big-endian in bytes 13-14 of the 16-byte key */
+ type = GST_READ_UINT16_BE (&klv->key.u[13]);
+
+ GST_DEBUG_OBJECT (demux,
+ "Handling metadata of size %" G_GSIZE_FORMAT " at offset %"
+ G_GUINT64_FORMAT " of type 0x%04x", klv->length, klv->offset, type);
+
+ /* Metadata can only be interpreted relative to a partition and its primer */
+ if (G_UNLIKELY (!demux->current_partition)) {
+ GST_ERROR_OBJECT (demux, "Partition pack doesn't exist");
+ return GST_FLOW_ERROR;
+ }
+
+ if (G_UNLIKELY (!demux->current_partition->primer.mappings)) {
+ GST_ERROR_OBJECT (demux, "Primer pack doesn't exists");
+ return GST_FLOW_ERROR;
+ }
+
+ if (demux->current_partition->parsed_metadata) {
+ GST_DEBUG_OBJECT (demux, "Metadata of this partition was already parsed");
+ return GST_FLOW_OK;
+ }
+
+ /* Empty value: nothing to parse */
+ if (klv->length == 0)
+ return GST_FLOW_OK;
+ /* Make sure the KLV payload is available in klv->data */
+ ret = gst_mxf_demux_fill_klv (demux, klv);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ gst_buffer_map (klv->data, &map, GST_MAP_READ);
+ metadata =
+ mxf_metadata_new (type, &demux->current_partition->primer, demux->offset,
+ map.data, map.size);
+ gst_buffer_unmap (klv->data, &map);
+
+ /* Unknown/unhandled types are skipped, not treated as errors */
+ if (!metadata) {
+ GST_WARNING_OBJECT (demux,
+ "Unknown or unhandled metadata of type 0x%04x", type);
+ return GST_FLOW_OK;
+ }
+
+ /* Deduplicate against a previously seen object with the same instance UID */
+ old =
+ g_hash_table_lookup (demux->metadata,
+ &MXF_METADATA_BASE (metadata)->instance_uid);
+
+ /* Same UID but different type is a hard inconsistency in the file */
+ if (old && G_TYPE_FROM_INSTANCE (old) != G_TYPE_FROM_INSTANCE (metadata)) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar str[48];
+ #endif
+
+ GST_DEBUG_OBJECT (demux,
+ "Metadata with instance uid %s already exists and has different type '%s',"
+ " expected '%s'",
+ mxf_uuid_to_string (&MXF_METADATA_BASE (metadata)->instance_uid, str),
+ g_type_name (G_TYPE_FROM_INSTANCE (old)),
+ g_type_name (G_TYPE_FROM_INSTANCE (metadata)));
+ g_object_unref (metadata);
+ return GST_FLOW_ERROR;
+ } else if (old
+ && MXF_METADATA_BASE (old)->offset >=
+ MXF_METADATA_BASE (metadata)->offset) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar str[48];
+ #endif
+
+ /* The stored copy comes from a later file offset: keep it, drop ours */
+ GST_DEBUG_OBJECT (demux,
+ "Metadata with instance uid %s already exists and is newer",
+ mxf_uuid_to_string (&MXF_METADATA_BASE (metadata)->instance_uid, str));
+ g_object_unref (metadata);
+ return GST_FLOW_OK;
+ }
+
+ /* Store under the writer lock; resolved metadata links become stale */
+ g_rw_lock_writer_lock (&demux->metadata_lock);
+ demux->update_metadata = TRUE;
+
+ if (MXF_IS_METADATA_PREFACE (metadata)) {
+ demux->preface = MXF_METADATA_PREFACE (metadata);
+ }
+
+ gst_mxf_demux_reset_linked_metadata (demux);
+
+ /* Hash table takes ownership of the (floating) metadata reference */
+ g_hash_table_replace (demux->metadata,
+ &MXF_METADATA_BASE (metadata)->instance_uid, metadata);
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+
+ return ret;
+ }
+
+ /* Parses one descriptive-metadata KLV packet (scheme + 24-bit type from
+  * the key) and registers it in demux->metadata by instance UID, with the
+  * same deduplication rules as gst_mxf_demux_handle_metadata().
+  * Returns GST_FLOW_OK for benign skips and GST_FLOW_ERROR on failures. */
+ static GstFlowReturn
+ gst_mxf_demux_handle_descriptive_metadata (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+ guint32 type;
+ guint8 scheme;
+ GstMapInfo map;
+ GstFlowReturn ret = GST_FLOW_OK;
+ MXFDescriptiveMetadata *m = NULL, *old = NULL;
+
+ /* Scheme is byte 12 of the key, the type the following 24 bits */
+ scheme = GST_READ_UINT8 (&klv->key.u[12]);
+ type = GST_READ_UINT24_BE (&klv->key.u[13]);
+
+ GST_DEBUG_OBJECT (demux,
+ "Handling descriptive metadata of size %" G_GSIZE_FORMAT " at offset %"
+ G_GUINT64_FORMAT " with scheme 0x%02x and type 0x%06x",
+ klv->length, klv->offset, scheme, type);
+
+ if (G_UNLIKELY (!demux->current_partition)) {
+ GST_ERROR_OBJECT (demux, "Partition pack doesn't exist");
+ return GST_FLOW_ERROR;
+ }
+
+ if (G_UNLIKELY (!demux->current_partition->primer.mappings)) {
+ GST_ERROR_OBJECT (demux, "Primer pack doesn't exists");
+ return GST_FLOW_ERROR;
+ }
+
+ if (demux->current_partition->parsed_metadata) {
+ GST_DEBUG_OBJECT (demux, "Metadata of this partition was already parsed");
+ return GST_FLOW_OK;
+ }
+
+ /* Make sure the KLV payload is available in klv->data */
+ ret = gst_mxf_demux_fill_klv (demux, klv);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ gst_buffer_map (klv->data, &map, GST_MAP_READ);
+ m = mxf_descriptive_metadata_new (scheme, type,
+ &demux->current_partition->primer, demux->offset, map.data, map.size);
+ gst_buffer_unmap (klv->data, &map);
+
+ /* Unknown scheme/type combinations are skipped, not fatal */
+ if (!m) {
+ GST_WARNING_OBJECT (demux,
+ "Unknown or unhandled descriptive metadata of scheme 0x%02x and type 0x%06x",
+ scheme, type);
+ return GST_FLOW_OK;
+ }
+
+ /* Deduplicate against a previously seen object with the same instance UID */
+ old =
+ g_hash_table_lookup (demux->metadata,
+ &MXF_METADATA_BASE (m)->instance_uid);
+
+ /* Same UID but different type is a hard inconsistency in the file */
+ if (old && G_TYPE_FROM_INSTANCE (old) != G_TYPE_FROM_INSTANCE (m)) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar str[48];
+ #endif
+
+ GST_DEBUG_OBJECT (demux,
+ "Metadata with instance uid %s already exists and has different type '%s',"
+ " expected '%s'",
+ mxf_uuid_to_string (&MXF_METADATA_BASE (m)->instance_uid, str),
+ g_type_name (G_TYPE_FROM_INSTANCE (old)),
+ g_type_name (G_TYPE_FROM_INSTANCE (m)));
+ g_object_unref (m);
+ return GST_FLOW_ERROR;
+ } else if (old
+ && MXF_METADATA_BASE (old)->offset >= MXF_METADATA_BASE (m)->offset) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar str[48];
+ #endif
+
+ /* The stored copy comes from a later file offset: keep it, drop ours */
+ GST_DEBUG_OBJECT (demux,
+ "Metadata with instance uid %s already exists and is newer",
+ mxf_uuid_to_string (&MXF_METADATA_BASE (m)->instance_uid, str));
+ g_object_unref (m);
+ return GST_FLOW_OK;
+ }
+
+ /* Store under the writer lock; resolved metadata links become stale */
+ g_rw_lock_writer_lock (&demux->metadata_lock);
+
+ demux->update_metadata = TRUE;
+ gst_mxf_demux_reset_linked_metadata (demux);
+
+ /* Hash table takes ownership of the metadata reference */
+ g_hash_table_replace (demux->metadata, &MXF_METADATA_BASE (m)->instance_uid,
+ m);
+
+ g_rw_lock_writer_unlock (&demux->metadata_lock);
+
+ return ret;
+ }
+
+ /* Handles a generic-container system item.  The only thing done for now
+  * is recording, on first sight, where the essence container begins
+  * relative to the start of the current partition; the payload itself is
+  * not interpreted yet. */
+ static GstFlowReturn
+ gst_mxf_demux_handle_generic_container_system_item (GstMXFDemux * demux,
+     GstMXFKLV * klv)
+ {
+   GstMXFDemuxPartition *partition = demux->current_partition;
+
+   GST_DEBUG_OBJECT (demux,
+       "Handling generic container system item of size %" G_GSIZE_FORMAT
+       " at offset %" G_GUINT64_FORMAT, klv->length, klv->offset);
+
+   /* Remember the essence container offset the first time around */
+   if (partition->essence_container_offset == 0) {
+     partition->essence_container_offset =
+         demux->offset - partition->partition.this_partition - demux->run_in;
+   }
+
+   /* TODO: parse this */
+   return GST_FLOW_OK;
+ }
+
+ /* Switches @pad to structural component @i of its material track's
+  * sequence: resolves the component's source package/track, finds the
+  * matching essence track, recomputes the component start/duration in
+  * material-track edit units and updates caps/tags as needed.
+  * Returns GST_FLOW_OK on success, GST_FLOW_EOS when @i is past the last
+  * component (the pad is then clamped to the last one), or
+  * GST_FLOW_ERROR if the metadata is inconsistent. */
+ static GstFlowReturn
+ gst_mxf_demux_pad_set_component (GstMXFDemux * demux, GstMXFDemuxPad * pad,
+ guint i)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstCaps *pad_caps;
+ MXFMetadataSequence *sequence;
+ guint k;
+ MXFMetadataSourcePackage *source_package = NULL;
+ MXFMetadataTimelineTrack *source_track = NULL;
+ gboolean update = (pad->current_component_index != i);
+
+ pad->current_component_index = i;
+
+ sequence = pad->material_track->parent.sequence;
+
+ /* Clamp to the last component and remember to report EOS */
+ if (pad->current_component_index >= sequence->n_structural_components) {
+ GST_DEBUG_OBJECT (demux, "After last structural component");
+ pad->current_component_index = sequence->n_structural_components - 1;
+ ret = GST_FLOW_EOS;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Switching to component %u",
+ pad->current_component_index);
+
+ pad->current_component =
+ MXF_METADATA_SOURCE_CLIP (sequence->structural_components[pad->
+ current_component_index]);
+ if (pad->current_component == NULL) {
+ GST_ERROR_OBJECT (demux, "No such structural component");
+ return GST_FLOW_ERROR;
+ }
+
+ /* The source clip must reference a resolved top-level source package */
+ if (!pad->current_component->source_package
+ || !pad->current_component->source_package->top_level
+ || !MXF_METADATA_GENERIC_PACKAGE (pad->current_component->
+ source_package)->tracks) {
+ GST_ERROR_OBJECT (demux, "Invalid component");
+ return GST_FLOW_ERROR;
+ }
+
+ source_package = pad->current_component->source_package;
+
+ /* Find the source track referenced by this clip's source_track_id */
+ for (k = 0; k < source_package->parent.n_tracks; k++) {
+ MXFMetadataTrack *tmp = source_package->parent.tracks[k];
+
+ if (tmp->track_id == pad->current_component->source_track_id) {
+ source_track = MXF_METADATA_TIMELINE_TRACK (tmp);
+ break;
+ }
+ }
+
+ if (!source_track) {
+ GST_ERROR_OBJECT (demux, "No source track found");
+ return GST_FLOW_ERROR;
+ }
+
+ pad->current_essence_track = NULL;
+
+ /* Match (source_package, source_track) against the known essence tracks */
+ for (k = 0; k < demux->essence_tracks->len; k++) {
+ GstMXFDemuxEssenceTrack *tmp =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, k);
+
+ if (tmp->source_package == source_package &&
+ tmp->source_track == source_track) {
+ pad->current_essence_track = tmp;
+ break;
+ }
+ }
+
+ if (!pad->current_essence_track) {
+ GST_ERROR_OBJECT (demux, "No corresponding essence track found");
+ return GST_FLOW_ERROR;
+ }
+
+ if (!source_package->descriptor) {
+ GST_ERROR_OBJECT (demux, "Source package has no descriptors");
+ return GST_FLOW_ERROR;
+ }
+
+ if (!source_track->parent.descriptor) {
+ GST_ERROR_OBJECT (demux, "No descriptor found for track");
+ return GST_FLOW_ERROR;
+ }
+
+ if (source_track->edit_rate.n <= 0 || source_track->edit_rate.d <= 0) {
+ GST_ERROR_OBJECT (demux, "Source track has invalid edit rate");
+ return GST_FLOW_ERROR;
+ }
+
+ /* Start position of this component = sum of all previous durations
+  * (in material-track edit units) */
+ pad->current_component_start_position = 0;
+ for (k = 0; k < i; k++) {
+ pad->current_component_start_position +=
+ MXF_METADATA_SOURCE_CLIP (sequence->structural_components[k])->
+ parent.duration;
+ }
+
+ /* Durations below -1 are normalized to -1 (unknown) */
+ if (pad->current_component->parent.duration >= -1)
+ pad->current_component_duration = pad->current_component->parent.duration;
+ else
+ pad->current_component_duration = -1;
+
+ /* Convert start/duration from source-track to material-track edit rate
+  * when the two rates differ */
+ if (pad->material_track->edit_rate.n != source_track->edit_rate.n ||
+ pad->material_track->edit_rate.d != source_track->edit_rate.d) {
+ pad->current_component_start +=
+ gst_util_uint64_scale (pad->current_component->start_position,
+ source_track->edit_rate.n * pad->material_track->edit_rate.d,
+ source_track->edit_rate.d * pad->material_track->edit_rate.n);
+
+ if (pad->current_component_duration != -1)
+ pad->current_component_duration =
+ gst_util_uint64_scale (pad->current_component_duration,
+ source_track->edit_rate.n * pad->material_track->edit_rate.d,
+ source_track->edit_rate.d * pad->material_track->edit_rate.n);
+ } else {
+ pad->current_component_start += pad->current_component->start_position;
+ }
+ pad->current_essence_track_position = pad->current_component_start;
+
+ /* Renegotiate caps if they changed with the new essence track */
+ pad_caps = gst_pad_get_current_caps (GST_PAD_CAST (pad));
+ if (!pad_caps
+ || !gst_caps_is_equal (pad_caps, pad->current_essence_track->caps)) {
+ gst_pad_set_caps (GST_PAD_CAST (pad), pad->current_essence_track->caps);
+ }
+ if (pad_caps)
+ gst_caps_unref (pad_caps);
+
+ /* Only merge tags when the component actually changed */
+ if (update) {
+ if (pad->tags) {
+ if (pad->current_essence_track->tags)
+ gst_tag_list_insert (pad->tags, pad->current_essence_track->tags,
+ GST_TAG_MERGE_REPLACE);
+ } else {
+ if (pad->current_essence_track->tags)
+ pad->tags = gst_tag_list_copy (pad->current_essence_track->tags);
+ }
+ }
+
+ /* Past the end: position after the (clamped) last component */
+ if (ret == GST_FLOW_EOS) {
+ pad->current_essence_track_position += pad->current_component_duration;
+ }
+
+ return ret;
+ }
+
+ /*
+  * Find the partition containing the stream offset of the given track.
+  *
+  * Walks the ordered partition list, remembering the last partition with
+  * the track's body_sid whose body offset does not exceed @stream_offset,
+  * and the partition immediately following it in the file.  Returns NULL
+  * if no partition matches or if the offset would leak into an unrelated
+  * following partition.
+  */
+ static GstMXFDemuxPartition *
+ get_partition_for_stream_offset (GstMXFDemux * demux,
+     GstMXFDemuxEssenceTrack * etrack, guint64 stream_offset)
+ {
+   GList *tmp;
+   GstMXFDemuxPartition *offset_partition = NULL, *next_partition = NULL;
+
+   for (tmp = demux->partitions; tmp; tmp = tmp->next) {
+     GstMXFDemuxPartition *partition = tmp->data;
+
+     /* Track the partition that physically follows the current candidate */
+     if (!next_partition && offset_partition)
+       next_partition = partition;
+
+     if (partition->partition.body_sid != etrack->body_sid)
+       continue;
+     if (partition->partition.body_offset > stream_offset)
+       break;
+
+     offset_partition = partition;
+     next_partition = NULL;
+   }
+
+   /* Bail out when no partition matched at all (previously this fell
+    * through and dereferenced NULL below) or when the offset lies before
+    * the candidate's body offset. */
+   if (!offset_partition
+       || stream_offset < offset_partition->partition.body_offset)
+     return NULL;
+
+   GST_DEBUG_OBJECT (demux,
+       "Found this_partition:%" G_GUINT64_FORMAT " body_offset:%"
+       G_GUINT64_FORMAT, offset_partition->partition.this_partition,
+       offset_partition->partition.body_offset);
+
+   /* Are we overriding into the next partition ? */
+   if (next_partition) {
+     guint64 partition_essence_size =
+         next_partition->partition.this_partition -
+         offset_partition->partition.this_partition +
+         offset_partition->essence_container_offset;
+     guint64 in_partition =
+         stream_offset - offset_partition->partition.body_offset;
+     GST_DEBUG_OBJECT (demux,
+         "Followed by this_partition:%" G_GUINT64_FORMAT " body_offset:%"
+         G_GUINT64_FORMAT, next_partition->partition.this_partition,
+         next_partition->partition.body_offset);
+
+     if (in_partition >= partition_essence_size) {
+       /* Fixed format string: the body_sid specifier was a bare '%',
+        * which mis-consumed the variadic arguments. */
+       GST_WARNING_OBJECT (demux,
+           "stream_offset %" G_GUINT64_FORMAT
+           " in track body_sid:%d index_sid:%d leaks into next unrelated partition (body_sid:%d / index_sid:%d)",
+           stream_offset, etrack->body_sid, etrack->index_sid,
+           next_partition->partition.body_sid,
+           next_partition->partition.index_sid);
+       return NULL;
+     }
+   }
+   return offset_partition;
+ }
+
+ /* Returns the index table whose body_sid/index_sid pair matches the
+  * given essence track, or NULL if none has been seen yet. */
+ static GstMXFDemuxIndexTable *
+ get_track_index_table (GstMXFDemux * demux, GstMXFDemuxEssenceTrack * etrack)
+ {
+   GList *walk;
+
+   /* Linear scan over the parsed index tables */
+   for (walk = demux->index_tables; walk != NULL; walk = g_list_next (walk)) {
+     GstMXFDemuxIndexTable *table = walk->data;
+
+     if (table->body_sid == etrack->body_sid
+         && table->index_sid == etrack->index_sid)
+       return table;
+   }
+
+   return NULL;
+ }
+
+ /* Returns the maximum temporal reordering offset for the track, in edit
+  * units.  Intra-only tracks never have reordering; tracks without an
+  * index table fall back to 0 as well. */
+ static guint32
+ get_track_max_temporal_offset (GstMXFDemux * demux,
+     GstMXFDemuxEssenceTrack * etrack)
+ {
+   GstMXFDemuxIndexTable *table;
+
+   if (etrack->intra_only)
+     return 0;
+
+   table = get_track_index_table (demux, etrack);
+
+   return table ? table->max_temporal_offset : 0;
+ }
+
+ /* Looks up the byte offset stored for *@position in @offsets.  When
+  * @keyframe is TRUE and the entry at *@position is not a keyframe, scans
+  * backwards for the closest preceding keyframe and updates *@position to
+  * it.  Returns the offset, or (guint64) -1 when nothing usable is found
+  * (unset entries have offset 0 and terminate the backwards scan). */
+ static guint64
+ find_offset (GArray * offsets, gint64 * position, gboolean keyframe)
+ {
+   GstMXFDemuxIndex *idx;
+   gint64 pos;
+
+   if (!offsets || offsets->len <= *position)
+     return -1;
+
+   idx = &g_array_index (offsets, GstMXFDemuxIndex, *position);
+
+   /* Unset entry: nothing known for this position */
+   if (idx->offset == 0)
+     return -1;
+
+   /* Direct hit: entry is usable as-is */
+   if (!keyframe || idx->keyframe)
+     return idx->offset;
+
+   /* A keyframe was requested but this entry is not one: walk backwards
+    * until a keyframe is found or an unset entry stops the search */
+   for (pos = *position - 1; pos >= 0; pos--) {
+     GST_LOG ("current_position %" G_GINT64_FORMAT, pos);
+     idx = &g_array_index (offsets, GstMXFDemuxIndex, pos);
+     if (idx->offset == 0) {
+       GST_LOG ("breaking offset 0");
+       break;
+     }
+     if (idx->keyframe) {
+       GST_LOG ("Breaking found offset");
+       *position = pos;
+       return idx->offset;
+     }
+   }
+
+   return -1;
+ }
+
+ /**
+ * find_edit_entry:
+ * @demux: The demuxer
+ * @etrack: The target essence track
+ * @position: An edit unit position
+ * @keyframe: if TRUE search for supporting keyframe
+ * @entry: (out): Will be filled with the matching entry information
+ *
+ * Finds the edit entry of @etrack for the given edit unit @position and fill
+ * @entry with the information about that edit entry. If @keyframe is TRUE, the
+ * supporting entry (i.e. keyframe) for the given position will be searched for.
+ *
+ * For frame-wrapped contents, the returned offset will be the position of the
+ * KLV of the content. For clip-wrapped content, the returned offset will be the
+ * position of the essence (i.e. without KLV header) and the entry will specify
+ * the size (in bytes).
+ *
+ * The returned entry will also specify the duration (in edit units) of the
+ * content, which can be different from 1 for special cases (such as raw audio
+ * where multiple samples could be aggregated).
+ *
+ * Returns: TRUE if the entry was found and @entry was properly filled, else
+ * FALSE.
+ */
+ static gboolean
+ find_edit_entry (GstMXFDemux * demux, GstMXFDemuxEssenceTrack * etrack,
+ gint64 position, gboolean keyframe, GstMXFDemuxIndex * entry)
+ {
+ GstMXFDemuxIndexTable *index_table = NULL;
+ guint i;
+ MXFIndexTableSegment *segment = NULL;
+ GstMXFDemuxPartition *offset_partition = NULL;
+ guint64 stream_offset = G_MAXUINT64, absolute_offset;
+
+ GST_DEBUG_OBJECT (demux,
+ "track %d body_sid:%d index_sid:%d delta_id:%d position:%" G_GINT64_FORMAT
+ " keyframe:%d", etrack->track_id, etrack->body_sid,
+ etrack->index_sid, etrack->delta_id, position, keyframe);
+
+ /* Default values */
+ entry->duration = 1;
+ /* By default every entry is a keyframe unless specified otherwise */
+ entry->keyframe = TRUE;
+
+ /* Look in the track offsets (entries remembered from earlier parsing),
+  * which take precedence over the index tables */
+ if (etrack->offsets && etrack->offsets->len > position) {
+ if (find_offset (etrack->offsets, &position, keyframe) != -1) {
+ *entry = g_array_index (etrack->offsets, GstMXFDemuxIndex, position);
+ GST_LOG_OBJECT (demux, "Found entry in track offsets");
+ return TRUE;
+ } else
+ GST_LOG_OBJECT (demux, "Didn't find entry in track offsets");
+ }
+
+ /* Look in the indextables */
+ index_table = get_track_index_table (demux, etrack);
+
+ if (!index_table) {
+ GST_DEBUG_OBJECT (demux,
+ "Couldn't find index table for body_sid:%d index_sid:%d",
+ etrack->body_sid, etrack->index_sid);
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "Looking for position %" G_GINT64_FORMAT
+ " in index table (max temporal offset %u)",
+ etrack->position, index_table->max_temporal_offset);
+
+ /* Searching for a position in index tables works in 3 steps:
+ *
+ * 1. Figure out the table segment containing that position
+ * 2. Figure out the "stream offset" (and additional flags/timing) of that
+ * position from the table segment.
+ * 3. Figure out the "absolute offset" of that "stream offset" using partitions
+ */
+
+ /* Re-entered when a keyframe search moves @position into an earlier
+  * segment than the one currently selected */
+ search_in_segment:
+
+ /* Find matching index segment */
+ GST_DEBUG_OBJECT (demux, "Look for entry in %d segments",
+ index_table->segments->len);
+ for (i = 0; i < index_table->segments->len; i++) {
+ MXFIndexTableSegment *cand =
+ &g_array_index (index_table->segments, MXFIndexTableSegment, i);
+ /* index_duration == 0 means the segment is open-ended */
+ if (position >= cand->index_start_position && (cand->index_duration == 0
+ || position <
+ (cand->index_start_position + cand->index_duration))) {
+ GST_DEBUG_OBJECT (demux,
+ "Entry is in Segment #%d , start: %" G_GINT64_FORMAT " , duration: %"
+ G_GINT64_FORMAT, i, cand->index_start_position, cand->index_duration);
+ segment = cand;
+ break;
+ }
+ }
+ if (!segment) {
+ GST_DEBUG_OBJECT (demux,
+ "Didn't find index table segment for position %" G_GINT64_FORMAT,
+ position);
+ return FALSE;
+ }
+
+ /* Were we asked for a keyframe ? */
+ if (keyframe) {
+ if (segment->edit_unit_byte_count && !segment->n_index_entries) {
+ GST_LOG_OBJECT (demux,
+ "Index table without entries, directly using requested position for keyframe search");
+ } else {
+ gint64 candidate;
+ GST_LOG_OBJECT (demux, "keyframe search");
+ /* Search backwards for keyframe */
+ for (candidate = position; candidate >= segment->index_start_position;
+ candidate--) {
+ MXFIndexEntry *segment_index_entry =
+ &segment->index_entries[candidate - segment->index_start_position];
+
+ /* Match: flag bit 0x80 marks a random-access (key) frame */
+ if (segment_index_entry->flags & 0x80) {
+ GST_LOG_OBJECT (demux, "Found keyframe at position %" G_GINT64_FORMAT,
+ candidate);
+ position = candidate;
+ break;
+ }
+
+ /* If a keyframe offset is specified and valid, use that */
+ if (segment_index_entry->key_frame_offset
+ && !(segment_index_entry->flags & 0x08)) {
+ GST_DEBUG_OBJECT (demux, "Using keyframe offset %d",
+ segment_index_entry->key_frame_offset);
+ position = candidate + segment_index_entry->key_frame_offset;
+ if (position < segment->index_start_position) {
+ GST_DEBUG_OBJECT (demux, "keyframe info is in previous segment");
+ goto search_in_segment;
+ }
+ break;
+ }
+
+ /* If we reached the beginning, use that */
+ if (candidate == 0) {
+ GST_LOG_OBJECT (demux,
+ "Reached position 0 while searching for keyframe");
+ position = 0;
+ break;
+ }
+
+ /* If we looped past the beginning of this segment, go to the previous one */
+ if (candidate == segment->index_start_position) {
+ position = candidate - 1;
+ GST_LOG_OBJECT (demux, "Looping with new position %" G_GINT64_FORMAT,
+ position);
+ goto search_in_segment;
+ }
+
+ /* loop back to check previous entry */
+ }
+ }
+ }
+
+ /* Figure out the stream offset (also called "body offset" in specification) */
+ if (segment->edit_unit_byte_count && !segment->n_index_entries) {
+ /* Constant entry table. */
+ stream_offset = position * segment->edit_unit_byte_count;
+ if (etrack->delta_id >= 0) {
+ MXFDeltaEntry *delta_entry = &segment->delta_entries[etrack->delta_id];
+ GST_LOG_OBJECT (demux,
+ "Using delta %d pos_table_index:%d slice:%u element_delta:%u",
+ etrack->delta_id, delta_entry->pos_table_index, delta_entry->slice,
+ delta_entry->element_delta);
+ stream_offset += delta_entry->element_delta;
+ } else if (etrack->min_edit_units != 1) {
+ /* Aggregate several edit units (e.g. raw audio samples), clamped to
+  * the end of the segment */
+ GST_LOG_OBJECT (demux, "Handling minimum edit unit %u",
+ etrack->min_edit_units);
+ entry->duration =
+ MIN (etrack->min_edit_units,
+ (segment->index_start_position + segment->index_duration) - position);
+ entry->size = segment->edit_unit_byte_count * entry->duration;
+ } else {
+ entry->size = segment->edit_unit_byte_count;
+ }
+ } else if (segment->n_index_entries) {
+ MXFIndexEntry *segment_index_entry;
+ MXFDeltaEntry *delta_entry = NULL;
+ g_assert (position <=
+ segment->index_start_position + segment->n_index_entries);
+ segment_index_entry =
+ &segment->index_entries[position - segment->index_start_position];
+ stream_offset = segment_index_entry->stream_offset;
+
+ /* NOTE(review): this indexes delta_entries with etrack->delta_id without
+  * checking delta_id >= 0 as the constant-table branch above does —
+  * presumably delta_id is guaranteed valid when delta entries exist;
+  * verify against the callers */
+ if (segment->n_delta_entries > 0)
+ delta_entry = &segment->delta_entries[etrack->delta_id];
+
+ if (delta_entry) {
+ GST_LOG_OBJECT (demux,
+ "Using delta %d pos_table_index:%d slice:%u element_delta:%u",
+ etrack->delta_id, delta_entry->pos_table_index, delta_entry->slice,
+ delta_entry->element_delta);
+
+ /* Apply offset from slice/delta if needed */
+ if (delta_entry->slice)
+ stream_offset +=
+ segment_index_entry->slice_offset[delta_entry->slice - 1];
+ stream_offset += delta_entry->element_delta;
+ if (delta_entry->pos_table_index == -1) {
+ entry->keyframe = (segment_index_entry->flags & 0x80) == 0x80;
+ }
+ /* FIXME : Handle fractional offset position (delta_entry->pos_table_offset > 0) */
+ }
+
+ /* Apply reverse temporal reordering if present */
+ if (index_table->reordered_delta_entry == etrack->delta_id) {
+ if (position >= index_table->reverse_temporal_offsets->len) {
+ GST_WARNING_OBJECT (demux,
+ "Can't apply temporal offset for position %" G_GINT64_FORMAT
+ " (max:%d)", position, index_table->reverse_temporal_offsets->len);
+ }
+ if (demux->temporal_order_misuse) {
+ GST_DEBUG_OBJECT (demux, "Handling temporal order misuse");
+ entry->pts = position + segment_index_entry->temporal_offset;
+ } else {
+ entry->pts =
+ position + g_array_index (index_table->reverse_temporal_offsets,
+ gint8, position);
+ GST_LOG_OBJECT (demux,
+ "Applied temporal offset. dts:%" G_GINT64_FORMAT " pts:%"
+ G_GINT64_FORMAT, position, entry->pts);
+ }
+ } else
+ entry->pts = position;
+ } else {
+ /* Note : This should have been handled in the parser */
+ GST_WARNING_OBJECT (demux,
+ "Can't handle index tables without entries nor constant edit unit byte count");
+ return FALSE;
+ }
+
+ /* Find the partition containing the stream offset for this track */
+ offset_partition =
+ get_partition_for_stream_offset (demux, etrack, stream_offset);
+
+ if (!offset_partition) {
+ GST_WARNING_OBJECT (demux,
+ "Couldn't find matching partition for stream offset %" G_GUINT64_FORMAT,
+ stream_offset);
+ return FALSE;
+ } else {
+ GST_DEBUG_OBJECT (demux, "Entry is in partition %" G_GUINT64_FORMAT,
+ offset_partition->partition.this_partition);
+ }
+
+ /* Convert stream offset to absolute offset using matching partition */
+ absolute_offset =
+ offset_partition->partition.this_partition +
+ offset_partition->essence_container_offset + (stream_offset -
+ offset_partition->partition.body_offset);
+
+ GST_LOG_OBJECT (demux,
+ "track %d position:%" G_GINT64_FORMAT " stream_offset %" G_GUINT64_FORMAT
+ " matches to absolute offset %" G_GUINT64_FORMAT, etrack->track_id,
+ position, stream_offset, absolute_offset);
+ entry->initialized = TRUE;
+ entry->offset = absolute_offset;
+ entry->dts = position;
+
+ return TRUE;
+ }
+
/**
 * find_entry_for_offset:
 * @demux: The demuxer
 * @etrack: The target essence track
 * @offset: An absolute byte offset (excluding run_in)
 * @retentry: (out): Will be filled with the matching entry information
 *
 * Find the entry located at the given absolute byte offset.
 *
 * Note: the offset requested should be in the current partition!
 *
 * Returns: TRUE if the entry was found and @retentry was properly filled, else
 * FALSE.
 */
+ static gboolean
+ find_entry_for_offset (GstMXFDemux * demux, GstMXFDemuxEssenceTrack * etrack,
+ guint64 offset, GstMXFDemuxIndex * retentry)
+ {
+ GstMXFDemuxIndexTable *index_table = get_track_index_table (demux, etrack);
+ guint i;
+ MXFIndexTableSegment *index_segment = NULL;
+ GstMXFDemuxPartition *partition = demux->current_partition;
+ guint64 original_offset = offset;
+ guint64 cp_offset = 0; /* Offset in Content Package */
+ MXFIndexEntry *index_entry = NULL;
+ MXFDeltaEntry *delta_entry = NULL;
+ gint64 position = 0;
+
+ GST_DEBUG_OBJECT (demux,
+ "track %d body_sid:%d index_sid:%d offset:%" G_GUINT64_FORMAT,
+ etrack->track_id, etrack->body_sid, etrack->index_sid, offset);
+
+ /* Default value */
+ retentry->duration = 1;
+ retentry->keyframe = TRUE;
+
+ /* Index-less search */
+ if (etrack->offsets) {
+ for (i = 0; i < etrack->offsets->len; i++) {
+ GstMXFDemuxIndex *idx =
+ &g_array_index (etrack->offsets, GstMXFDemuxIndex, i);
+
+ if (idx->initialized && idx->offset != 0 && idx->offset == offset) {
+ *retentry = *idx;
+ GST_DEBUG_OBJECT (demux,
+ "Found in track index. Position:%" G_GINT64_FORMAT, idx->dts);
+ return TRUE;
+ }
+ }
+ }
+
+ /* Actual index search */
+ if (!index_table || !index_table->segments->len) {
+ GST_WARNING_OBJECT (demux, "No index table or entries to search in");
+ return FALSE;
+ }
+
+ if (!partition) {
+ GST_WARNING_OBJECT (demux, "No current partition for search");
+ return FALSE;
+ }
+
+ /* Searching for a stream position from an absolute offset works in 3 steps:
+ *
+ * 1. Convert the absolute offset to a "stream offset" based on the partition
+ * information.
+ * 2. Find the segment for that "stream offset"
+ * 3. Match the entry within that segment
+ */
+
+ /* Convert to stream offset */
+ GST_LOG_OBJECT (demux,
+ "offset %" G_GUINT64_FORMAT " this_partition:%" G_GUINT64_FORMAT
+ " essence_container_offset:%" G_GINT64_FORMAT " partition body offset %"
+ G_GINT64_FORMAT, offset, partition->partition.this_partition,
+ partition->essence_container_offset, partition->partition.body_offset);
+ offset =
+ offset - partition->partition.this_partition -
+ partition->essence_container_offset + partition->partition.body_offset;
+
+ GST_LOG_OBJECT (demux, "stream offset %" G_GUINT64_FORMAT, offset);
+
+ /* Find the segment that covers the given stream offset (the highest one that
+ * covers that offset) */
+ for (i = index_table->segments->len - 1; i >= 0; i--) {
+ index_segment =
+ &g_array_index (index_table->segments, MXFIndexTableSegment, i);
+ GST_DEBUG_OBJECT (demux,
+ "Checking segment #%d (essence_offset %" G_GUINT64_FORMAT ")", i,
+ index_segment->segment_start_offset);
+ /* Not in the right segment yet */
+ if (offset >= index_segment->segment_start_offset) {
+ GST_LOG_OBJECT (demux, "Found");
+ break;
+ }
+ }
+ if (!index_segment) {
+ GST_WARNING_OBJECT (demux,
+ "Couldn't find index table segment for given offset");
+ return FALSE;
+ }
+
+ /* In the right segment, figure out:
+ * * the offset in the content package,
+ * * the position in edit units
+ * * the matching entry (if the table has entries)
+ */
+ if (index_segment->edit_unit_byte_count) {
+ cp_offset = offset % index_segment->edit_unit_byte_count;
+ position = offset / index_segment->edit_unit_byte_count;
+ /* Boundary check */
+ if ((position < index_segment->index_start_position)
+ || (index_segment->index_duration
+ && position >
+ (index_segment->index_start_position +
+ index_segment->index_duration))) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid offset, exceeds table segment limits");
+ return FALSE;
+ }
+ if (etrack->min_edit_units != 1) {
+ retentry->duration = MIN (etrack->min_edit_units,
+ (index_segment->index_start_position +
+ index_segment->index_duration) - position);
+ retentry->size = index_segment->edit_unit_byte_count * retentry->duration;
+ } else {
+ retentry->size = index_segment->edit_unit_byte_count;
+ }
+ } else {
+ /* Find the content package entry containing this offset */
+ guint cpidx;
+ for (cpidx = 0; cpidx < index_segment->n_index_entries; cpidx++) {
+ index_entry = &index_segment->index_entries[cpidx];
+ GST_DEBUG_OBJECT (demux,
+ "entry #%u offset:%" G_GUINT64_FORMAT " stream_offset:%"
+ G_GUINT64_FORMAT, cpidx, offset, index_entry->stream_offset);
+ if (index_entry->stream_offset == offset) {
+ index_entry = &index_segment->index_entries[cpidx];
+ /* exactly on the entry */
+ cp_offset = offset - index_entry->stream_offset;
+ position = index_segment->index_start_position + cpidx;
+ break;
+ }
+ if (index_entry->stream_offset > offset && cpidx > 0) {
+ index_entry = &index_segment->index_entries[cpidx - 1];
+ /* One too far, result is in previous entry */
+ cp_offset = offset - index_entry->stream_offset;
+ position = index_segment->index_start_position + cpidx - 1;
+ break;
+ }
+ }
+ if (cpidx == index_segment->n_index_entries) {
+ GST_WARNING_OBJECT (demux,
+ "offset exceeds maximum number of entries in table segment");
+ return FALSE;
+ }
+ }
+
+ /* If the track comes from an interleaved essence container and doesn't have a
+ * delta_id set, figure it out now */
+ if (G_UNLIKELY (etrack->delta_id == MXF_INDEX_DELTA_ID_UNKNOWN)) {
+ guint delta;
+ GST_DEBUG_OBJECT (demux,
+ "Unknown delta_id for track. Attempting to resolve it");
+
+ if (index_segment->n_delta_entries == 0) {
+ /* No delta entries, nothing we can do about this */
+ GST_DEBUG_OBJECT (demux, "Index table has no delta entries, ignoring");
+ etrack->delta_id = MXF_INDEX_DELTA_ID_IGNORE;
+ } else if (!index_entry) {
+ for (delta = 0; delta < index_segment->n_delta_entries; delta++) {
+ /* No entry, therefore no slices */
+ GST_LOG_OBJECT (demux,
+ "delta #%d offset %" G_GUINT64_FORMAT " cp_offs:%" G_GUINT64_FORMAT
+ " element_delta:%u", delta, offset, cp_offset,
+ index_segment->delta_entries[delta].element_delta);
+ if (cp_offset == index_segment->delta_entries[delta].element_delta) {
+ GST_DEBUG_OBJECT (demux, "Matched to delta %d", delta);
+ etrack->delta_id = delta;
+ delta_entry = &index_segment->delta_entries[delta];
+ break;
+ }
+ }
+ } else {
+ for (delta = 0; delta < index_segment->n_delta_entries; delta++) {
+ guint64 delta_offs = 0;
+ /* If we are not in the first slice, take that offset into account */
+ if (index_segment->delta_entries[delta].slice)
+ delta_offs =
+ index_entry->slice_offset[index_segment->
+ delta_entries[delta].slice - 1];
+ /* Add the offset for this delta */
+ delta_offs += index_segment->delta_entries[delta].element_delta;
+ if (cp_offset == delta_offs) {
+ GST_DEBUG_OBJECT (demux, "Matched to delta %d", delta);
+ etrack->delta_id = delta;
+ delta_entry = &index_segment->delta_entries[delta];
+ break;
+ }
+ }
+
+ }
+ /* If we didn't managed to match, ignore it from now on */
+ if (etrack->delta_id == MXF_INDEX_DELTA_ID_UNKNOWN) {
+ GST_WARNING_OBJECT (demux,
+ "Couldn't match delta id, ignoring it from now on");
+ etrack->delta_id = MXF_INDEX_DELTA_ID_IGNORE;
+ }
+ } else if (index_segment->n_delta_entries > 0) {
+ delta_entry = &index_segment->delta_entries[etrack->delta_id];
+ }
+
+ if (index_entry && delta_entry && delta_entry->pos_table_index == -1) {
+ retentry->keyframe = (index_entry->flags & 0x80) == 0x80;
+ if (!demux->temporal_order_misuse)
+ retentry->pts =
+ position + g_array_index (index_table->reverse_temporal_offsets,
+ gint8, position);
+ else
+ retentry->pts = position + index_entry->temporal_offset;
+ GST_LOG_OBJECT (demux,
+ "Applied temporal offset. dts:%" G_GINT64_FORMAT " pts:%"
+ G_GINT64_FORMAT, position, retentry->pts);
+ } else
+ retentry->pts = position;
+
+ /* FIXME : check if position and cp_offs matches the table */
+ GST_LOG_OBJECT (demux, "Found in index table. position:%" G_GINT64_FORMAT,
+ position);
+ retentry->initialized = TRUE;
+ retentry->offset = original_offset;
+ retentry->dts = position;
+
+ return TRUE;
+ }
+
+ static GstFlowReturn
+ gst_mxf_demux_handle_generic_container_essence_element (GstMXFDemux * demux,
+ GstMXFKLV * klv, gboolean peek)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 track_number;
+ guint i;
+ GstBuffer *inbuf = NULL;
+ GstBuffer *outbuf = NULL;
+ GstMXFDemuxEssenceTrack *etrack = NULL;
+ /* As in GstMXFDemuxIndex */
+ guint64 pts = G_MAXUINT64;
+ gint32 max_temporal_offset = 0;
+ GstMXFDemuxIndex index_entry = { 0, };
+ guint64 offset;
+
+ GST_DEBUG_OBJECT (demux,
+ "Handling generic container essence element of size %" G_GSIZE_FORMAT
+ " at offset %" G_GUINT64_FORMAT, klv->length,
+ klv->offset + klv->consumed);
+
+ GST_DEBUG_OBJECT (demux, " type = 0x%02x", klv->key.u[12]);
+ GST_DEBUG_OBJECT (demux, " essence element count = 0x%02x", klv->key.u[13]);
+ GST_DEBUG_OBJECT (demux, " essence element type = 0x%02x", klv->key.u[14]);
+ GST_DEBUG_OBJECT (demux, " essence element number = 0x%02x", klv->key.u[15]);
+
+ if (demux->current_partition->essence_container_offset == 0) {
+ demux->current_partition->essence_container_offset =
+ demux->offset - demux->current_partition->partition.this_partition -
+ demux->run_in;
+ if (demux->current_partition->single_track
+ && demux->current_partition->single_track->wrapping !=
+ MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ demux->current_partition->essence_container_offset += klv->data_offset;
+ demux->current_partition->clip_klv = *klv;
+ /* "consume" the initial bytes of the KLV */
+ klv->consumed = klv->data_offset;
+ GST_DEBUG_OBJECT (demux,
+ "Non-frame wrapping, updated essence_container_offset to %"
+ G_GUINT64_FORMAT, demux->current_partition->essence_container_offset);
+ }
+ }
+
+ if (!demux->current_package) {
+ GST_ERROR_OBJECT (demux, "No package selected yet");
+ return GST_FLOW_ERROR;
+ }
+
+ if (demux->src->len == 0) {
+ GST_ERROR_OBJECT (demux, "No streams created yet");
+ return GST_FLOW_ERROR;
+ }
+
+ if (demux->essence_tracks->len == 0) {
+ GST_ERROR_OBJECT (demux, "No essence streams found in the metadata");
+ return GST_FLOW_ERROR;
+ }
+
+ /* Identify and fetch the essence track */
+ track_number = GST_READ_UINT32_BE (&klv->key.u[12]);
+
+ etrack = demux->current_partition->single_track;
+ if (!etrack) {
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *tmp =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
+
+ if (tmp->body_sid == demux->current_partition->partition.body_sid &&
+ (tmp->track_number == track_number || tmp->track_number == 0)) {
+ etrack = tmp;
+ break;
+ }
+ }
+
+ if (!etrack) {
+ GST_DEBUG_OBJECT (demux,
+ "No essence track for this essence element found");
+ return GST_FLOW_OK;
+ }
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "Handling generic container essence (track %d , position:%"
+ G_GINT64_FORMAT ", number: 0x%08x , frame-wrapped:%d)", etrack->track_id,
+ etrack->position, track_number,
+ etrack->wrapping == MXF_ESSENCE_WRAPPING_FRAME_WRAPPING);
+
+ /* Fetch the current entry.
+ *
+ * 1. If we don't have a current position, use find_entry_for_offset()
+ * 2. If we do have a position, use find_edit_entry()
+ *
+ * 3. If we are dealing with frame-wrapped content, pull the corresponding
+ * data from upstream (because it wasn't provided). If we didn't find an
+ * entry, error out because we can't deal with a frame-wrapped stream
+ * without index.
+ */
+
+ offset = klv->offset + klv->consumed;
+
+ /* Update the track position (in case of resyncs) */
+ if (etrack->position == -1) {
+ GST_DEBUG_OBJECT (demux,
+ "Unknown essence track position, looking into index");
+ if (!find_entry_for_offset (demux, etrack, offset - demux->run_in,
+ &index_entry)) {
+ GST_WARNING_OBJECT (demux, "Essence track position not in index");
+ return GST_FLOW_OK;
+ }
+ /* Update track position */
+ etrack->position = index_entry.dts;
+ } else if (etrack->delta_id == MXF_INDEX_DELTA_ID_UNKNOWN) {
+ GST_DEBUG_OBJECT (demux,
+ "Unknown essence track delta_id, looking into index");
+ if (!find_entry_for_offset (demux, etrack, offset - demux->run_in,
+ &index_entry)) {
+ /* Non-fatal, fallback to legacy mode */
+ GST_WARNING_OBJECT (demux, "Essence track position not in index");
+ } else if (etrack->position != index_entry.dts) {
+ GST_ERROR_OBJECT (demux,
+ "track position doesn't match %" G_GINT64_FORMAT " entry dts %"
+ G_GINT64_FORMAT, etrack->position, index_entry.dts);
+ return GST_FLOW_ERROR;
+ }
+ } else {
+ if (!find_edit_entry (demux, etrack, etrack->position, FALSE, &index_entry)) {
+ GST_DEBUG_OBJECT (demux, "Couldn't find entry");
+ } else if (etrack->wrapping == MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ if (etrack->delta_id != MXF_INDEX_DELTA_ID_IGNORE
+ && index_entry.offset != offset) {
+ GST_ERROR_OBJECT (demux,
+ "demux offset doesn't match %" G_GINT64_FORMAT " entry offset %"
+ G_GUINT64_FORMAT, offset, index_entry.offset);
+ return GST_FLOW_ERROR;
+ }
+ } else if (index_entry.offset != klv->offset + klv->consumed &&
+ index_entry.offset != klv->offset + klv->data_offset) {
+ GST_ERROR_OBJECT (demux,
+ "KLV offset doesn't match %" G_GINT64_FORMAT " entry offset %"
+ G_GUINT64_FORMAT, klv->offset + klv->consumed, index_entry.offset);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (etrack->wrapping != MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ /* We need entry information to deal with non-frame-wrapped content */
+ if (!index_entry.initialized) {
+ GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE, (NULL),
+ ("Essence with non-frame-wrapping require an index table to be present"));
+ return GST_FLOW_ERROR;
+ }
+ /* We cannot deal with non-frame-wrapping in push mode for now */
+ if (!demux->random_access) {
+ GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE, (NULL),
+ ("Non-frame-wrapping is not support in push mode"));
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ /* FIXME : If we're peeking and don't need to actually parse the data, we
+ * should avoid pulling the content from upstream */
+ if (etrack->wrapping != MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ g_assert (index_entry.size);
+ GST_DEBUG_OBJECT (demux, "Should only grab %" G_GUINT64_FORMAT " bytes",
+ index_entry.size);
+ ret =
+ gst_mxf_demux_pull_range (demux, index_entry.offset, index_entry.size,
+ &inbuf);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ if (klv->consumed == 0)
+ klv->consumed = klv->data_offset + index_entry.size;
+ else
+ klv->consumed += index_entry.size;
+ if (klv != &demux->current_partition->clip_klv)
+ demux->current_partition->clip_klv = *klv;
+ GST_LOG_OBJECT (demux,
+ "klv data_offset:%" G_GUINT64_FORMAT " length:%" G_GSIZE_FORMAT
+ " consumed:%" G_GUINT64_FORMAT, klv->data_offset, klv->length,
+ klv->consumed);
+ /* Switch back to KLV mode if we're done with this one */
+ if (klv->length + klv->data_offset == klv->consumed)
+ demux->state = GST_MXF_DEMUX_STATE_KLV;
+ else
+ demux->state = GST_MXF_DEMUX_STATE_ESSENCE;
+ } else {
+
+ ret = gst_mxf_demux_fill_klv (demux, klv);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* Create subbuffer to be able to change metadata */
+ inbuf =
+ gst_buffer_copy_region (klv->data, GST_BUFFER_COPY_ALL, 0,
+ gst_buffer_get_size (klv->data));
+
+ }
+
+ if (index_entry.initialized) {
+ GST_DEBUG_OBJECT (demux, "Got entry dts:%" G_GINT64_FORMAT " keyframe:%d",
+ index_entry.dts, index_entry.keyframe);
+ }
+ if (index_entry.initialized && !index_entry.keyframe)
+ GST_BUFFER_FLAG_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (etrack->handle_func) {
+ /* Takes ownership of inbuf */
+ ret =
+ etrack->handle_func (&klv->key, inbuf, etrack->caps,
+ etrack->source_track, etrack->mapping_data, &outbuf);
+ inbuf = NULL;
+ } else {
+ outbuf = inbuf;
+ inbuf = NULL;
+ ret = GST_FLOW_OK;
+ }
+
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (demux, "Failed to handle essence element");
+ if (outbuf) {
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
+ }
+ return ret;
+ }
+
+ if (!index_entry.initialized) {
+ /* This can happen when doing scanning without entry tables */
+ index_entry.duration = 1;
+ index_entry.offset = demux->offset - demux->run_in;
+ index_entry.dts = etrack->position;
+ index_entry.pts = etrack->intra_only ? etrack->position : G_MAXUINT64;
+ index_entry.keyframe =
+ !GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ index_entry.initialized = TRUE;
+ GST_DEBUG_OBJECT (demux,
+ "Storing newly discovered information on track %d. dts: %"
+ G_GINT64_FORMAT " offset:%" G_GUINT64_FORMAT " keyframe:%d",
+ etrack->track_id, index_entry.dts, index_entry.offset,
+ index_entry.keyframe);
+
+ if (!etrack->offsets)
+ etrack->offsets = g_array_new (FALSE, TRUE, sizeof (GstMXFDemuxIndex));
+
+ /* We only ever append to the track offset entry. */
+ g_assert (etrack->position <= etrack->offsets->len);
+ g_array_insert_val (etrack->offsets, etrack->position, index_entry);
+ }
+
+ if (peek)
+ goto out;
+
+ if (!outbuf) {
+ GST_DEBUG_OBJECT (demux, "No output buffer created");
+ goto out;
+ }
+
+ inbuf = outbuf;
+ outbuf = NULL;
+
+ max_temporal_offset = get_track_max_temporal_offset (demux, etrack);
+
+ for (i = 0; i < demux->src->len; i++) {
+ GstMXFDemuxPad *pad = g_ptr_array_index (demux->src, i);
+
+ if (pad->current_essence_track != etrack)
+ continue;
+
+ if (pad->eos) {
+ GST_DEBUG_OBJECT (pad, "Pad is already EOS");
+ continue;
+ }
+
+ if (etrack->position < pad->current_essence_track_position) {
+ GST_DEBUG_OBJECT (pad,
+ "Not at current component's position (track:%" G_GINT64_FORMAT
+ " essence:%" G_GINT64_FORMAT ")", etrack->position,
+ pad->current_essence_track_position);
+ continue;
+ }
+
+ {
+ GstMXFDemuxPad *earliest = gst_mxf_demux_get_earliest_pad (demux);
+
+ if (earliest && earliest != pad && earliest->position < pad->position &&
+ pad->position - earliest->position > demux->max_drift) {
+ GST_DEBUG_OBJECT (earliest,
+ "Pad is too far ahead of time (%" GST_TIME_FORMAT " vs earliest:%"
+ GST_TIME_FORMAT ")", GST_TIME_ARGS (earliest->position),
+ GST_TIME_ARGS (pad->position));
+ continue;
+ }
+ }
+
+ /* Create another subbuffer to have writable metadata */
+ outbuf =
+ gst_buffer_copy_region (inbuf, GST_BUFFER_COPY_ALL, 0,
+ gst_buffer_get_size (inbuf));
+
+ pts = index_entry.pts;
+
+ GST_BUFFER_DTS (outbuf) = pad->position;
+ if (etrack->intra_only) {
+ GST_BUFFER_PTS (outbuf) = pad->position;
+ } else if (pts != G_MAXUINT64) {
+ GST_BUFFER_PTS (outbuf) = gst_util_uint64_scale (pts * GST_SECOND,
+ pad->current_essence_track->source_track->edit_rate.d,
+ pad->current_essence_track->source_track->edit_rate.n);
+ GST_BUFFER_PTS (outbuf) +=
+ gst_util_uint64_scale (pad->current_component_start_position *
+ GST_SECOND, pad->material_track->edit_rate.d,
+ pad->material_track->edit_rate.n);
+ /* We are dealing with reordered data, the PTS is shifted forward by the
+ * maximum temporal reordering (the DTS remain as-is). */
+ if (max_temporal_offset > 0)
+ GST_BUFFER_PTS (outbuf) +=
+ gst_util_uint64_scale (max_temporal_offset * GST_SECOND,
+ pad->current_essence_track->source_track->edit_rate.d,
+ pad->current_essence_track->source_track->edit_rate.n);
+
+ } else {
+ GST_BUFFER_PTS (outbuf) = GST_CLOCK_TIME_NONE;
+ }
+
+ GST_BUFFER_DURATION (outbuf) =
+ gst_util_uint64_scale (GST_SECOND,
+ index_entry.duration *
+ pad->current_essence_track->source_track->edit_rate.d,
+ pad->current_essence_track->source_track->edit_rate.n);
+ GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
+ GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
+
+ if (pad->material_track->parent.type == MXF_METADATA_TRACK_PICTURE_ESSENCE
+ && pad->start_timecode.config.fps_n != 0
+ && pad->start_timecode.config.fps_d != 0) {
+ if (etrack->intra_only) {
+ GstVideoTimeCode timecode = pad->start_timecode;
+
+ gst_video_time_code_add_frames (&timecode,
+ pad->current_material_track_position);
+ gst_buffer_add_video_time_code_meta (outbuf, &timecode);
+ } else if (pts != G_MAXUINT64) {
+ GstVideoTimeCode timecode = pad->start_timecode;
+
+ gst_video_time_code_add_frames (&timecode,
+ pad->current_component_start_position);
+ gst_video_time_code_add_frames (&timecode,
+ gst_util_uint64_scale (pts,
+ pad->material_track->edit_rate.n *
+ pad->current_essence_track->source_track->edit_rate.d,
+ pad->material_track->edit_rate.d *
+ pad->current_essence_track->source_track->edit_rate.n));
+ gst_buffer_add_video_time_code_meta (outbuf, &timecode);
+ }
+
+ }
+
+ /* Update accumulated error and compensate */
+ {
+ guint64 abs_error =
+ (GST_SECOND * pad->current_essence_track->source_track->edit_rate.d) %
+ pad->current_essence_track->source_track->edit_rate.n;
+ pad->position_accumulated_error +=
+ ((gdouble) abs_error) /
+ ((gdouble) pad->current_essence_track->source_track->edit_rate.n);
+ }
+ if (pad->position_accumulated_error >= 1.0) {
+ GST_BUFFER_DURATION (outbuf) += 1;
+ pad->position_accumulated_error -= 1.0;
+ }
+
+ if (pad->need_segment) {
+ GstEvent *e;
+
+ if (demux->close_seg_event)
+ gst_pad_push_event (GST_PAD_CAST (pad),
+ gst_event_ref (demux->close_seg_event));
+
+ if (max_temporal_offset > 0) {
+ GstSegment shift_segment;
+ /* Handle maximum temporal offset. We are shifting all output PTS for
+ * this stream by the greatest temporal reordering that can occur. In
+ * order not to change the stream/running time we shift the segment
+ * start and stop values accordingly */
+ gst_segment_copy_into (&demux->segment, &shift_segment);
+ if (GST_CLOCK_TIME_IS_VALID (shift_segment.start))
+ shift_segment.start +=
+ gst_util_uint64_scale (max_temporal_offset * GST_SECOND,
+ pad->current_essence_track->source_track->edit_rate.d,
+ pad->current_essence_track->source_track->edit_rate.n);
+ if (GST_CLOCK_TIME_IS_VALID (shift_segment.stop))
+ shift_segment.stop +=
+ gst_util_uint64_scale (max_temporal_offset * GST_SECOND,
+ pad->current_essence_track->source_track->edit_rate.d,
+ pad->current_essence_track->source_track->edit_rate.n);
+ e = gst_event_new_segment (&shift_segment);
+ } else
+ e = gst_event_new_segment (&demux->segment);
+ GST_DEBUG_OBJECT (pad, "Sending segment %" GST_PTR_FORMAT, e);
+ gst_event_set_seqnum (e, demux->seqnum);
+ gst_pad_push_event (GST_PAD_CAST (pad), e);
+ pad->need_segment = FALSE;
+ }
+
+ if (pad->tags) {
+ gst_pad_push_event (GST_PAD_CAST (pad), gst_event_new_tag (pad->tags));
+ pad->tags = NULL;
+ }
+
+ pad->position += GST_BUFFER_DURATION (outbuf);
+ pad->current_material_track_position += index_entry.duration;
+
+ if (pad->discont) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ pad->discont = FALSE;
+ }
+
+ /* Handlers can provide empty GAP buffers to indicate that the parsed
+ * content was valid but that nothing meaningful needs to be outputted. In
+ * such cases we send out a GAP event instead */
+ if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP) &&
+ gst_buffer_get_size (outbuf) == 0) {
+ GstEvent *gap = gst_event_new_gap (GST_BUFFER_DTS (outbuf),
+ GST_BUFFER_DURATION (outbuf));
+ gst_buffer_unref (outbuf);
+ GST_DEBUG_OBJECT (pad,
+ "Replacing empty gap buffer with gap event %" GST_PTR_FORMAT, gap);
+ gst_pad_push_event (GST_PAD_CAST (pad), gap);
+ } else {
+ GST_DEBUG_OBJECT (pad,
+ "Pushing buffer of size %" G_GSIZE_FORMAT " for track %u: pts %"
+ GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT
+ " position %" G_GUINT64_FORMAT, gst_buffer_get_size (outbuf),
+ pad->material_track->parent.track_id,
+ GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
+ pad->current_essence_track_position);
+
+ ret = gst_pad_push (GST_PAD_CAST (pad), outbuf);
+ }
+ outbuf = NULL;
+ ret = gst_flow_combiner_update_flow (demux->flowcombiner, ret);
+ GST_LOG_OBJECT (pad, "combined return %s", gst_flow_get_name (ret));
+
+ if (pad->position > demux->segment.position)
+ demux->segment.position = pad->position;
+
+ if (ret != GST_FLOW_OK)
+ goto out;
+
+ pad->current_essence_track_position += index_entry.duration;
+
+ if (pad->current_component) {
+ if (pad->current_component_duration > 0 &&
+ pad->current_essence_track_position - pad->current_component_start
+ >= pad->current_component_duration) {
+ GST_DEBUG_OBJECT (demux, "Switching to next component");
+
+ ret =
+ gst_mxf_demux_pad_set_component (demux, pad,
+ pad->current_component_index + 1);
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS) {
+ GST_ERROR_OBJECT (demux, "Switching component failed");
+ } else {
+ pad->current_essence_track->position =
+ pad->current_essence_track_position;
+ }
+ } else if (etrack->duration > 0
+ && pad->current_essence_track_position >= etrack->duration) {
+ GST_DEBUG_OBJECT (demux,
+ "Current component position after end of essence track");
+ ret = GST_FLOW_EOS;
+ }
+ } else if (etrack->duration > 0
+ && pad->current_essence_track_position == etrack->duration) {
+ GST_DEBUG_OBJECT (demux, "At the end of the essence track");
+ ret = GST_FLOW_EOS;
+ }
+
+ if (ret == GST_FLOW_EOS) {
+ GstEvent *e;
+
+ GST_DEBUG_OBJECT (pad, "EOS for track");
+ pad->eos = TRUE;
+ e = gst_event_new_eos ();
+ gst_event_set_seqnum (e, demux->seqnum);
+ gst_pad_push_event (GST_PAD_CAST (pad), e);
+ ret = GST_FLOW_OK;
+ }
+
+ if (ret != GST_FLOW_OK)
+ goto out;
+ }
+
+ out:
+ if (inbuf)
+ gst_buffer_unref (inbuf);
+
+ if (outbuf)
+ gst_buffer_unref (outbuf);
+
+ etrack->position += index_entry.duration;
+
+ return ret;
+ }
+
+ /*
+ * Called when analyzing the (RIP) Random Index Pack.
+ *
+ * FIXME : If a file doesn't have a RIP, we should iterate the partition headers
+ * to collect as much information as possible.
+ *
+ * This function collects as much information as possible from the partition headers:
+ * * Store partition information in the list of partitions
+ * * Handle any index table segment present
+ */
+ static void
+ read_partition_header (GstMXFDemux * demux)
+ {
+ GstMXFKLV klv;
+
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv) != GST_FLOW_OK
+ || !mxf_is_partition_pack (&klv.key)) {
+ return;
+ }
+
+ if (gst_mxf_demux_handle_partition_pack (demux, &klv) != GST_FLOW_OK) {
+ if (klv.data)
+ gst_buffer_unref (klv.data);
+ return;
+ }
+ gst_mxf_demux_consume_klv (demux, &klv);
+
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv) != GST_FLOW_OK)
+ return;
+
+ while (mxf_is_fill (&klv.key)) {
+ gst_mxf_demux_consume_klv (demux, &klv);
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset,
+ &klv) != GST_FLOW_OK)
+ return;
+ }
+
+ if (!mxf_is_index_table_segment (&klv.key)
+ && demux->current_partition->partition.header_byte_count) {
+ demux->offset += demux->current_partition->partition.header_byte_count;
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset,
+ &klv) != GST_FLOW_OK)
+ return;
+ }
+
+ while (mxf_is_fill (&klv.key)) {
+ gst_mxf_demux_consume_klv (demux, &klv);
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset,
+ &klv) != GST_FLOW_OK)
+ return;
+ }
+
+ if (demux->current_partition->partition.index_byte_count
+ && mxf_is_index_table_segment (&klv.key)) {
+ guint64 index_end_offset =
+ demux->offset + demux->current_partition->partition.index_byte_count;
+
+ while (demux->offset < index_end_offset) {
+ if (mxf_is_index_table_segment (&klv.key))
+ gst_mxf_demux_handle_index_table_segment (demux, &klv);
+ gst_mxf_demux_consume_klv (demux, &klv);
+
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset,
+ &klv) != GST_FLOW_OK)
+ return;
+ }
+ }
+
+ while (mxf_is_fill (&klv.key)) {
+ gst_mxf_demux_consume_klv (demux, &klv);
+ if (gst_mxf_demux_peek_klv_packet (demux, demux->offset,
+ &klv) != GST_FLOW_OK)
+ return;
+ }
+
+ if (mxf_is_generic_container_system_item (&klv.key) ||
+ mxf_is_generic_container_essence_element (&klv.key) ||
+ mxf_is_avid_essence_container_essence_element (&klv.key)) {
+ if (demux->current_partition->essence_container_offset == 0)
+ demux->current_partition->essence_container_offset =
+ demux->offset - demux->current_partition->partition.this_partition -
+ demux->run_in;
+ }
+ }
+
+ static GstFlowReturn
+ gst_mxf_demux_handle_random_index_pack (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+ guint i;
+ GList *l;
+ GstMapInfo map;
+ gboolean ret;
+ GstFlowReturn flowret;
+
+ GST_DEBUG_OBJECT (demux,
+ "Handling random index pack of size %" G_GSIZE_FORMAT " at offset %"
+ G_GUINT64_FORMAT, klv->length, klv->offset);
+
+ if (demux->random_index_pack) {
+ GST_DEBUG_OBJECT (demux, "Already parsed random index pack");
+ return GST_FLOW_OK;
+ }
+
+ flowret = gst_mxf_demux_fill_klv (demux, klv);
+ if (flowret != GST_FLOW_OK)
+ return flowret;
+
+ gst_buffer_map (klv->data, &map, GST_MAP_READ);
+ ret =
+ mxf_random_index_pack_parse (&klv->key, map.data, map.size,
+ &demux->random_index_pack);
+ gst_buffer_unmap (klv->data, &map);
+
+ if (!ret) {
+ GST_ERROR_OBJECT (demux, "Parsing random index pack failed");
+ return GST_FLOW_ERROR;
+ }
+
+ for (i = 0; i < demux->random_index_pack->len; i++) {
+ GstMXFDemuxPartition *p = NULL;
+ MXFRandomIndexPackEntry *e =
+ &g_array_index (demux->random_index_pack, MXFRandomIndexPackEntry, i);
+
+ if (e->offset < demux->run_in) {
+ GST_ERROR_OBJECT (demux, "Invalid random index pack entry");
+ return GST_FLOW_ERROR;
+ }
+
+ for (l = demux->partitions; l; l = l->next) {
+ GstMXFDemuxPartition *tmp = l->data;
+
+ if (tmp->partition.this_partition + demux->run_in == e->offset) {
+ p = tmp;
+ break;
+ }
+ }
+
+ if (!p) {
+ p = g_new0 (GstMXFDemuxPartition, 1);
+ p->partition.this_partition = e->offset - demux->run_in;
+ p->partition.body_sid = e->body_sid;
+ demux->partitions =
+ g_list_insert_sorted (demux->partitions, p,
+ (GCompareFunc) gst_mxf_demux_partition_compare);
+ }
+ }
+
+ for (l = demux->partitions; l; l = l->next) {
+ GstMXFDemuxPartition *a, *b;
+
+ if (l->next == NULL)
+ break;
+
+ a = l->data;
+ b = l->next->data;
+
+ b->partition.prev_partition = a->partition.this_partition;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static gint
+ compare_index_table_segment (MXFIndexTableSegment * sa,
+ MXFIndexTableSegment * sb)
+ {
+ if (mxf_uuid_is_equal (&sa->instance_id, &sb->instance_id))
+ return 0;
+ if (sa->body_sid != sb->body_sid)
+ return (sa->body_sid < sb->body_sid) ? -1 : 1;
+ if (sa->index_sid != sb->index_sid)
+ return (sa->index_sid < sb->index_sid) ? -1 : 1;
+ /* Finally sort by index start position */
+ if (sa->index_start_position < sb->index_start_position)
+ return -1;
+ return (sa->index_start_position != sb->index_start_position);
+ }
+
+ #if !GLIB_CHECK_VERSION(2, 62, 0)
+ static gboolean
+ has_table_segment (GArray * segments, MXFIndexTableSegment * target)
+ {
+ guint i;
+ for (i = 0; i < segments->len; i++) {
+ MXFIndexTableSegment *cand =
+ &g_array_index (segments, MXFIndexTableSegment, i);
+ if (mxf_uuid_is_equal (&cand->instance_id, &target->instance_id))
+ return TRUE;
+ }
+ return FALSE;
+ }
+ #endif
+
/* Handles an index table segment KLV: pulls and parses its payload, then
 * queues the parsed segment (sorted) on demux->pending_index_table_segments
 * for later collection. Duplicates -- already pending, or already merged
 * into one of demux->index_tables -- are freed and silently dropped.
 *
 * Returns GST_FLOW_ERROR when the payload cannot be pulled or parsed,
 * GST_FLOW_OK otherwise (including the duplicate cases). */
static GstFlowReturn
gst_mxf_demux_handle_index_table_segment (GstMXFDemux * demux, GstMXFKLV * klv)
{
  MXFIndexTableSegment *segment;
  GstMapInfo map;
  gboolean ret;
  GList *tmp;
  GstFlowReturn flowret;

  /* Make sure the payload is available before mapping it below */
  flowret = gst_mxf_demux_fill_klv (demux, klv);
  if (flowret != GST_FLOW_OK)
    return flowret;

  GST_DEBUG_OBJECT (demux,
      "Handling index table segment of size %" G_GSIZE_FORMAT " at offset %"
      G_GUINT64_FORMAT, klv->length, klv->offset);

  segment = g_new0 (MXFIndexTableSegment, 1);

  gst_buffer_map (klv->data, &map, GST_MAP_READ);
  ret = mxf_index_table_segment_parse (&klv->key, segment, map.data, map.size);
  gst_buffer_unmap (klv->data, &map);

  if (!ret) {
    GST_ERROR_OBJECT (demux, "Parsing index table segment failed");
    g_free (segment);
    return GST_FLOW_ERROR;
  }

  /* Drop it if we already saw it. Ideally we should be able to do this before
     parsing (by checking instance UID) */
  if (g_list_find_custom (demux->pending_index_table_segments, segment,
          (GCompareFunc) compare_index_table_segment)) {
    GST_DEBUG_OBJECT (demux, "Already in pending list");
    g_free (segment);
    return GST_FLOW_OK;
  }
  /* Also drop it if it was already merged into a collected index table.
   * table->segments is kept sorted, so a binary search is valid here;
   * has_table_segment() is the pre-2.62 GLib fallback. */
  for (tmp = demux->index_tables; tmp; tmp = tmp->next) {
    GstMXFDemuxIndexTable *table = (GstMXFDemuxIndexTable *) tmp->data;
#if !GLIB_CHECK_VERSION (2, 62, 0)
    if (has_table_segment (table->segments, segment)) {
#else
    if (g_array_binary_search (table->segments, segment,
            (GCompareFunc) compare_index_table_segment, NULL)) {
#endif
      GST_DEBUG_OBJECT (demux, "Already handled");
      g_free (segment);
      return GST_FLOW_OK;
    }
  }

  demux->pending_index_table_segments =
      g_list_insert_sorted (demux->pending_index_table_segments, segment,
      (GCompareFunc) compare_index_table_segment);

  return GST_FLOW_OK;
}
+
/* Peeks the KLV packet starting at @offset: reads the 16-byte key and the
 * BER-encoded length into @klv without pulling the payload itself.
 * klv->data_offset is set to the size of key + length field, i.e. the
 * offset (relative to klv->offset) at which the value starts.
 *
 * Returns GST_FLOW_ERROR for invalid BER lengths or packets larger than a
 * GstBuffer can hold; otherwise forwards the result of the pulls. */
static GstFlowReturn
gst_mxf_demux_peek_klv_packet (GstMXFDemux * demux, guint64 offset,
    GstMXFKLV * klv)
{
  GstBuffer *buffer = NULL;
  const guint8 *data;
  GstFlowReturn ret = GST_FLOW_OK;
  GstMapInfo map;
#ifndef GST_DISABLE_GST_DEBUG
  gchar str[48];
#endif

  memset (klv, 0, sizeof (GstMXFKLV));
  klv->offset = offset;

  /* Pull 16 byte key and first byte of BER encoded length */
  if ((ret =
          gst_mxf_demux_pull_range (demux, offset, 17, &buffer)) != GST_FLOW_OK)
    goto beach;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  memcpy (&klv->key, map.data, 16);

  /* Decode BER encoded packet length */
  if ((map.data[16] & 0x80) == 0) {
    /* Short form: the length fits in the low 7 bits of this byte */
    klv->length = map.data[16];
    klv->data_offset = 17;
  } else {
    /* Long form: the low 7 bits give the number of following length bytes */
    guint slen = map.data[16] & 0x7f;

    klv->data_offset = 16 + 1 + slen;

    gst_buffer_unmap (buffer, &map);
    gst_buffer_unref (buffer);
    buffer = NULL;

    /* Must be at most 8 according to SMPTE-379M 5.3.4 */
    if (slen > 8) {
      GST_ERROR_OBJECT (demux, "Invalid KLV packet length: %u", slen);
      ret = GST_FLOW_ERROR;
      goto beach;
    }

    /* Now pull the length of the packet */
    if ((ret = gst_mxf_demux_pull_range (demux, offset + 17, slen,
                &buffer)) != GST_FLOW_OK)
      goto beach;

    gst_buffer_map (buffer, &map, GST_MAP_READ);

    /* Accumulate the big-endian length bytes */
    data = map.data;
    klv->length = 0;
    while (slen) {
      klv->length = (klv->length << 8) | *data;
      data++;
      slen--;
    }
  }

  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);
  buffer = NULL;

  /* GStreamer's buffer sizes are stored in a guint so we
   * limit ourself to G_MAXUINT large buffers */
  if (klv->length > G_MAXUINT) {
    GST_ERROR_OBJECT (demux,
        "Unsupported KLV packet length: %" G_GSIZE_FORMAT, klv->length);
    ret = GST_FLOW_ERROR;
    goto beach;
  }

  GST_DEBUG_OBJECT (demux,
      "Found KLV packet at offset %" G_GUINT64_FORMAT " with key %s and length "
      "%" G_GSIZE_FORMAT, offset, mxf_ul_to_string (&klv->key, str),
      klv->length);

beach:
  if (buffer)
    gst_buffer_unref (buffer);

  return ret;
}
+
+ static GstFlowReturn
+ gst_mxf_demux_fill_klv (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+ if (klv->data)
+ return GST_FLOW_OK;
+ GST_DEBUG_OBJECT (demux,
+ "Pulling %" G_GSIZE_FORMAT " bytes from offset %" G_GUINT64_FORMAT,
+ klv->length, klv->offset + klv->data_offset);
+ return gst_mxf_demux_pull_range (demux, klv->offset + klv->data_offset,
+ klv->length, &klv->data);
+ }
+
+ /* Call when done with a klv. Will release the buffer (if any) and will update
+ * the demuxer offset position. Do *NOT* call if you do not want the demuxer
+ * offset to be updated */
+ static void
+ gst_mxf_demux_consume_klv (GstMXFDemux * demux, GstMXFKLV * klv)
+ {
+ if (klv->data) {
+ gst_buffer_unref (klv->data);
+ klv->data = NULL;
+ }
+ GST_DEBUG_OBJECT (demux,
+ "Consuming KLV offset:%" G_GUINT64_FORMAT " data_offset:%"
+ G_GUINT64_FORMAT " length:%" G_GSIZE_FORMAT " consumed:%"
+ G_GUINT64_FORMAT, klv->offset, klv->data_offset, klv->length,
+ klv->consumed);
+ if (klv->consumed)
+ demux->offset = klv->offset + klv->consumed;
+ else
+ demux->offset += klv->data_offset + klv->length;
+ }
+
/* Tries to locate and parse the Random Index Pack (RIP) stored at the very
 * end of the file (the last 4 bytes give the RIP's total size). On success
 * the partition list is filled in from the RIP and, if not done already,
 * pending index table segments are collected. The demuxer offset is only
 * modified temporarily and restored before returning. All failures are
 * non-fatal: the function simply returns without a RIP. */
static void
gst_mxf_demux_pull_random_index_pack (GstMXFDemux * demux)
{
  GstBuffer *buffer;
  gint64 filesize = -1;
  GstFormat fmt = GST_FORMAT_BYTES;
  guint32 pack_size;
  guint64 old_offset = demux->offset;
  GstMapInfo map;
  GstFlowReturn flow_ret;
  GstMXFKLV klv;

  if (!gst_pad_peer_query_duration (demux->sinkpad, fmt, &filesize) ||
      fmt != GST_FORMAT_BYTES || filesize == -1) {
    GST_DEBUG_OBJECT (demux, "Can't query upstream size");
    return;
  }

  /* NOTE(review): asserts instead of gracefully rejecting files <= 4 bytes;
   * presumably upstream never reports such sizes here -- confirm */
  g_assert (filesize > 4);

  /* Last 4 bytes of the file contain the size of the whole RIP pack */
  buffer = NULL;
  if (gst_mxf_demux_pull_range (demux, filesize - 4, 4, &buffer) != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (demux, "Failed pulling last 4 bytes");
    return;
  }

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  pack_size = GST_READ_UINT32_BE (map.data);
  gst_buffer_unmap (buffer, &map);

  gst_buffer_unref (buffer);

  /* Sanity-check the advertised pack size against the file size */
  if (pack_size < 20) {
    GST_DEBUG_OBJECT (demux, "Too small pack size (%u bytes)", pack_size);
    return;
  } else if (pack_size > filesize - 20) {
    GST_DEBUG_OBJECT (demux, "Too large pack size (%u bytes)", pack_size);
    return;
  }

  /* Peek for klv at filesize - pack_size */
  if (gst_mxf_demux_peek_klv_packet (demux, filesize - pack_size,
          &klv) != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (demux, "Failed pulling random index pack key");
    return;
  }

  if (!mxf_is_random_index_pack (&klv.key)) {
    GST_DEBUG_OBJECT (demux, "No random index pack");
    return;
  }

  /* Handle the RIP with the offset temporarily pointing at it, then
   * restore the previous position */
  demux->offset = filesize - pack_size;
  flow_ret = gst_mxf_demux_handle_random_index_pack (demux, &klv);
  if (klv.data)
    gst_buffer_unref (klv.data);
  demux->offset = old_offset;

  if (flow_ret == GST_FLOW_OK && !demux->index_table_segments_collected) {
    collect_index_table_segments (demux);
    demux->index_table_segments_collected = TRUE;
  }
}
+
/* Attempts to parse final (closed/complete) header metadata from the footer
 * partition, walking backwards through earlier partitions via
 * prev_partition whenever the current one has no usable metadata or any
 * step fails. The demuxer offset and current partition are saved on entry
 * and unconditionally restored on exit, so this is purely a metadata
 * side-effect (via gst_mxf_demux_handle_metadata() and friends). */
static void
gst_mxf_demux_parse_footer_metadata (GstMXFDemux * demux)
{
  guint64 old_offset = demux->offset;
  GstMXFKLV klv;
  GstFlowReturn flow = GST_FLOW_OK;
  GstMXFDemuxPartition *old_partition = demux->current_partition;

  GST_DEBUG_OBJECT (demux, "Parsing footer metadata");

  demux->current_partition = NULL;

  gst_mxf_demux_reset_metadata (demux);

  /* Start at the footer partition if its offset is known, otherwise at the
   * last entry of the random index pack */
  if (demux->footer_partition_pack_offset != 0) {
    demux->offset = demux->run_in + demux->footer_partition_pack_offset;
  } else {
    MXFRandomIndexPackEntry *entry =
        &g_array_index (demux->random_index_pack, MXFRandomIndexPackEntry,
        demux->random_index_pack->len - 1);
    demux->offset = entry->offset;
  }

next_try:
  GST_LOG_OBJECT (demux, "Peeking partition pack at offset %" G_GUINT64_FORMAT,
      demux->offset);

  /* Process Partition Pack */
  flow = gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv);
  if (G_UNLIKELY (flow != GST_FLOW_OK))
    goto out;

  if (!mxf_is_partition_pack (&klv.key))
    goto out;

  if (gst_mxf_demux_handle_partition_pack (demux, &klv) != GST_FLOW_OK) {
    if (klv.data)
      gst_buffer_unref (klv.data);
    goto out;
  }

  gst_mxf_demux_consume_klv (demux, &klv);

  /* If there's no Header Metadata in this partition, jump to the previous
   * one */
  if (demux->current_partition->partition.header_byte_count == 0) {
    /* Reached the first partition, bail out */
    if (demux->current_partition->partition.this_partition == 0)
      goto out;

    demux->offset =
        demux->run_in + demux->current_partition->partition.prev_partition;
    goto next_try;
  }

  /* Next up should be an optional fill pack followed by a primer pack */
  while (TRUE) {
    flow = gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv);
    if (G_UNLIKELY (flow != GST_FLOW_OK)) {
      /* If ever we can't get the next KLV, jump to the previous partition */
      if (!demux->current_partition->partition.prev_partition)
        goto out;
      demux->offset =
          demux->run_in + demux->current_partition->partition.prev_partition;
      goto next_try;
    }

    if (mxf_is_fill (&klv.key)) {
      gst_mxf_demux_consume_klv (demux, &klv);
    } else if (mxf_is_primer_pack (&klv.key)) {
      /* Update primer mapping if present (jump to previous if it failed) */
      if (!demux->current_partition->primer.mappings) {
        if (gst_mxf_demux_handle_primer_pack (demux, &klv) != GST_FLOW_OK) {
          gst_mxf_demux_consume_klv (demux, &klv);
          if (!demux->current_partition->partition.prev_partition)
            goto out;
          demux->offset =
              demux->run_in +
              demux->current_partition->partition.prev_partition;
          goto next_try;
        }
      }
      gst_mxf_demux_consume_klv (demux, &klv);
      break;
    } else {
      /* Neither fill nor primer: this partition is unusable, go back */
      if (!demux->current_partition->partition.prev_partition)
        goto out;
      demux->offset =
          demux->run_in + demux->current_partition->partition.prev_partition;
      goto next_try;
    }
  }

  /* parse metadata for this partition */
  while (demux->offset <
      demux->run_in + demux->current_partition->primer.offset +
      demux->current_partition->partition.header_byte_count) {
    flow = gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv);
    if (G_UNLIKELY (flow != GST_FLOW_OK)) {
      if (!demux->current_partition->partition.prev_partition)
        goto out;
      demux->offset =
          demux->run_in + demux->current_partition->partition.prev_partition;
      goto next_try;
    }

    if (mxf_is_metadata (&klv.key)) {
      flow = gst_mxf_demux_handle_metadata (demux, &klv);
      gst_mxf_demux_consume_klv (demux, &klv);

      if (G_UNLIKELY (flow != GST_FLOW_OK)) {
        /* Parsing failed: throw away everything gathered so far and retry
         * from the previous partition */
        gst_mxf_demux_reset_metadata (demux);
        if (!demux->current_partition->partition.prev_partition)
          goto out;
        demux->offset =
            demux->run_in + demux->current_partition->partition.prev_partition;
        goto next_try;
      }
    } else if (mxf_is_descriptive_metadata (&klv.key)) {
      gst_mxf_demux_handle_descriptive_metadata (demux, &klv);
      gst_mxf_demux_consume_klv (demux, &klv);
    } else {
      gst_mxf_demux_consume_klv (demux, &klv);
    }
  }

  /* resolve references etc */
  if (!demux->preface || gst_mxf_demux_resolve_references (demux) !=
      GST_FLOW_OK || gst_mxf_demux_update_tracks (demux) != GST_FLOW_OK) {
    /* Don't attempt to parse metadata from this partition again */
    demux->current_partition->parsed_metadata = TRUE;
    /* Skip to previous partition or bail out */
    if (!demux->current_partition->partition.prev_partition)
      goto out;
    demux->offset =
        demux->run_in + demux->current_partition->partition.prev_partition;
    goto next_try;
  }

out:
  demux->offset = old_offset;
  demux->current_partition = old_partition;
}
+
/* Central KLV dispatcher: routes a peeked KLV to the matching handler based
 * on its key (partition pack, primer, metadata, essence, RIP, index table
 * segment, fill, ...). Before dispatching, pending metadata is resolved into
 * tracks once essence data (or the end of the header metadata) is reached.
 * @peek is forwarded to the essence-element handler. Does NOT consume the
 * KLV; the caller is responsible for that. */
static GstFlowReturn
gst_mxf_demux_handle_klv_packet (GstMXFDemux * demux, GstMXFKLV * klv,
    gboolean peek)
{
  MXFUL *key = &klv->key;
#ifndef GST_DISABLE_GST_DEBUG
  gchar key_str[48];
#endif
  GstFlowReturn ret = GST_FLOW_OK;

  /* If metadata was updated and we are now past the header metadata (or hit
   * essence data), resolve references and (re)create the output tracks */
  if (demux->update_metadata
      && demux->preface
      && (demux->offset >=
          demux->run_in + demux->current_partition->primer.offset +
          demux->current_partition->partition.header_byte_count ||
          mxf_is_generic_container_system_item (key) ||
          mxf_is_generic_container_essence_element (key) ||
          mxf_is_avid_essence_container_essence_element (key))) {
    demux->current_partition->parsed_metadata = TRUE;
    if ((ret = gst_mxf_demux_resolve_references (demux)) != GST_FLOW_OK ||
        (ret = gst_mxf_demux_update_tracks (demux)) != GST_FLOW_OK) {
      goto beach;
    }
  } else if (demux->metadata_resolved && demux->requested_package_string) {
    /* A new package was requested via the property: re-select tracks */
    if ((ret = gst_mxf_demux_update_tracks (demux)) != GST_FLOW_OK) {
      goto beach;
    }
  }

  if (!mxf_is_mxf_packet (key)) {
    GST_WARNING_OBJECT (demux,
        "Skipping non-MXF packet of size %" G_GSIZE_FORMAT " at offset %"
        G_GUINT64_FORMAT ", key: %s", klv->length,
        demux->offset, mxf_ul_to_string (key, key_str));
  } else if (mxf_is_partition_pack (key)) {
    ret = gst_mxf_demux_handle_partition_pack (demux, klv);
  } else if (mxf_is_primer_pack (key)) {
    ret = gst_mxf_demux_handle_primer_pack (demux, klv);
  } else if (mxf_is_metadata (key)) {
    ret = gst_mxf_demux_handle_metadata (demux, klv);
  } else if (mxf_is_descriptive_metadata (key)) {
    ret = gst_mxf_demux_handle_descriptive_metadata (demux, klv);
  } else if (mxf_is_generic_container_system_item (key)) {
    /* Index tables must be complete before handling essence/system data */
    if (demux->pending_index_table_segments)
      collect_index_table_segments (demux);
    ret = gst_mxf_demux_handle_generic_container_system_item (demux, klv);
  } else if (mxf_is_generic_container_essence_element (key) ||
      mxf_is_avid_essence_container_essence_element (key)) {
    if (demux->pending_index_table_segments)
      collect_index_table_segments (demux);
    ret =
        gst_mxf_demux_handle_generic_container_essence_element (demux, klv,
        peek);
  } else if (mxf_is_random_index_pack (key)) {
    ret = gst_mxf_demux_handle_random_index_pack (demux, klv);

    /* In random-access mode the RIP enables collecting all index tables
     * up front (only do this once) */
    if (ret == GST_FLOW_OK && demux->random_access
        && !demux->index_table_segments_collected) {
      collect_index_table_segments (demux);
      demux->index_table_segments_collected = TRUE;
    }
  } else if (mxf_is_index_table_segment (key)) {
    ret = gst_mxf_demux_handle_index_table_segment (demux, klv);
  } else if (mxf_is_fill (key)) {
    GST_DEBUG_OBJECT (demux,
        "Skipping filler packet of size %" G_GSIZE_FORMAT " at offset %"
        G_GUINT64_FORMAT, klv->length, demux->offset);
  } else {
    GST_DEBUG_OBJECT (demux,
        "Skipping unknown packet of size %" G_GSIZE_FORMAT " at offset %"
        G_GUINT64_FORMAT ", key: %s", klv->length,
        demux->offset, mxf_ul_to_string (key, key_str));
  }

beach:
  return ret;
}
+
+ static void
+ gst_mxf_demux_set_partition_for_offset (GstMXFDemux * demux, guint64 offset)
+ {
+ GList *l;
+
+ GST_LOG_OBJECT (demux, "offset %" G_GUINT64_FORMAT, offset);
+
+ /* This partition will already be parsed, otherwise
+ * the position wouldn't be in the index */
+ for (l = demux->partitions; l; l = l->next) {
+ GstMXFDemuxPartition *p = l->data;
+
+ if (p->partition.this_partition + demux->run_in <= offset)
+ demux->current_partition = p;
+ }
+ if (demux->current_partition)
+ GST_DEBUG_OBJECT (demux,
+ "Current partition now %p (body_sid:%d index_sid:%d this_partition:%"
+ G_GUINT64_FORMAT ")", demux->current_partition,
+ demux->current_partition->partition.body_sid,
+ demux->current_partition->partition.index_sid,
+ demux->current_partition->partition.this_partition);
+ else
+ GST_DEBUG_OBJECT (demux, "Haven't found partition for offset yet");
+ }
+
+ static guint64
+ find_closest_offset (GArray * offsets, gint64 * position, gboolean keyframe)
+ {
+ GstMXFDemuxIndex *idx;
+ gint64 current_position = *position;
+
+ if (!offsets || offsets->len == 0)
+ return -1;
+
+ current_position = MIN (current_position, offsets->len - 1);
+
+ idx = &g_array_index (offsets, GstMXFDemuxIndex, current_position);
+ while (idx->offset == 0 || (keyframe && !idx->keyframe)) {
+ current_position--;
+ if (current_position < 0)
+ break;
+ idx = &g_array_index (offsets, GstMXFDemuxIndex, current_position);
+ }
+
+ if (idx->offset != 0 && (!keyframe || idx->keyframe)) {
+ *position = current_position;
+ return idx->offset;
+ }
+
+ return -1;
+ }
+
/* Finds the file offset of edit unit *position on @etrack, preferring the
 * index tables and falling back to the demuxer-generated per-track offset
 * cache. In pull (random-access) mode it will, as a last resort, scan the
 * file forward from the closest known offset, building up the index as it
 * goes, until the requested element is reached. *position may be adjusted
 * to the edit unit actually found (e.g. the previous keyframe).
 *
 * Returns the absolute offset or -1 (G_MAXUINT64) when not found. The
 * demuxer offset/current partition are restored before returning. */
static guint64
gst_mxf_demux_find_essence_element (GstMXFDemux * demux,
    GstMXFDemuxEssenceTrack * etrack, gint64 * position, gboolean keyframe)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 old_offset = demux->offset;
  GstMXFDemuxPartition *old_partition = demux->current_partition;
  gint i;
  guint64 offset;
  gint64 requested_position = *position, index_start_position;
  GstMXFDemuxIndex index_entry = { 0, };

  GST_DEBUG_OBJECT (demux, "Trying to find essence element %" G_GINT64_FORMAT
      " of track 0x%08x with body_sid %u (keyframe %d)", *position,
      etrack->track_number, etrack->body_sid, keyframe);

  /* Get entry from index table if present */
  if (find_edit_entry (demux, etrack, *position, keyframe, &index_entry)) {
    GST_DEBUG_OBJECT (demux,
        "Got position %" G_GINT64_FORMAT " at offset %" G_GUINT64_FORMAT,
        index_entry.dts, index_entry.offset);
    *position = index_entry.dts;
    return index_entry.offset;
  }

  GST_DEBUG_OBJECT (demux, "Not found in index table");

  /* Fallback to track offsets */

  if (!demux->random_access) {
    /* Best effort for push mode */
    offset = find_closest_offset (etrack->offsets, position, keyframe);
    if (offset != -1)
      GST_DEBUG_OBJECT (demux,
          "Starting with edit unit %" G_GINT64_FORMAT " for %" G_GINT64_FORMAT
          " in generated index at offset %" G_GUINT64_FORMAT, *position,
          requested_position, offset);
    return offset;
  }

  if (etrack->duration > 0 && *position >= etrack->duration) {
    GST_WARNING_OBJECT (demux, "Position after end of essence track");
    return -1;
  }

from_track_offset:

  index_start_position = *position;

  demux->offset = demux->run_in;

  /* Start scanning from the closest cached offset (non-keyframe search:
   * we just need any known starting point at or before the target) */
  offset = find_closest_offset (etrack->offsets, &index_start_position, FALSE);
  if (offset != -1) {
    demux->offset = offset + demux->run_in;
    GST_DEBUG_OBJECT (demux,
        "Starting with edit unit %" G_GINT64_FORMAT " for %" G_GINT64_FORMAT
        " in generated index at offset %" G_GUINT64_FORMAT,
        index_start_position, requested_position, offset);
  } else {
    index_start_position = -1;
  }

  gst_mxf_demux_set_partition_for_offset (demux, demux->offset);

  /* Prime all track positions for the scan: the searched track starts at
   * the cached position, other tracks are only known when scanning from
   * the very beginning of the file */
  for (i = 0; i < demux->essence_tracks->len; i++) {
    GstMXFDemuxEssenceTrack *t =
        &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);

    if (index_start_position != -1 && t == etrack)
      t->position = index_start_position;
    else
      t->position = (demux->offset == demux->run_in) ? 0 : -1;
    GST_LOG_OBJECT (demux, "Setting track %d position to %" G_GINT64_FORMAT,
        t->track_id, t->position);
  }

  /* Else peek at all essence elements and complete our
   * index until we find the requested element
   */
  while (ret == GST_FLOW_OK) {
    GstMXFKLV klv;

    GST_LOG_OBJECT (demux, "Pulling from offset %" G_GINT64_FORMAT,
        demux->offset);
    ret = gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv);

    if (ret == GST_FLOW_EOS) {
      /* Handle EOS: positions reached so far now define track durations */
      for (i = 0; i < demux->essence_tracks->len; i++) {
        GstMXFDemuxEssenceTrack *t =
            &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack,
            i);

        if (t->position > 0)
          t->duration = t->position;
      }
      /* For the searched track this is really our position */
      etrack->duration = etrack->position;

      /* Push EOS on all pads that ran past their track's duration */
      for (i = 0; i < demux->src->len; i++) {
        GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);

        if (!p->eos
            && p->current_essence_track_position >=
            p->current_essence_track->duration) {
          GstEvent *e;

          p->eos = TRUE;
          e = gst_event_new_eos ();
          gst_event_set_seqnum (e, demux->seqnum);
          gst_pad_push_event (GST_PAD_CAST (p), e);
        }
      }
    }

    GST_LOG_OBJECT (demux,
        "pulling gave flow:%s track->position:%" G_GINT64_FORMAT,
        gst_flow_get_name (ret), etrack->position);
    if (G_UNLIKELY (ret != GST_FLOW_OK) && etrack->position <= *position) {
      /* Scan failed before reaching the target: restore state and give up */
      demux->offset = old_offset;
      demux->current_partition = old_partition;
      break;
    } else if (G_UNLIKELY (ret == GST_FLOW_OK)) {
      /* peek=TRUE: handlers index the element without pushing it */
      ret = gst_mxf_demux_handle_klv_packet (demux, &klv, TRUE);
      gst_mxf_demux_consume_klv (demux, &klv);
    }

    GST_LOG_OBJECT (demux,
        "Handling gave flow:%s track->position:%" G_GINT64_FORMAT
        " looking for %" G_GINT64_FORMAT, gst_flow_get_name (ret),
        etrack->position, *position);

    /* If we found the position read it from the index again */
    if (((ret == GST_FLOW_OK && etrack->position == *position + 1) ||
            (ret == GST_FLOW_EOS && etrack->position == *position + 1))
        && etrack->offsets && etrack->offsets->len > *position
        && g_array_index (etrack->offsets, GstMXFDemuxIndex,
            *position).offset != 0) {
      GST_DEBUG_OBJECT (demux, "Found at offset %" G_GUINT64_FORMAT,
          demux->offset);
      demux->offset = old_offset;
      demux->current_partition = old_partition;
      if (find_edit_entry (demux, etrack, *position, keyframe, &index_entry)) {
        GST_DEBUG_OBJECT (demux,
            "Got position %" G_GINT64_FORMAT " at offset %" G_GUINT64_FORMAT,
            index_entry.dts, index_entry.offset);
        *position = index_entry.dts;
        return index_entry.offset;
      }
      /* Entry appeared in the generated offsets but not as an edit entry:
       * retry the whole track-offset scan */
      goto from_track_offset;
    }
  }
  demux->offset = old_offset;
  demux->current_partition = old_partition;

  GST_DEBUG_OBJECT (demux, "Not found in this file");

  return -1;
}
+
+ static GstFlowReturn
+ gst_mxf_demux_pull_and_handle_klv_packet (GstMXFDemux * demux)
+ {
+ GstMXFKLV klv;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean force_switch = FALSE;
+
+ if (demux->src->len > 0) {
+ if (!gst_mxf_demux_get_earliest_pad (demux)) {
+ ret = GST_FLOW_EOS;
+ GST_DEBUG_OBJECT (demux, "All tracks are EOS");
+ goto beach;
+ }
+ }
+
+ if (demux->state == GST_MXF_DEMUX_STATE_ESSENCE) {
+ g_assert (demux->current_partition->single_track
+ && demux->current_partition->single_track->wrapping !=
+ MXF_ESSENCE_WRAPPING_FRAME_WRAPPING);
+ /* Feeding essence directly (i.e. in the middle of a custom/clip KLV) */
+ ret =
+ gst_mxf_demux_handle_generic_container_essence_element (demux,
+ &demux->current_partition->clip_klv, FALSE);
+ gst_mxf_demux_consume_klv (demux, &demux->current_partition->clip_klv);
+ if (ret == GST_FLOW_OK
+ && demux->current_partition->single_track->position >=
+ demux->current_partition->single_track->duration) {
+ /* We are done with the contents of this clip/custom wrapping, force the
+ * switch to the next non-EOS track */
+ GST_DEBUG_OBJECT (demux, "Single track EOS, switch");
+ force_switch = TRUE;
+ }
+
+ } else {
+
+ ret = gst_mxf_demux_peek_klv_packet (demux, demux->offset, &klv);
+
+ /* FIXME
+ *
+ * Move this EOS handling to a separate function
+ */
+ if (ret == GST_FLOW_EOS && demux->src->len > 0) {
+ guint i;
+ GstMXFDemuxPad *p = NULL;
+
+ GST_DEBUG_OBJECT (demux, "EOS HANDLING");
+
+ for (i = 0; i < demux->src->len; i++) {
+ GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
+
+ GST_DEBUG_OBJECT (p,
+ "eos:%d current_essence_track_position:%" G_GINT64_FORMAT
+ " position:%" G_GINT64_FORMAT " duration:%" G_GINT64_FORMAT, p->eos,
+ p->current_essence_track_position,
+ p->current_essence_track->position,
+ p->current_essence_track->duration);
+ if (!p->eos
+ && p->current_essence_track->position >=
+ p->current_essence_track->duration) {
+ GstEvent *e;
+
+ p->eos = TRUE;
+ e = gst_event_new_eos ();
+ gst_event_set_seqnum (e, demux->seqnum);
+ gst_pad_push_event (GST_PAD_CAST (p), e);
+ }
+ }
+
+ while ((p = gst_mxf_demux_get_earliest_pad (demux))) {
+ guint64 offset;
+ gint64 position;
+
+ GST_DEBUG_OBJECT (p, "Trying on earliest");
+
+ position = p->current_essence_track_position;
+
+ offset =
+ gst_mxf_demux_find_essence_element (demux, p->current_essence_track,
+ &position, FALSE);
+ if (offset == -1) {
+ GstEvent *e;
+
+ GST_ERROR_OBJECT (demux, "Failed to find offset for essence track");
+ p->eos = TRUE;
+ e = gst_event_new_eos ();
+ gst_event_set_seqnum (e, demux->seqnum);
+ gst_pad_push_event (GST_PAD_CAST (p), e);
+ continue;
+ }
+
+ demux->offset = offset + demux->run_in;
+ gst_mxf_demux_set_partition_for_offset (demux, demux->offset);
+ if (p->current_essence_track->wrapping !=
+ MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ demux->state = GST_MXF_DEMUX_STATE_ESSENCE;
+ demux->current_partition->clip_klv.consumed =
+ offset - demux->current_partition->clip_klv.offset;
+ } else
+ demux->state = GST_MXF_DEMUX_STATE_KLV;
+ p->current_essence_track->position = position;
+
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+ }
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+
+ ret = gst_mxf_demux_handle_klv_packet (demux, &klv, FALSE);
+ gst_mxf_demux_consume_klv (demux, &klv);
+
+ /* We entered a new partition */
+ if (ret == GST_FLOW_OK && mxf_is_partition_pack (&klv.key)) {
+ GstMXFDemuxPartition *partition = demux->current_partition;
+
+ /* Grab footer metadata if needed */
+ if (demux->pull_footer_metadata
+ && partition->partition.type == MXF_PARTITION_PACK_HEADER
+ && (!partition->partition.closed || !partition->partition.complete)
+ && (demux->footer_partition_pack_offset != 0
+ || demux->random_index_pack)) {
+ GST_DEBUG_OBJECT (demux,
+ "Open or incomplete header partition, trying to get final metadata from the last partitions");
+ gst_mxf_demux_parse_footer_metadata (demux);
+ demux->pull_footer_metadata = FALSE;
+ }
+
+ /* If the partition has some content, do post-checks */
+ if (partition->partition.body_sid != 0) {
+ guint64 lowest_offset = G_MAXUINT64;
+ GST_DEBUG_OBJECT (demux,
+ "Entered partition (body_sid:%d index_sid:%d body_offset:%"
+ G_GUINT64_FORMAT "), checking positions",
+ partition->partition.body_sid, partition->partition.index_sid,
+ partition->partition.body_offset);
+
+ if (partition->single_track) {
+ /* Fast-path for single track partition */
+ if (partition->single_track->position == -1
+ && partition->partition.body_offset == 0) {
+ GST_DEBUG_OBJECT (demux,
+ "First time in partition, setting track position to 0");
+ partition->single_track->position = 0;
+ } else if (partition->single_track->position == -1) {
+ GST_ERROR_OBJECT (demux,
+ "Unknown track position, consuming data from first partition entry");
+ lowest_offset =
+ partition->partition.this_partition +
+ partition->essence_container_offset;
+ partition->clip_klv.consumed = 0;
+ } else if (partition->single_track->position != 0) {
+ GstMXFDemuxIndex entry;
+ GST_DEBUG_OBJECT (demux,
+ "Track already at another position : %" G_GINT64_FORMAT,
+ partition->single_track->position);
+ if (find_edit_entry (demux, partition->single_track,
+ partition->single_track->position, FALSE, &entry))
+ lowest_offset = entry.offset;
+ }
+ } else {
+ guint i;
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *etrack =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack,
+ i);
+
+ if (etrack->body_sid != partition->partition.body_sid)
+ continue;
+ if (etrack->position == -1 && partition->partition.body_offset == 0) {
+ GST_DEBUG_OBJECT (demux, "Resetting track %d to position 0",
+ etrack->track_id);
+
+ etrack->position = 0;
+ } else if (etrack->position != 0) {
+ GstMXFDemuxIndex entry;
+ if (find_edit_entry (demux, etrack,
+ etrack->position, FALSE, &entry)) {
+ if (lowest_offset == G_MAXUINT64
+ || entry.offset < lowest_offset)
+ lowest_offset = entry.offset;
+ }
+ }
+ }
+ }
+
+ if (lowest_offset != G_MAXUINT64) {
+ GstMXFDemuxPartition *next_partition = NULL;
+ GList *cur_part = g_list_find (demux->partitions, partition);
+ if (cur_part && cur_part->next)
+ next_partition = (GstMXFDemuxPartition *) cur_part->next->data;
+
+ /* If we have completely processed this partition, skip to next partition */
+ if (lowest_offset > next_partition->partition.this_partition) {
+ GST_DEBUG_OBJECT (demux,
+ "Partition entirely processed, skipping to next one");
+ demux->offset = next_partition->partition.this_partition;
+ } else {
+ GST_DEBUG_OBJECT (demux,
+ "Skipping to demuxer offset %" G_GUINT64_FORMAT " (from %"
+ G_GUINT64_FORMAT ")", lowest_offset, demux->offset);
+ demux->offset = lowest_offset;
+ if (partition->single_track
+ && partition->single_track->wrapping !=
+ MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ demux->state = GST_MXF_DEMUX_STATE_ESSENCE;
+ demux->current_partition->clip_klv.consumed =
+ demux->offset - demux->current_partition->clip_klv.offset;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if (ret == GST_FLOW_OK && demux->src->len > 0
+ && demux->essence_tracks->len > 0) {
+ GstMXFDemuxPad *earliest = NULL;
+ /* We allow time drifts of at most 500ms */
+ while ((earliest = gst_mxf_demux_get_earliest_pad (demux)) && (force_switch
+ || demux->segment.position - earliest->position >
+ demux->max_drift)) {
+ guint64 offset;
+ gint64 position;
+
+ GST_DEBUG_OBJECT (demux,
+ "Found synchronization issue -- trying to solve");
+
+ position = earliest->current_essence_track_position;
+
+ /* FIXME: This can probably be improved by using the
+ * offset of position-1 if it's in the same partition
+ * or the start of the position otherwise.
+ * This way we won't skip elements from the same essence
+ * container as etrack->position
+ */
+ offset =
+ gst_mxf_demux_find_essence_element (demux,
+ earliest->current_essence_track, &position, FALSE);
+ if (offset == -1) {
+ GstEvent *e;
+
+ GST_WARNING_OBJECT (demux,
+ "Failed to find offset for late essence track");
+ earliest->eos = TRUE;
+ e = gst_event_new_eos ();
+ gst_event_set_seqnum (e, demux->seqnum);
+ gst_pad_push_event (GST_PAD_CAST (earliest), e);
+ continue;
+ }
+
+ demux->offset = offset + demux->run_in;
+ gst_mxf_demux_set_partition_for_offset (demux, demux->offset);
+ GST_DEBUG_OBJECT (demux,
+ "Switching to offset %" G_GUINT64_FORMAT " for position %"
+ G_GINT64_FORMAT " on track %d (body_sid:%d index_sid:%d)",
+ demux->offset, position, earliest->current_essence_track->track_id,
+ earliest->current_essence_track->body_sid,
+ earliest->current_essence_track->index_sid);
+ if (demux->current_partition->single_track
+ && demux->current_partition->single_track->wrapping !=
+ MXF_ESSENCE_WRAPPING_FRAME_WRAPPING) {
+ demux->state = GST_MXF_DEMUX_STATE_ESSENCE;
+ demux->current_partition->clip_klv.consumed =
+ offset - demux->current_partition->clip_klv.offset;
+ } else
+ demux->state = GST_MXF_DEMUX_STATE_KLV;
+
+ earliest->current_essence_track->position = position;
+ GST_DEBUG_OBJECT (earliest, "Switching to this pad");
+ break;
+ }
+ }
+
+ beach:
+ return ret;
+ }
+
/* Pull-mode streaming task. On the first iteration (state UNKNOWN) it skips
 * the optional run-in (at most 64KiB, terminated by a header partition
 * pack) and fetches the Random Index Pack from the end of the file. Each
 * subsequent iteration processes one KLV packet. On any non-OK flow the
 * task pauses and the appropriate EOS / segment-done / error handling is
 * performed. */
static void
gst_mxf_demux_loop (GstPad * pad)
{
  GstMXFDemux *demux = NULL;
  GstFlowReturn flow = GST_FLOW_OK;

  demux = GST_MXF_DEMUX (gst_pad_get_parent (pad));

  if (demux->state == GST_MXF_DEMUX_STATE_UNKNOWN) {
    GstMXFKLV klv;

    /* Skip run-in, which is at most 64K and is finished
     * by a header partition pack */
    while (demux->offset < 64 * 1024) {
      if ((flow =
              gst_mxf_demux_peek_klv_packet (demux, demux->offset,
                  &klv)) != GST_FLOW_OK)
        goto pause;

      if (mxf_is_header_partition_pack (&klv.key)) {
        GST_DEBUG_OBJECT (demux,
            "Found header partition pack at offset %" G_GUINT64_FORMAT,
            demux->offset);
        demux->state = GST_MXF_DEMUX_STATE_KLV;
        demux->run_in = demux->offset;
        break;
      }
      /* Not a header partition pack yet, advance byte-by-byte */
      demux->offset++;
    }

    if (G_UNLIKELY (demux->run_in == -1)) {
      GST_ERROR_OBJECT (demux, "No valid header partition pack found");
      flow = GST_FLOW_ERROR;
      goto pause;
    }

    /* Grab the RIP at the end of the file (if present) */
    gst_mxf_demux_pull_random_index_pack (demux);
  }

  /* Now actually do something */
  flow = gst_mxf_demux_pull_and_handle_klv_packet (demux);

  /* pause if something went wrong */
  if (G_UNLIKELY (flow != GST_FLOW_OK))
    goto pause;

  /* check EOS condition */
  if ((demux->segment.stop != -1) &&
      (demux->segment.position >= demux->segment.stop)) {
    guint i;
    gboolean eos = TRUE;

    /* Only EOS once every pad has reached the segment stop */
    for (i = 0; i < demux->src->len; i++) {
      GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);

      if (!p->eos && p->position < demux->segment.stop) {
        eos = FALSE;
        break;
      }
    }

    if (eos) {
      flow = GST_FLOW_EOS;
      goto pause;
    }
  }

  gst_object_unref (demux);

  return;

pause:
  {
    const gchar *reason = gst_flow_get_name (flow);

    GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
    gst_pad_pause_task (pad);

    if (flow == GST_FLOW_EOS) {
      /* perform EOS logic */
      if (demux->src->len == 0) {
        GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE,
            ("This stream contains no data."),
            ("got eos and didn't find any streams"));
      } else if (demux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        gint64 stop;
        GstMessage *m;
        GstEvent *e;

        /* for segment playback we need to post when (in stream time)
         * we stopped, this is either stop (when set) or the duration. */
        if ((stop = demux->segment.stop) == -1)
          stop = demux->segment.duration;

        GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
        m = gst_message_new_segment_done (GST_OBJECT_CAST (demux),
            GST_FORMAT_TIME, stop);
        gst_message_set_seqnum (m, demux->seqnum);
        gst_element_post_message (GST_ELEMENT_CAST (demux), m);
        e = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
        gst_event_set_seqnum (e, demux->seqnum);
        gst_mxf_demux_push_src_event (demux, e);
      } else {
        GstEvent *e;

        /* normal playback, send EOS to all linked pads */
        GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
        e = gst_event_new_eos ();
        gst_event_set_seqnum (e, demux->seqnum);
        if (!gst_mxf_demux_push_src_event (demux, e)) {
          GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
        }
      }
    } else if (flow == GST_FLOW_NOT_LINKED || flow < GST_FLOW_EOS) {
      GstEvent *e;

      /* Fatal flow: post an error message and EOS all pads */
      GST_ELEMENT_FLOW_ERROR (demux, flow);
      e = gst_event_new_eos ();
      gst_event_set_seqnum (e, demux->seqnum);
      gst_mxf_demux_push_src_event (demux, e);
    }
    gst_object_unref (demux);
    return;
  }
}
+
/* Sink pad chain function (push-mode scheduling).
 *
 * Incoming buffers are accumulated in demux->adapter and parsed as a
 * sequence of KLV packets (16-byte key + BER-encoded length + value).
 * In the UNKNOWN state the run-in (at most 64KiB preceding the header
 * partition pack) is skipped one byte at a time.
 *
 * Returns a GstFlowReturn; GST_FLOW_EOS once all tracks have finished,
 * GST_FLOW_ERROR on unparseable data. Takes ownership of @inbuf (it is
 * pushed into the adapter).
 */
static GstFlowReturn
gst_mxf_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstMXFDemux *demux = NULL;
  const guint8 *data = NULL;
  gboolean res;
  GstMXFKLV klv;
#ifndef GST_DISABLE_GST_DEBUG
  gchar str[48];
#endif

  demux = GST_MXF_DEMUX (parent);

  GST_LOG_OBJECT (demux,
      "received buffer of %" G_GSIZE_FORMAT " bytes at offset %"
      G_GUINT64_FORMAT, gst_buffer_get_size (inbuf), GST_BUFFER_OFFSET (inbuf));

  /* If every source pad already reached EOS there is nothing left to do */
  if (demux->src->len > 0) {
    if (!gst_mxf_demux_get_earliest_pad (demux)) {
      ret = GST_FLOW_EOS;
      GST_DEBUG_OBJECT (demux, "All tracks are EOS");
      return ret;
    }
  }

  /* A buffer at offset 0 means we (re)start from the beginning of the file */
  if (G_UNLIKELY (GST_BUFFER_OFFSET (inbuf) == 0)) {
    GST_DEBUG_OBJECT (demux, "beginning of file, expect header");
    demux->run_in = -1;
    demux->offset = 0;
    demux->state = GST_MXF_DEMUX_STATE_UNKNOWN;
  }

  /* Resynchronize our byte offset with upstream after e.g. a seek */
  if (G_UNLIKELY (demux->offset == 0 && GST_BUFFER_OFFSET (inbuf) != 0)) {
    GST_DEBUG_OBJECT (demux, "offset was zero, synchronizing with buffer's");
    if (GST_BUFFER_OFFSET_IS_VALID (inbuf))
      demux->offset = GST_BUFFER_OFFSET (inbuf);
    gst_mxf_demux_set_partition_for_offset (demux, demux->offset);
  } else if (demux->current_partition == NULL) {
    gst_mxf_demux_set_partition_for_offset (demux, demux->offset);
  }

  gst_adapter_push (demux->adapter, inbuf);
  inbuf = NULL;

  /* Parse as many complete KLV packets as the adapter holds */
  while (ret == GST_FLOW_OK) {
    if (G_UNLIKELY (demux->flushing)) {
      GST_DEBUG_OBJECT (demux, "we are now flushing, exiting parser loop");
      ret = GST_FLOW_FLUSHING;
      break;
    }

    /* Need at least a full 16-byte key to do anything */
    if (gst_adapter_available (demux->adapter) < 16)
      break;

    if (demux->state == GST_MXF_DEMUX_STATE_UNKNOWN) {
      /* Skip run-in, which is at most 64K and is finished
       * by a header partition pack */

      while (demux->offset < 64 * 1024
          && gst_adapter_available (demux->adapter) >= 16) {
        data = gst_adapter_map (demux->adapter, 16);
        res = mxf_is_header_partition_pack ((const MXFUL *) data);
        gst_adapter_unmap (demux->adapter);

        if (res) {
          GST_DEBUG_OBJECT (demux,
              "Found header partition pack at offset %" G_GUINT64_FORMAT,
              demux->offset);
          demux->run_in = demux->offset;
          demux->state = GST_MXF_DEMUX_STATE_KLV;
          break;
        }
        /* Not the header partition pack yet: advance one byte */
        gst_adapter_flush (demux->adapter, 1);
        demux->offset++;
      }
    } else if (demux->offset < demux->run_in) {
      /* Still inside a known run-in: discard everything up to it */
      guint64 flush = MIN (gst_adapter_available (demux->adapter),
          demux->run_in - demux->offset);
      gst_adapter_flush (demux->adapter, flush);
      demux->offset += flush;
      continue;
    }

    if (demux->state == GST_MXF_DEMUX_STATE_UNKNOWN) {
      /* Need more data */
      if (demux->offset < 64 * 1024)
        break;

      /* Searched the full 64KiB run-in window without success: give up */
      GST_ERROR_OBJECT (demux, "No valid header partition pack found");
      ret = GST_FLOW_ERROR;
      break;
    }

    /* Need key (16) plus at least the first BER length byte (1) */
    if (gst_adapter_available (demux->adapter) < 17)
      break;

    /* FIXME : Handle non-klv state */
    g_assert (demux->state == GST_MXF_DEMUX_STATE_KLV);

    /* Now actually do something */
    memset (&klv, 0, sizeof (GstMXFKLV));

    /* Pull 16 byte key and first byte of BER encoded length */
    data = gst_adapter_map (demux->adapter, 17);

    memcpy (&klv.key, data, 16);

    GST_DEBUG_OBJECT (demux, "Got KLV packet with key %s",
        mxf_ul_to_string (&klv.key, str));

    /* Decode BER encoded packet length */
    if ((data[16] & 0x80) == 0) {
      /* Short form: length fits in the low 7 bits of a single byte */
      klv.length = data[16];
      klv.data_offset = 17;
    } else {
      /* Long form: low 7 bits give the number of following length bytes */
      guint slen = data[16] & 0x7f;

      klv.data_offset = 16 + 1 + slen;

      /* Unmap before validating/waiting; remapped below with the full
       * length field once enough data is available */
      gst_adapter_unmap (demux->adapter);

      /* Must be at most 8 according to SMPTE-379M 5.3.4 and
       * GStreamer buffers can only have a 4 bytes length */
      if (slen > 8) {
        GST_ERROR_OBJECT (demux, "Invalid KLV packet length: %u", slen);
        ret = GST_FLOW_ERROR;
        break;
      }

      if (gst_adapter_available (demux->adapter) < 17 + slen)
        break;

      data = gst_adapter_map (demux->adapter, 17 + slen);
      data += 17;

      /* Big-endian accumulation of the multi-byte length */
      klv.length = 0;
      while (slen) {
        klv.length = (klv.length << 8) | *data;
        data++;
        slen--;
      }
    }

    gst_adapter_unmap (demux->adapter);

    /* GStreamer's buffer sizes are stored in a guint so we
     * limit ourself to G_MAXUINT large buffers */
    if (klv.length > G_MAXUINT) {
      GST_ERROR_OBJECT (demux,
          "Unsupported KLV packet length: %" G_GSIZE_FORMAT, klv.length);
      ret = GST_FLOW_ERROR;
      break;
    }

    GST_DEBUG_OBJECT (demux, "KLV packet with key %s has length "
        "%" G_GSIZE_FORMAT, mxf_ul_to_string (&klv.key, str), klv.length);

    /* Wait until the complete value is buffered before handling it */
    if (gst_adapter_available (demux->adapter) < klv.data_offset + klv.length)
      break;

    gst_adapter_flush (demux->adapter, klv.data_offset);

    if (klv.length > 0) {
      klv.data = gst_adapter_take_buffer (demux->adapter, klv.length);

      ret = gst_mxf_demux_handle_klv_packet (demux, &klv, FALSE);
    }
    /* Always account for the packet, even zero-length ones */
    gst_mxf_demux_consume_klv (demux, &klv);
  }

  return ret;
}
+
/* Given a stream time for an output pad, figure out:
 * * The Essence track for that stream time
 * * The position on that track
 *
 * Walks the pad's material track sequence to locate the structural
 * component (SourceClip) covering the requested stream time, then looks up
 * the essence track matching that clip's source package UMID and track id.
 *
 * Returns TRUE and fills @etrack / @position on success, FALSE if no
 * matching essence track exists.
 */
static gboolean
gst_mxf_demux_pad_to_track_and_position (GstMXFDemux * demux,
    GstMXFDemuxPad * pad, GstClockTime streamtime,
    GstMXFDemuxEssenceTrack ** etrack, gint64 * position)
{
  gint64 material_position;
  guint64 sum = 0;
  guint i;
  MXFMetadataSourceClip *clip = NULL;
  gchar str[96];

  /* Convert to material position */
  material_position =
      gst_util_uint64_scale (streamtime, pad->material_track->edit_rate.n,
      pad->material_track->edit_rate.d * GST_SECOND);

  GST_DEBUG_OBJECT (pad,
      "streamtime %" GST_TIME_FORMAT " position %" G_GINT64_FORMAT,
      GST_TIME_ARGS (streamtime), material_position);


  /* Find sequence component covering that position */
  for (i = 0; i < pad->material_track->parent.sequence->n_structural_components;
      i++) {
    clip =
        MXF_METADATA_SOURCE_CLIP (pad->material_track->parent.sequence->
        structural_components[i]);
    GST_LOG_OBJECT (pad,
        "clip %d start_position:%" G_GINT64_FORMAT " duration %"
        G_GINT64_FORMAT, clip->source_track_id, clip->start_position,
        clip->parent.duration);
    /* Non-positive duration: stop at this (open-ended/invalid) clip */
    if (clip->parent.duration <= 0)
      break;
    if ((sum + clip->parent.duration) > material_position)
      break;
    sum += clip->parent.duration;
  }

  if (i == pad->material_track->parent.sequence->n_structural_components) {
    GST_WARNING_OBJECT (pad, "Requested position beyond the last clip");
    /* Outside of current components. Setting to the end of the last clip */
    material_position = sum;
    sum -= clip->parent.duration;
  }

  GST_DEBUG_OBJECT (pad, "Looking for essence track for track_id:%d umid:%s",
      clip->source_track_id, mxf_umid_to_string (&clip->source_package_id,
          str));

  /* Get the corresponding essence track for the given source package and stream id */
  for (i = 0; i < demux->essence_tracks->len; i++) {
    GstMXFDemuxEssenceTrack *track =
        &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
    GST_LOG_OBJECT (pad,
        "Looking at essence track body_sid:%d index_sid:%d",
        track->body_sid, track->index_sid);
    /* source_track_id == 0 acts as a wildcard, matching any essence track */
    if (clip->source_track_id == 0 || (track->track_id == clip->source_track_id
            && mxf_umid_is_equal (&clip->source_package_id,
                &track->source_package_uid))) {
      GST_DEBUG_OBJECT (pad,
          "Found matching essence track body_sid:%d index_sid:%d",
          track->body_sid, track->index_sid);
      *etrack = track;
      /* Position on the essence track is relative to the clip's start */
      *position = material_position - sum;
      return TRUE;
    }
  }

  return FALSE;
}
+
/* Given a track+position for a given pad, figure out the resulting stream time
 *
 * Inverse of gst_mxf_demux_pad_to_track_and_position(): finds the material
 * sequence component referencing @etrack (by track id and source package
 * UMID), adds the durations of all preceding components, and scales the
 * resulting material position to time using the material track's edit rate.
 *
 * Returns TRUE and sets @stream_time on success, FALSE if no component
 * references @etrack (should not happen in practice).
 */
static gboolean
gst_mxf_demux_pad_get_stream_time (GstMXFDemux * demux,
    GstMXFDemuxPad * pad, GstMXFDemuxEssenceTrack * etrack,
    gint64 position, GstClockTime * stream_time)
{
  guint i;
  guint64 sum = 0;
  MXFMetadataSourceClip *clip = NULL;

  /* Find the component for that */
  /* Find sequence component covering that position */
  for (i = 0; i < pad->material_track->parent.sequence->n_structural_components;
      i++) {
    clip =
        MXF_METADATA_SOURCE_CLIP (pad->material_track->parent.sequence->
        structural_components[i]);
    GST_LOG_OBJECT (pad,
        "clip %d start_position:%" G_GINT64_FORMAT " duration %"
        G_GINT64_FORMAT, clip->source_track_id, clip->start_position,
        clip->parent.duration);
    if (etrack->track_id == clip->source_track_id
        && mxf_umid_is_equal (&clip->source_package_id,
            &etrack->source_package_uid)) {
      /* This is the clip */
      break;
    }
    /* Fetch in the next one */
    sum += clip->parent.duration;
  }

  /* Theoretically impossible */
  if (i == pad->material_track->parent.sequence->n_structural_components) {
    /* Outside of current components ?? */
    return FALSE;
  }

  /* Material position (clip offsets + in-clip position) to stream time */
  *stream_time =
      gst_util_uint64_scale (position + sum,
      pad->material_track->edit_rate.d * GST_SECOND,
      pad->material_track->edit_rate.n);

  return TRUE;
}
+
/* Position pad @p at stream time @start.
 *
 * Updates the pad's current component, essence track position, material
 * track position and stream-time position. Positions beyond the current
 * essence track duration are clamped to the track end. All conversions
 * between time and track positions use the relevant edit rates.
 */
static void
gst_mxf_demux_pad_set_position (GstMXFDemux * demux, GstMXFDemuxPad * p,
    GstClockTime start)
{
  guint i;
  guint64 sum = 0;
  MXFMetadataSourceClip *clip = NULL;

  /* No component yet: single implicit clip, convert time directly */
  if (!p->current_component) {
    p->current_essence_track_position =
        gst_util_uint64_scale (start, p->material_track->edit_rate.n,
        p->material_track->edit_rate.d * GST_SECOND);

    /* Clamp to the end of the essence track if we overshot */
    if (p->current_essence_track_position >= p->current_essence_track->duration
        && p->current_essence_track->duration > 0) {
      p->current_essence_track_position = p->current_essence_track->duration;
      p->position =
          gst_util_uint64_scale (p->current_essence_track->duration,
          p->material_track->edit_rate.d * GST_SECOND,
          p->material_track->edit_rate.n);
    } else {
      p->position = start;
    }
    p->position_accumulated_error = 0.0;
    p->current_material_track_position = p->current_essence_track_position;

    return;
  }

  /* Find the structural component whose time range contains @start */
  for (i = 0; i < p->material_track->parent.sequence->n_structural_components;
      i++) {
    clip =
        MXF_METADATA_SOURCE_CLIP (p->material_track->parent.sequence->
        structural_components[i]);

    if (clip->parent.duration <= 0)
      break;

    sum += clip->parent.duration;

    if (gst_util_uint64_scale (sum, p->material_track->edit_rate.d * GST_SECOND,
            p->material_track->edit_rate.n) > start)
      break;
  }

  /* Requested time is past the last component: park at the sequence end */
  if (i == p->material_track->parent.sequence->n_structural_components) {
    p->position =
        gst_util_uint64_scale (sum, p->material_track->edit_rate.d * GST_SECOND,
        p->material_track->edit_rate.n);
    p->position_accumulated_error = 0.0;
    p->current_material_track_position = sum;

    gst_mxf_demux_pad_set_component (demux, p, i);
    return;
  }

  /* sum currently includes the found clip's duration: make it the offset
   * of the clip's start instead */
  if (clip->parent.duration > 0)
    sum -= clip->parent.duration;

  /* Make @start relative to the beginning of the found component */
  start -=
      gst_util_uint64_scale (sum, p->material_track->edit_rate.d * GST_SECOND,
      p->material_track->edit_rate.n);

  gst_mxf_demux_pad_set_component (demux, p, i);

  {
    /* Offset within the component in essence track edit units */
    gint64 essence_offset = gst_util_uint64_scale (start,
        p->current_essence_track->source_track->edit_rate.n,
        p->current_essence_track->source_track->edit_rate.d * GST_SECOND);

    p->current_essence_track_position += essence_offset;

    /* Stream time = time of the clip start (material edit rate) plus the
     * in-clip offset (source track edit rate) */
    p->position = gst_util_uint64_scale (sum,
        GST_SECOND * p->material_track->edit_rate.d,
        p->material_track->edit_rate.n) + gst_util_uint64_scale (essence_offset,
        GST_SECOND * p->current_essence_track->source_track->edit_rate.d,
        p->current_essence_track->source_track->edit_rate.n);
    p->position_accumulated_error = 0.0;
    p->current_material_track_position = sum + essence_offset;
  }

  /* Clamp to the end of the essence track if we overshot */
  if (p->current_essence_track_position >= p->current_essence_track->duration
      && p->current_essence_track->duration > 0) {
    p->current_essence_track_position = p->current_essence_track->duration;
    p->position =
        gst_util_uint64_scale (sum + p->current_component->parent.duration,
        p->material_track->edit_rate.d * GST_SECOND,
        p->material_track->edit_rate.n);
    p->position_accumulated_error = 0.0;
    p->current_material_track_position =
        sum + p->current_component->parent.duration;
  }
}
+
/* Handle a SEEK event in push mode.
 *
 * Computes the byte offset backing the requested TIME position (the minimum
 * keyframe offset over all pads) and converts the seek into a BYTES seek
 * pushed upstream. Only forward rates and TIME format are handled locally;
 * unsupported/failed cases are forwarded upstream as-is.
 *
 * Returns TRUE on success (or upstream's result when forwarding).
 */
static gboolean
gst_mxf_demux_seek_push (GstMXFDemux * demux, GstEvent * event)
{
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gdouble rate;
  gboolean update, flush, keyframe;
  GstSegment seeksegment;
  guint i;
  guint32 seqnum;

  gst_event_parse_seek (event, &rate, &format, &flags,
      &start_type, &start, &stop_type, &stop);
  seqnum = gst_event_get_seqnum (event);

  if (rate <= 0.0)
    goto wrong_rate;

  if (format != GST_FORMAT_TIME)
    goto wrong_format;

  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
  keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);

  /* Work on a copy until we are sure the seek succeeded. */
  memcpy (&seeksegment, &demux->segment, sizeof (GstSegment));

  GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
      &demux->segment);

  /* Apply the seek to our segment */
  gst_segment_do_seek (&seeksegment, rate, format, flags,
      start_type, start, stop_type, stop, &update);

  GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
      &seeksegment);

  if (flush || seeksegment.position != demux->segment.position) {
    gboolean ret;
    guint64 new_offset = -1;
    GstEvent *e;

    /* Metadata must be resolved before track positions can be computed */
    if (!demux->metadata_resolved || demux->update_metadata) {
      if (gst_mxf_demux_resolve_references (demux) != GST_FLOW_OK ||
          gst_mxf_demux_update_tracks (demux) != GST_FLOW_OK) {
        goto unresolved_metadata;
      }
    }

    /* Do the actual seeking */
    for (i = 0; i < demux->src->len; i++) {
      GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
      gint64 position;
      guint64 off;

      /* Reset EOS flag on all pads */
      p->eos = FALSE;
      gst_mxf_demux_pad_set_position (demux, p, start);

      position = p->current_essence_track_position;
      off = gst_mxf_demux_find_essence_element (demux, p->current_essence_track,
          &position, keyframe);
      /* Seek upstream to the earliest offset needed by any pad */
      new_offset = MIN (off, new_offset);
      p->discont = TRUE;
    }

    if (new_offset == -1)
      goto no_new_offset;

    /* Offsets are relative to the end of the run-in */
    new_offset += demux->run_in;

    GST_DEBUG_OBJECT (demux, "generating an upstream seek at position %"
        G_GUINT64_FORMAT, new_offset);
    e = gst_event_new_seek (seeksegment.rate, GST_FORMAT_BYTES,
        seeksegment.flags | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET,
        new_offset, GST_SEEK_TYPE_NONE, 0);
    gst_event_set_seqnum (e, seqnum);
    ret = gst_pad_push_event (demux->sinkpad, e);

    if (G_UNLIKELY (!ret)) {
      goto seek_failed;
    }
  }

  /* Tell all the stream a new segment is needed */
  for (i = 0; i < demux->src->len; i++) {
    GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
    p->need_segment = TRUE;
  }

  /* Invalidate all essence track positions: data will restart elsewhere */
  for (i = 0; i < demux->essence_tracks->len; i++) {
    GstMXFDemuxEssenceTrack *t =
        &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
    t->position = -1;
  }

  /* Ok seek succeeded, take the newly configured segment */
  memcpy (&demux->segment, &seeksegment, sizeof (GstSegment));

  return TRUE;

  /* ERRORS */
wrong_format:
  {
    GST_WARNING_OBJECT (demux, "seeking only supported in TIME format");
    /* Let upstream try to handle the seek instead */
    return gst_pad_push_event (demux->sinkpad, gst_event_ref (event));
  }
wrong_rate:
  {
    GST_WARNING_OBJECT (demux, "only rates > 0.0 are allowed");
    return FALSE;
  }
unresolved_metadata:
  {
    GST_WARNING_OBJECT (demux, "metadata can't be resolved");
    return gst_pad_push_event (demux->sinkpad, gst_event_ref (event));
  }
seek_failed:
  {
    GST_WARNING_OBJECT (demux, "upstream seek failed");
    return gst_pad_push_event (demux->sinkpad, gst_event_ref (event));
  }
no_new_offset:
  {
    GST_WARNING_OBJECT (demux, "can't find new offset");
    return gst_pad_push_event (demux->sinkpad, gst_event_ref (event));
  }
}
+
+ static void
+ collect_index_table_segments (GstMXFDemux * demux)
+ {
+ GList *l;
+ guint i;
+ guint64 old_offset = demux->offset;
+ GstMXFDemuxPartition *old_partition = demux->current_partition;
+
+ /* This function can also be called when a RIP is not present. This can happen
+ * if index table segments were discovered while scanning the file */
+ if (demux->random_index_pack) {
+ for (i = 0; i < demux->random_index_pack->len; i++) {
+ MXFRandomIndexPackEntry *e =
+ &g_array_index (demux->random_index_pack, MXFRandomIndexPackEntry, i);
+
+ if (e->offset < demux->run_in) {
+ GST_ERROR_OBJECT (demux, "Invalid random index pack entry");
+ return;
+ }
+
+ demux->offset = e->offset;
+ read_partition_header (demux);
+ }
+
+ demux->offset = old_offset;
+ demux->current_partition = old_partition;
+ }
+
+ if (demux->pending_index_table_segments == NULL) {
+ GST_DEBUG_OBJECT (demux, "No pending index table segments to collect");
+ return;
+ }
+
+ GST_LOG_OBJECT (demux, "Collecting pending index table segments");
+
+ for (l = demux->pending_index_table_segments; l; l = l->next) {
+ MXFIndexTableSegment *segment = l->data;
+ GstMXFDemuxIndexTable *t = NULL;
+ GList *k;
+ guint didx;
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar str[48];
+ #endif
+
+ GST_LOG_OBJECT (demux,
+ "Collecting from segment bodySID:%d indexSID:%d instance_id: %s",
+ segment->body_sid, segment->index_sid,
+ mxf_uuid_to_string (&segment->instance_id, str));
+
+ for (k = demux->index_tables; k; k = k->next) {
+ GstMXFDemuxIndexTable *tmp = k->data;
+
+ if (tmp->body_sid == segment->body_sid
+ && tmp->index_sid == segment->index_sid) {
+ t = tmp;
+ break;
+ }
+ }
+
+ if (!t) {
+ t = g_new0 (GstMXFDemuxIndexTable, 1);
+ t->body_sid = segment->body_sid;
+ t->index_sid = segment->index_sid;
+ t->max_temporal_offset = 0;
+ t->segments = g_array_new (FALSE, TRUE, sizeof (MXFIndexTableSegment));
+ g_array_set_clear_func (t->segments,
+ (GDestroyNotify) mxf_index_table_segment_reset);
+ t->reordered_delta_entry = -1;
+ t->reverse_temporal_offsets = g_array_new (FALSE, TRUE, 1);
+ demux->index_tables = g_list_prepend (demux->index_tables, t);
+ }
+
+ /* Store index segment */
+ g_array_append_val (t->segments, *segment);
+
+ /* Check if temporal reordering tables should be pre-calculated */
+ for (didx = 0; didx < segment->n_delta_entries; didx++) {
+ MXFDeltaEntry *delta = &segment->delta_entries[didx];
+ if (delta->pos_table_index == -1) {
+ if (t->reordered_delta_entry != -1 && didx != t->reordered_delta_entry)
+ GST_WARNING_OBJECT (demux,
+ "Index Table specifies more than one stream using temporal reordering (%d and %d)",
+ didx, t->reordered_delta_entry);
+ else
+ t->reordered_delta_entry = didx;
+ } else if (delta->pos_table_index > 0)
+ GST_WARNING_OBJECT (delta,
+ "Index Table uses fractional offset, please file a bug");
+ }
+
+ }
+
+ /* Handle temporal offset if present and needed */
+ for (l = demux->index_tables; l; l = l->next) {
+ GstMXFDemuxIndexTable *table = l->data;
+ guint segidx;
+
+ /* No reordered entries, skip */
+ if (table->reordered_delta_entry == -1)
+ continue;
+
+ GST_DEBUG_OBJECT (demux,
+ "bodySID:%d indexSID:%d Calculating reverse temporal offset table",
+ table->body_sid, table->index_sid);
+
+ for (segidx = 0; segidx < table->segments->len; segidx++) {
+ MXFIndexTableSegment *s =
+ &g_array_index (table->segments, MXFIndexTableSegment, segidx);
+ guint start = s->index_start_position;
+ guint stop =
+ s->index_duration ? start + s->index_duration : start +
+ s->n_index_entries;
+ guint entidx = 0;
+
+ if (stop > table->reverse_temporal_offsets->len)
+ g_array_set_size (table->reverse_temporal_offsets, stop);
+
+ for (entidx = 0; entidx < s->n_index_entries; entidx++) {
+ MXFIndexEntry *entry = &s->index_entries[entidx];
+ gint8 offs = -entry->temporal_offset;
+ /* Check we don't exceed boundaries */
+ if ((start + entidx + entry->temporal_offset) < 0 ||
+ (start + entidx + entry->temporal_offset) >
+ table->reverse_temporal_offsets->len) {
+ GST_ERROR_OBJECT (demux,
+ "Temporal offset exceeds boundaries. entry:%d offset:%d max:%d",
+ start + entidx, entry->temporal_offset,
+ table->reverse_temporal_offsets->len);
+ } else {
+ /* Applying the temporal offset gives us the entry that should contain this PTS.
+ * We store the reverse temporal offset on that entry, i.e. the value it should apply
+ * to go from DTS to PTS. (i.e. entry.pts = entry.dts + rto[idx]) */
+ g_array_index (table->reverse_temporal_offsets, gint8,
+ start + entidx + entry->temporal_offset) = offs;
+ if (entry->temporal_offset > (gint) table->max_temporal_offset) {
+ GST_LOG_OBJECT (demux,
+ "Updating max temporal offset to %d (was %d)",
+ entry->temporal_offset, table->max_temporal_offset);
+ table->max_temporal_offset = entry->temporal_offset;
+ }
+ }
+ }
+ }
+ }
+
+ g_list_free_full (demux->pending_index_table_segments, g_free);
+ demux->pending_index_table_segments = NULL;
+
+ GST_DEBUG_OBJECT (demux, "Done collecting segments");
+ }
+
/* Handle a SEEK event in pull mode.
 *
 * Flushes (or pauses) the streaming task, computes the keyframe-supporting
 * position for every pad, repositions all pads and the demuxer byte offset,
 * installs the new segment and restarts the task. Only forward rates and
 * TIME format are supported. Duplicate seeks (same seqnum as the last
 * handled one) are ignored.
 *
 * Returns TRUE on success.
 */
static gboolean
gst_mxf_demux_seek_pull (GstMXFDemux * demux, GstEvent * event)
{
  GstClockTime keyunit_ts;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  gint64 start, stop;
  gdouble rate;
  gboolean update, flush, keyframe;
  GstSegment seeksegment;
  guint i;
  gboolean ret = TRUE;
  guint32 seqnum;

  gst_event_parse_seek (event, &rate, &format, &flags,
      &start_type, &start, &stop_type, &stop);
  seqnum = gst_event_get_seqnum (event);

  /* Seeks with the same seqnum were already processed (e.g. sent to
   * several pads): don't redo the work */
  if (seqnum == demux->seqnum) {
    GST_DEBUG_OBJECT (demux, "Already handled requested seek");
    return TRUE;
  }

  GST_DEBUG_OBJECT (demux, "Seek %" GST_PTR_FORMAT, event);

  if (format != GST_FORMAT_TIME)
    goto wrong_format;

  if (rate <= 0.0)
    goto wrong_rate;

  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
  keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);

  /* Index tables are needed to locate keyframes; collect them once */
  if (!demux->index_table_segments_collected) {
    collect_index_table_segments (demux);
    demux->index_table_segments_collected = TRUE;
  }

  if (flush) {
    GstEvent *e;

    /* Flush start up and downstream to make sure data flow and loops are
       idle */
    e = gst_event_new_flush_start ();
    gst_event_set_seqnum (e, seqnum);
    gst_mxf_demux_push_src_event (demux, gst_event_ref (e));
    gst_pad_push_event (demux->sinkpad, e);
  } else {
    /* Pause the pulling task */
    gst_pad_pause_task (demux->sinkpad);
  }

  /* Take the stream lock */
  GST_PAD_STREAM_LOCK (demux->sinkpad);

  if (flush) {
    GstEvent *e;

    /* Stop flushing upstream we need to pull */
    e = gst_event_new_flush_stop (TRUE);
    gst_event_set_seqnum (e, seqnum);
    gst_pad_push_event (demux->sinkpad, e);
  }

  /* Work on a copy until we are sure the seek succeeded. */
  memcpy (&seeksegment, &demux->segment, sizeof (GstSegment));

  GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
      &demux->segment);

  /* Apply the seek to our segment */
  gst_segment_do_seek (&seeksegment, rate, format, flags,
      start_type, start, stop_type, stop, &update);

  GST_DEBUG_OBJECT (demux,
      "segment initially configured to %" GST_SEGMENT_FORMAT, &seeksegment);

  /* Initialize and reset ourselves if needed */
  if (flush || seeksegment.position != demux->segment.position) {
    GList *tmp;
    /* Metadata must be resolved before track positions can be computed */
    if (!demux->metadata_resolved || demux->update_metadata) {
      if (gst_mxf_demux_resolve_references (demux) != GST_FLOW_OK ||
          gst_mxf_demux_update_tracks (demux) != GST_FLOW_OK) {
        goto unresolved_metadata;
      }
    }

    /* Reset all single-track KLV tracking */
    for (tmp = demux->partitions; tmp; tmp = tmp->next) {
      GstMXFDemuxPartition *partition = (GstMXFDemuxPartition *) tmp->data;
      if (partition->single_track) {
        partition->clip_klv.consumed = 0;
      }
    }
  }

  keyunit_ts = seeksegment.position;

  /* Do a first round without changing positions. This is needed to figure out
   * the supporting keyframe position (if any) */
  for (i = 0; i < demux->src->len; i++) {
    GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
    GstMXFDemuxEssenceTrack *etrack;
    gint64 track_pos, seeked_pos;

    /* Get track and track position for requested time, handles out of bound internally */
    if (!gst_mxf_demux_pad_to_track_and_position (demux, p,
            seeksegment.position, &etrack, &track_pos))
      goto invalid_position;

    GST_LOG_OBJECT (p,
        "track %d (body_sid:%d index_sid:%d), position %" G_GINT64_FORMAT,
        etrack->track_id, etrack->body_sid, etrack->index_sid, track_pos);

    /* Find supporting keyframe entry */
    seeked_pos = track_pos;
    if (gst_mxf_demux_find_essence_element (demux, etrack, &seeked_pos,
            TRUE) == -1) {
      /* Couldn't find entry, ignore */
      break;
    }

    GST_LOG_OBJECT (p,
        "track %d (body_sid:%d index_sid:%d), position %" G_GINT64_FORMAT
        " entry position %" G_GINT64_FORMAT, etrack->track_id, etrack->body_sid,
        etrack->index_sid, track_pos, seeked_pos);

    /* The supporting keyframe is earlier: remember the earliest stream
     * time needed across all pads */
    if (seeked_pos != track_pos) {
      GstClockTime stream_time;
      if (!gst_mxf_demux_pad_get_stream_time (demux, p, etrack, seeked_pos,
              &stream_time))
        goto invalid_position;
      GST_LOG_OBJECT (p, "Need to seek to stream time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (stream_time));
      keyunit_ts = MIN (seeksegment.position, stream_time);
    }
  }

  if (keyframe && keyunit_ts != seeksegment.position) {
    GST_INFO_OBJECT (demux, "key unit seek, adjusting segment start to "
        "%" GST_TIME_FORMAT, GST_TIME_ARGS (keyunit_ts));
    gst_segment_do_seek (&seeksegment, rate, format, flags,
        start_type, keyunit_ts, stop_type, stop, &update);
  }

  /* Finally set the position to the calculated position */
  if (flush || keyunit_ts != demux->segment.position) {
    guint64 new_offset = -1;

    /* Do the actual seeking */
    for (i = 0; i < demux->src->len; i++) {
      GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
      gint64 position;
      guint64 off;

      /* Reset EOS flag on all pads */
      p->eos = FALSE;
      gst_mxf_demux_pad_set_position (demux, p, seeksegment.position);

      /* we always want to send data starting with a key unit */
      position = p->current_essence_track_position;
      off =
          gst_mxf_demux_find_essence_element (demux, p->current_essence_track,
          &position, TRUE);
      if (off == -1) {
        GST_DEBUG_OBJECT (demux, "Unable to find offset for pad %s",
            GST_PAD_NAME (p));
        p->current_essence_track_position = p->current_essence_track->duration;
      } else {
        new_offset = MIN (off, new_offset);
        /* Keyframe is earlier than the requested position: rewind the
         * pad's time/track positions by the difference */
        if (position != p->current_essence_track_position) {
          p->position -=
              gst_util_uint64_scale (p->current_essence_track_position -
              position,
              GST_SECOND * p->current_essence_track->source_track->edit_rate.d,
              p->current_essence_track->source_track->edit_rate.n);
          p->position_accumulated_error = 0.0;
          p->current_material_track_position -=
              gst_util_uint64_scale (p->current_essence_track_position -
              position,
              p->material_track->edit_rate.n *
              p->current_essence_track->source_track->edit_rate.d,
              p->material_track->edit_rate.d *
              p->current_essence_track->source_track->edit_rate.n);
        }
        p->current_essence_track_position = position;
      }
      p->current_essence_track->position = p->current_essence_track_position;
      p->discont = TRUE;
    }
    gst_flow_combiner_reset (demux->flowcombiner);
    if (new_offset == -1) {
      GST_WARNING_OBJECT (demux, "No new offset found");
      ret = FALSE;
    } else {
      demux->offset = new_offset + demux->run_in;
    }
    gst_mxf_demux_set_partition_for_offset (demux, demux->offset);
    /* Reset the state accordingly */
    if (demux->current_partition->single_track
        && demux->current_partition->single_track->wrapping !=
        MXF_ESSENCE_WRAPPING_FRAME_WRAPPING)
      demux->state = GST_MXF_DEMUX_STATE_ESSENCE;
    else
      demux->state = GST_MXF_DEMUX_STATE_KLV;
  }

  /* Any previously prepared segment-close event is now obsolete */
  if (G_UNLIKELY (demux->close_seg_event)) {
    gst_event_unref (demux->close_seg_event);
    demux->close_seg_event = NULL;
  }

  if (flush) {
    GstEvent *e;

    /* Stop flushing, the sinks are at time 0 now */
    e = gst_event_new_flush_stop (TRUE);
    gst_event_set_seqnum (e, seqnum);
    gst_mxf_demux_push_src_event (demux, e);
  } else {
    GST_DEBUG_OBJECT (demux, "closing running segment %" GST_SEGMENT_FORMAT,
        &demux->segment);

    /* Close the current segment for a linear playback */
    demux->close_seg_event = gst_event_new_segment (&demux->segment);
    gst_event_set_seqnum (demux->close_seg_event, demux->seqnum);
  }

  /* Ok seek succeeded, take the newly configured segment */
  memcpy (&demux->segment, &seeksegment, sizeof (GstSegment));

  /* Notify about the start of a new segment */
  if (demux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    GstMessage *m;

    m = gst_message_new_segment_start (GST_OBJECT (demux),
        demux->segment.format, demux->segment.position);
    gst_message_set_seqnum (m, seqnum);
    gst_element_post_message (GST_ELEMENT (demux), m);
  }

  /* Tell all the stream a new segment is needed */
  for (i = 0; i < demux->src->len; i++) {
    GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
    p->need_segment = TRUE;
  }

  /* Invalidate all essence track positions: data will restart elsewhere */
  for (i = 0; i < demux->essence_tracks->len; i++) {
    GstMXFDemuxEssenceTrack *t =
        &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
    t->position = -1;
  }

  demux->seqnum = seqnum;

  /* Restart the streaming task */
  gst_pad_start_task (demux->sinkpad,
      (GstTaskFunction) gst_mxf_demux_loop, demux->sinkpad, NULL);

  GST_PAD_STREAM_UNLOCK (demux->sinkpad);

  return ret;

  /* ERRORS */
wrong_format:
  {
    GST_WARNING_OBJECT (demux, "seeking only supported in TIME format");
    return FALSE;
  }
wrong_rate:
  {
    GST_WARNING_OBJECT (demux, "only rates > 0.0 are allowed");
    return FALSE;
  }
unresolved_metadata:
  {
    /* Restart the task so linear playback can continue */
    gst_pad_start_task (demux->sinkpad,
        (GstTaskFunction) gst_mxf_demux_loop, demux->sinkpad, NULL);
    GST_PAD_STREAM_UNLOCK (demux->sinkpad);
    GST_WARNING_OBJECT (demux, "metadata can't be resolved");
    return FALSE;
  }

invalid_position:
  {
    if (flush) {
      GstEvent *e;

      /* Stop flushing, the sinks are at time 0 now */
      e = gst_event_new_flush_stop (TRUE);
      gst_event_set_seqnum (e, seqnum);
      gst_mxf_demux_push_src_event (demux, e);
    }
    /* Restart the task so linear playback can continue */
    gst_pad_start_task (demux->sinkpad,
        (GstTaskFunction) gst_mxf_demux_loop, demux->sinkpad, NULL);
    GST_PAD_STREAM_UNLOCK (demux->sinkpad);
    GST_WARNING_OBJECT (demux, "Requested seek position is not valid");
    return FALSE;
  }
}
+
+ static gboolean
+ gst_mxf_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstMXFDemux *demux = GST_MXF_DEMUX (parent);
+ gboolean ret;
+
+ GST_DEBUG_OBJECT (pad, "handling event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ if (demux->random_access)
+ ret = gst_mxf_demux_seek_pull (demux, event);
+ else
+ ret = gst_mxf_demux_seek_push (demux, event);
+ gst_event_unref (event);
+ break;
+ default:
+ ret = gst_pad_push_event (demux->sinkpad, event);
+ break;
+ }
+
+ return ret;
+ }
+
/* Source pad query handler.
 *
 * POSITION: per-pad position, in TIME or DEFAULT (edit unit) format.
 * DURATION: taken from the pad's material track sequence, converted to
 *           TIME via the track's edit rate when requested.
 * SEEKING:  TIME seeking is reported possible in pull mode, or in push
 *           mode when upstream is BYTES-seekable.
 * SEGMENT:  reports the currently configured playback segment.
 * Everything else goes to the default pad query handler.
 */
static gboolean
gst_mxf_demux_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstMXFDemux *demux = GST_MXF_DEMUX (parent);
  gboolean ret = FALSE;
  GstMXFDemuxPad *mxfpad = GST_MXF_DEMUX_PAD (pad);

  GST_DEBUG_OBJECT (pad, "handling query %s",
      gst_query_type_get_name (GST_QUERY_TYPE (query)));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 pos;

      gst_query_parse_position (query, &format, NULL);
      /* only TIME and DEFAULT (edit units) are meaningful here */
      if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT)
        goto error;

      /* DEFAULT -> material track position, TIME -> pad timestamp position */
      pos =
          format ==
          GST_FORMAT_DEFAULT ? mxfpad->current_material_track_position :
          mxfpad->position;

      GST_DEBUG_OBJECT (pad,
          "Returning position %" G_GINT64_FORMAT " in format %s", pos,
          gst_format_get_name (format));

      gst_query_set_position (query, format, pos);
      ret = TRUE;

      break;
    }
    case GST_QUERY_DURATION:{
      gint64 duration;
      GstFormat format;

      gst_query_parse_duration (query, &format, NULL);
      if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT)
        goto error;

      /* the duration lives in the metadata tree: take the reader lock */
      g_rw_lock_reader_lock (&demux->metadata_lock);
      if (!mxfpad->material_track || !mxfpad->material_track->parent.sequence) {
        g_rw_lock_reader_unlock (&demux->metadata_lock);
        goto error;
      }

      duration = mxfpad->material_track->parent.sequence->duration;
      /* normalize all "unknown" values to -1 */
      if (duration <= -1)
        duration = -1;

      if (duration != -1 && format == GST_FORMAT_TIME) {
        /* cannot convert edit units to time without a valid edit rate */
        if (mxfpad->material_track->edit_rate.n == 0 ||
            mxfpad->material_track->edit_rate.d == 0) {
          g_rw_lock_reader_unlock (&demux->metadata_lock);
          goto error;
        }

        duration =
            gst_util_uint64_scale (duration,
            GST_SECOND * mxfpad->material_track->edit_rate.d,
            mxfpad->material_track->edit_rate.n);
      }
      g_rw_lock_reader_unlock (&demux->metadata_lock);

      GST_DEBUG_OBJECT (pad,
          "Returning duration %" G_GINT64_FORMAT " in format %s", duration,
          gst_format_get_name (format));

      gst_query_set_duration (query, format, duration);
      ret = TRUE;
      break;
    }
    case GST_QUERY_SEEKING:{
      GstFormat fmt;
      gint64 duration;

      ret = TRUE;
      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      /* only TIME seeking is supported */
      if (fmt != GST_FORMAT_TIME) {
        gst_query_set_seeking (query, fmt, FALSE, -1, -1);
        goto done;
      }

      /* without a known duration we cannot report a seek range */
      if (!gst_pad_query_duration (pad, GST_FORMAT_TIME, &duration)) {
        gst_query_set_seeking (query, fmt, FALSE, -1, -1);
        goto done;
      }

      if (demux->random_access) {
        /* pull mode: the file itself is seekable */
        gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, duration);
      } else {
        /* push mode: TIME seeking works only if upstream seeks in BYTES */
        GstQuery *peerquery = gst_query_new_seeking (GST_FORMAT_BYTES);
        gboolean seekable;

        seekable = gst_pad_peer_query (demux->sinkpad, peerquery);
        if (seekable)
          gst_query_parse_seeking (peerquery, NULL, &seekable, NULL, NULL);
        if (seekable)
          gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, duration);
        else
          gst_query_set_seeking (query, GST_FORMAT_TIME, FALSE, -1, -1);

        gst_query_unref (peerquery);
      }

      break;
    }
    case GST_QUERY_SEGMENT:{
      GstFormat format;
      gint64 start, stop;

      format = demux->segment.format;

      start =
          gst_segment_to_stream_time (&demux->segment, format,
          demux->segment.start);
      /* an open-ended segment stops at the total duration */
      if ((stop = demux->segment.stop) == -1)
        stop = demux->segment.duration;
      else
        stop = gst_segment_to_stream_time (&demux->segment, format, stop);

      gst_query_set_segment (query, demux->segment.rate, format, start, stop);
      ret = TRUE;
      break;
    }
    default:
      ret = gst_pad_query_default (pad, parent, query);
      break;
  }

done:
  return ret;

  /* ERRORS */
error:
  {
    GST_DEBUG_OBJECT (pad, "query failed");
    goto done;
  }
}
+
+ static gboolean
+ gst_mxf_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstQuery *query;
+ GstPadMode mode = GST_PAD_MODE_PUSH;
+
+ query = gst_query_new_scheduling ();
+
+ if (gst_pad_peer_query (sinkpad, query)) {
+ if (gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE)) {
+ GstSchedulingFlags flags;
+ gst_query_parse_scheduling (query, &flags, NULL, NULL, NULL);
+ if (!(flags & GST_SCHEDULING_FLAG_SEQUENTIAL))
+ mode = GST_PAD_MODE_PULL;
+ }
+ }
+ gst_query_unref (query);
+
+ return gst_pad_activate_mode (sinkpad, mode, TRUE);
+ }
+
+ static gboolean
+ gst_mxf_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ GstMXFDemux *demux;
+
+ demux = GST_MXF_DEMUX (parent);
+
+ if (mode == GST_PAD_MODE_PUSH) {
+ demux->random_access = FALSE;
+ } else {
+ if (active) {
+ demux->random_access = TRUE;
+ return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_mxf_demux_loop,
+ sinkpad, NULL);
+ } else {
+ demux->random_access = FALSE;
+ return gst_pad_stop_task (sinkpad);
+ }
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_mxf_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstMXFDemux *demux;
+ gboolean ret = FALSE;
+
+ demux = GST_MXF_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (pad, "handling event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_START:
+ demux->flushing = TRUE;
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ GST_DEBUG_OBJECT (demux, "flushing queued data in the MXF demuxer");
+
+ gst_adapter_clear (demux->adapter);
+ demux->flushing = FALSE;
+ demux->offset = 0;
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_EOS:{
+ GstMXFDemuxPad *p = NULL;
+ guint i;
+
+ if (demux->src->len == 0) {
+ GST_ELEMENT_ERROR (demux, STREAM, WRONG_TYPE,
+ ("This stream contains no data."),
+ ("got eos and didn't find any streams"));
+ }
+
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *t =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack, i);
+
+ if (t->position > 0)
+ t->duration = t->position;
+ }
+
+ for (i = 0; i < demux->src->len; i++) {
+ GstMXFDemuxPad *p = g_ptr_array_index (demux->src, i);
+
+ if (!p->eos
+ && p->current_essence_track_position >=
+ p->current_essence_track->duration) {
+ p->eos = TRUE;
+ gst_pad_push_event (GST_PAD_CAST (p), gst_event_new_eos ());
+ }
+ }
+
+ while ((p = gst_mxf_demux_get_earliest_pad (demux))) {
+ guint64 offset;
+ gint64 position;
+
+ position = p->current_essence_track_position;
+
+ offset =
+ gst_mxf_demux_find_essence_element (demux, p->current_essence_track,
+ &position, FALSE);
+ if (offset == -1) {
+ GST_ERROR_OBJECT (demux, "Failed to find offset for essence track");
+ p->eos = TRUE;
+ gst_pad_push_event (GST_PAD_CAST (p), gst_event_new_eos ());
+ continue;
+ }
+
+ if (gst_pad_push_event (demux->sinkpad,
+ gst_event_new_seek (demux->segment.rate, GST_FORMAT_BYTES,
+ demux->segment.flags | GST_SEEK_FLAG_ACCURATE,
+ GST_SEEK_TYPE_SET, offset + demux->run_in,
+ GST_SEEK_TYPE_NONE, 0))) {
+
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *etrack =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack,
+ i);
+ etrack->position = -1;
+ }
+ ret = TRUE;
+ goto out;
+ } else {
+ GST_WARNING_OBJECT (demux,
+ "Seek to remaining part of the file failed");
+ p->eos = TRUE;
+ gst_pad_push_event (GST_PAD_CAST (p), gst_event_new_eos ());
+ continue;
+ }
+ }
+
+ /* and one more time for good measure apparently? */
+ gst_pad_event_default (pad, parent, event);
+ ret = (demux->src->len > 0);
+ break;
+ }
+ case GST_EVENT_SEGMENT:{
+ guint i;
+
+ for (i = 0; i < demux->essence_tracks->len; i++) {
+ GstMXFDemuxEssenceTrack *t =
+ &g_array_index (demux->essence_tracks, GstMXFDemuxEssenceTrack,
+ i);
+ t->position = -1;
+ }
+ demux->current_partition = NULL;
+ demux->seqnum = gst_event_get_seqnum (event);
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ out:
+
+ return ret;
+ }
+
/* Element-level query handler (GstElement::query).
 *
 * Unlike the pad query handler, POSITION/DURATION here are element-wide:
 * position is the current segment position and duration is the maximum
 * duration over all source pads. Unhandled queries are forwarded
 * upstream through the sink pad.
 */
static gboolean
gst_mxf_demux_query (GstElement * element, GstQuery * query)
{
  GstMXFDemux *demux = GST_MXF_DEMUX (element);
  gboolean ret = FALSE;

  GST_DEBUG_OBJECT (demux, "handling query %s",
      gst_query_type_get_name (GST_QUERY_TYPE (query)));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 pos;

      gst_query_parse_position (query, &format, NULL);
      if (format != GST_FORMAT_TIME)
        goto error;

      pos = demux->segment.position;

      GST_DEBUG_OBJECT (demux,
          "Returning position %" G_GINT64_FORMAT " in format %s", pos,
          gst_format_get_name (format));

      gst_query_set_position (query, format, pos);
      ret = TRUE;

      break;
    }
    case GST_QUERY_DURATION:{
      gint64 duration = -1;
      GstFormat format;
      guint i;

      gst_query_parse_duration (query, &format, NULL);
      if (format != GST_FORMAT_TIME)
        goto error;

      if (demux->src->len == 0)
        goto done;

      /* element duration = maximum over all material track durations */
      g_rw_lock_reader_lock (&demux->metadata_lock);
      for (i = 0; i < demux->src->len; i++) {
        GstMXFDemuxPad *pad = g_ptr_array_index (demux->src, i);
        gint64 pdur = -1;

        if (!pad->material_track || !pad->material_track->parent.sequence)
          continue;

        pdur = pad->material_track->parent.sequence->duration;
        /* skip tracks with unknown duration or unusable edit rate */
        if (pad->material_track->edit_rate.n == 0 ||
            pad->material_track->edit_rate.d == 0 || pdur <= -1)
          continue;

        pdur =
            gst_util_uint64_scale (pdur,
            GST_SECOND * pad->material_track->edit_rate.d,
            pad->material_track->edit_rate.n);
        duration = MAX (duration, pdur);
      }
      g_rw_lock_reader_unlock (&demux->metadata_lock);

      if (duration == -1) {
        GST_DEBUG_OBJECT (demux, "No duration known (yet)");
        goto done;
      }

      GST_DEBUG_OBJECT (demux,
          "Returning duration %" G_GINT64_FORMAT " in format %s", duration,
          gst_format_get_name (format));

      gst_query_set_duration (query, format, duration);
      ret = TRUE;
      break;
    }
    case GST_QUERY_SEEKING:{
      GstFormat fmt;

      ret = TRUE;
      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      /* only TIME seeking is supported */
      if (fmt != GST_FORMAT_TIME) {
        gst_query_set_seeking (query, fmt, FALSE, -1, -1);
        goto done;
      }

      if (demux->random_access) {
        /* pull mode: seekable, end of range unknown here */
        gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, -1);
      } else {
        /* push mode: seekable only if upstream can seek in BYTES */
        GstQuery *peerquery = gst_query_new_seeking (GST_FORMAT_BYTES);
        gboolean seekable;

        seekable = gst_pad_peer_query (demux->sinkpad, peerquery);
        if (seekable)
          gst_query_parse_seeking (peerquery, NULL, &seekable, NULL, NULL);
        if (seekable)
          gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, -1);
        else
          gst_query_set_seeking (query, GST_FORMAT_TIME, FALSE, -1, -1);

        gst_query_unref (peerquery);
      }

      break;
    }
    case GST_QUERY_SEGMENT:{
      GstFormat format;
      gint64 start, stop;

      format = demux->segment.format;

      start =
          gst_segment_to_stream_time (&demux->segment, format,
          demux->segment.start);
      /* an open-ended segment stops at the total duration */
      if ((stop = demux->segment.stop) == -1)
        stop = demux->segment.duration;
      else
        stop = gst_segment_to_stream_time (&demux->segment, format, stop);

      gst_query_set_segment (query, demux->segment.rate, format, start, stop);
      ret = TRUE;
      break;
    }
    default:
      /* else forward upstream */
      ret = gst_pad_peer_query (demux->sinkpad, query);
      break;
  }

done:
  return ret;

  /* ERRORS */
error:
  {
    GST_DEBUG_OBJECT (demux, "query failed");
    goto done;
  }
}
+
+ static GstStateChangeReturn
+ gst_mxf_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstMXFDemux *demux = GST_MXF_DEMUX (element);
+ GstStateChangeReturn ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ demux->seqnum = gst_util_seqnum_next ();
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_mxf_demux_reset (demux);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
/* GObject property setter for "package" (requested package UMID string)
 * and "max-drift" (maximum inter-track drift in nanoseconds). */
static void
gst_mxf_demux_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstMXFDemux *demux = GST_MXF_DEMUX (object);

  switch (prop_id) {
    case PROP_PACKAGE:
      /* replace any previously requested package string */
      g_free (demux->requested_package_string);
      demux->requested_package_string = g_value_dup_string (value);
      break;
    case PROP_MAX_DRIFT:
      demux->max_drift = g_value_get_uint64 (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* GObject property getter. "structure" serializes the resolved preface
 * metadata into a GstStructure under the metadata read lock. */
static void
gst_mxf_demux_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstMXFDemux *demux = GST_MXF_DEMUX (object);

  switch (prop_id) {
    case PROP_PACKAGE:
      g_value_set_string (value, demux->current_package_string);
      break;
    case PROP_MAX_DRIFT:
      g_value_set_uint64 (value, demux->max_drift);
      break;
    case PROP_STRUCTURE:{
      GstStructure *s;

      g_rw_lock_reader_lock (&demux->metadata_lock);
      /* only expose the preface once it was successfully resolved */
      if (demux->preface &&
          MXF_METADATA_BASE (demux->preface)->resolved ==
          MXF_METADATA_BASE_RESOLVE_STATE_SUCCESS)
        s = mxf_metadata_base_to_structure (MXF_METADATA_BASE (demux->preface));
      else
        s = NULL;

      gst_value_set_structure (value, s);

      /* gst_value_set_structure() copies, so drop our own structure */
      if (s)
        gst_structure_free (s);

      g_rw_lock_reader_unlock (&demux->metadata_lock);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
+ static void
+ gst_mxf_demux_finalize (GObject * object)
+ {
+ GstMXFDemux *demux = GST_MXF_DEMUX (object);
+
+ gst_mxf_demux_reset (demux);
+
+ if (demux->adapter) {
+ g_object_unref (demux->adapter);
+ demux->adapter = NULL;
+ }
+
+ if (demux->flowcombiner) {
+ gst_flow_combiner_free (demux->flowcombiner);
+ demux->flowcombiner = NULL;
+ }
+
+ if (demux->close_seg_event) {
+ gst_event_unref (demux->close_seg_event);
+ demux->close_seg_event = NULL;
+ }
+
+ g_free (demux->current_package_string);
+ demux->current_package_string = NULL;
+ g_free (demux->requested_package_string);
+ demux->requested_package_string = NULL;
+
+ g_ptr_array_free (demux->src, TRUE);
+ demux->src = NULL;
+ g_array_free (demux->essence_tracks, TRUE);
+ demux->essence_tracks = NULL;
+
+ g_hash_table_destroy (demux->metadata);
+
+ g_rw_lock_clear (&demux->metadata_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
/* Class initialization: registers the debug category, the "package",
 * "max-drift" and read-only "structure" properties, the state-change
 * and element query vmethods, the pad templates and element metadata. */
static void
gst_mxf_demux_class_init (GstMXFDemuxClass * klass)
{
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (mxfdemux_debug, "mxfdemux", 0, "MXF demuxer");

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_mxf_demux_finalize;
  gobject_class->set_property = gst_mxf_demux_set_property;
  gobject_class->get_property = gst_mxf_demux_get_property;

  g_object_class_install_property (gobject_class, PROP_PACKAGE,
      g_param_spec_string ("package", "Package",
          "Material or Source package to use for playback", NULL,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* minimum drift is 100ms, see DEFAULT_MAX_DRIFT for the default */
  g_object_class_install_property (gobject_class, PROP_MAX_DRIFT,
      g_param_spec_uint64 ("max-drift", "Maximum drift",
          "Maximum number of nanoseconds by which tracks can differ",
          100 * GST_MSECOND, G_MAXUINT64, DEFAULT_MAX_DRIFT,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_STRUCTURE,
      g_param_spec_boxed ("structure", "Structure",
          "Structural metadata of the MXF file",
          GST_TYPE_STRUCTURE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_mxf_demux_change_state);
  gstelement_class->query = GST_DEBUG_FUNCPTR (gst_mxf_demux_query);

  gst_element_class_add_static_pad_template (gstelement_class,
      &mxf_sink_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &mxf_src_template);
  gst_element_class_set_static_metadata (gstelement_class, "MXF Demuxer",
      "Codec/Demuxer", "Demux MXF files",
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
+
+ static void
+ gst_mxf_demux_init (GstMXFDemux * demux)
+ {
+ demux->sinkpad =
+ gst_pad_new_from_static_template (&mxf_sink_template, "sink");
+
+ gst_pad_set_event_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_mxf_demux_sink_event));
+ gst_pad_set_chain_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_mxf_demux_chain));
+ gst_pad_set_activate_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_mxf_demux_sink_activate));
+ gst_pad_set_activatemode_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_mxf_demux_sink_activate_mode));
+
+ gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+
+ demux->max_drift = DEFAULT_MAX_DRIFT;
+
+ demux->adapter = gst_adapter_new ();
+ demux->flowcombiner = gst_flow_combiner_new ();
+ g_rw_lock_init (&demux->metadata_lock);
+
+ demux->src = g_ptr_array_new ();
+ demux->essence_tracks =
+ g_array_new (FALSE, FALSE, sizeof (GstMXFDemuxEssenceTrack));
+
+ gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+
+ gst_mxf_demux_reset (demux);
+ }
--- /dev/null
- gst_caps_from_string ("video/x-dv, width = "
+ /* GStreamer
+ * Copyright (C) 2008-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* Implementation of SMPTE 383M - Mapping DV-DIF data into the MXF
+ * Generic Container
+ */
+
+ /* TODO:
+ * - playbin hangs on a lot of MXF/DV-DIF files (bug #563827)
+ * - decodebin creates loops inside the linking graph (bug #563828)
+ * - track descriptor might be multiple descriptor, one for sound, one for video
+ * - there might be 2 tracks for one essence, i.e. one audio/one video track
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <string.h>
+
+ #include "mxfdv-dif.h"
+ #include "mxfessence.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (mxf_debug);
+ #define GST_CAT_DEFAULT mxf_debug
+
/* Base UL of the DV picture essence coding; only the leading bytes are
 * given, the remainder is matched as a prefix via mxf_ul_is_subclass()
 * (used for Avid-wrapped DV detection below). */
static const MXFUL picture_essence_coding_dv = { {
  0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x01, 0x04, 0x01, 0x02, 0x02,
  0x02}
};
+
+ static gboolean
+ mxf_is_dv_dif_essence_track (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, FALSE);
+
+ if (track->parent.descriptor == NULL)
+ return FALSE;
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ MXFMetadataFileDescriptor *d = track->parent.descriptor[i];
+ MXFUL *key;
+
+ if (!d)
+ continue;
+
+ key = &d->essence_container;
+ /* SMPTE 383M 8 */
+ if (mxf_is_generic_container_essence_container_label (key) &&
+ key->u[12] == 0x02 && key->u[13] == 0x02) {
+ return TRUE;
+ } else if (mxf_is_avid_essence_container_label (key)) {
+ MXFMetadataGenericPictureEssenceDescriptor *p;
+
+ if (!MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (d))
+ return FALSE;
+ p = MXF_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (d);
+
+ key = &p->picture_essence_coding;
+ if (mxf_ul_is_subclass (&picture_essence_coding_dv, key))
+ return TRUE;
+ }
+ }
+
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ mxf_dv_dif_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
+ GstCaps * caps,
+ MXFMetadataTimelineTrack * track,
+ gpointer mapping_data, GstBuffer ** outbuf)
+ {
+ *outbuf = buffer;
+
+ /* SMPTE 383 6.1.1 */
+ if (key->u[12] != 0x18 || (key->u[14] != 0x01 && key->u[14] != 0x02)) {
+ GST_ERROR ("Invalid DV-DIF essence element");
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static MXFEssenceWrapping
+ mxf_dv_dif_get_track_wrapping (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (!track->parent.descriptor[i])
+ continue;
+
+ if (!MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i]))
+ continue;
+
+ switch (track->parent.descriptor[i]->essence_container.u[15]) {
+ case 0x01:
+ return MXF_ESSENCE_WRAPPING_FRAME_WRAPPING;
+ break;
+ case 0x02:
+ return MXF_ESSENCE_WRAPPING_CLIP_WRAPPING;
+ break;
+ default:
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ break;
+ }
+ }
+
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ static GstCaps *
+ mxf_dv_dif_create_caps (MXFMetadataTimelineTrack * track, GstTagList ** tags,
+ gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
+ gpointer * mapping_data)
+ {
+ GstCaps *caps = NULL;
+ guint i;
+ MXFMetadataGenericPictureEssenceDescriptor *d = NULL;
+
+ g_return_val_if_fail (track != NULL, NULL);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return NULL;
+ }
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i])) {
+ d = MXF_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i]);
+ break;
+ }
+ }
+
+ *handler = mxf_dv_dif_handle_essence_element;
+ /* SMPTE 383M 8 */
+
+ /* TODO: might be video or audio only, use values of the generic sound/picture
+ * descriptor in the caps in that case
+ */
+ GST_DEBUG ("Found DV-DIF stream");
+ caps =
+ gst_caps_new_simple ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+
+ if (d)
+ mxf_metadata_generic_picture_essence_descriptor_set_caps (d, caps);
+
+ if (!*tags)
+ *tags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (*tags, GST_TAG_MERGE_APPEND, GST_TAG_CODEC, "DV-DIF", NULL);
+
+ *intra_only = TRUE;
+
+ return caps;
+ }
+
/* vmethod table for recognizing and demuxing DV-DIF essence tracks */
static const MXFEssenceElementHandler mxf_dv_dif_essence_element_handler = {
  mxf_is_dv_dif_essence_track,
  mxf_dv_dif_get_track_wrapping,
  mxf_dv_dif_create_caps
};
+
/* Essence write function: DV-DIF buffers are written out unchanged. */
static GstFlowReturn
mxf_dv_dif_write_func (GstBuffer * buffer,
    gpointer mapping_data, GstAdapter * adapter, GstBuffer ** outbuf,
    gboolean flush)
{
  *outbuf = buffer;
  return GST_FLOW_OK;
}
+
/* Essence container UL written into the descriptor for muxed DV-DIF
 * essence; copied verbatim (16 raw bytes) in mxf_dv_dif_get_descriptor() */
static const guint8 dv_dif_essence_container_ul[] = {
  0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x01,
  0x0d, 0x01, 0x03, 0x01, 0x02, 0x02, 0x7f, 0x01
};
+
+ static MXFMetadataFileDescriptor *
+ mxf_dv_dif_get_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
+ MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
+ {
+ MXFMetadataCDCIPictureEssenceDescriptor *ret;
+
+ ret = (MXFMetadataCDCIPictureEssenceDescriptor *)
+ g_object_new (MXF_TYPE_METADATA_CDCI_PICTURE_ESSENCE_DESCRIPTOR, NULL);
+
+ memcpy (&ret->parent.parent.essence_container, &dv_dif_essence_container_ul,
+ 16);
+
+ if (!mxf_metadata_generic_picture_essence_descriptor_from_caps (&ret->parent,
+ caps)) {
+ g_object_unref (ret);
+ return NULL;
+ }
+ *handler = mxf_dv_dif_write_func;
+
+ return (MXFMetadataFileDescriptor *) ret;
+ }
+
+ static void
+ mxf_dv_dif_update_descriptor (MXFMetadataFileDescriptor * d, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf)
+ {
+ return;
+ }
+
/* The edit rate of a DV-DIF track equals the descriptor's sample rate. */
static void
mxf_dv_dif_get_edit_rate (MXFMetadataFileDescriptor * a, GstCaps * caps,
    gpointer mapping_data, GstBuffer * buf, MXFMetadataSourcePackage * package,
    MXFMetadataTimelineTrack * track, MXFFraction * edit_rate)
{
  edit_rate->n = a->sample_rate.n;
  edit_rate->d = a->sample_rate.d;
}
+
/* Fixed part of the essence element track number: item type 0x18,
 * element type 0x01 (matching the key check in
 * mxf_dv_dif_handle_essence_element()). */
static guint32
mxf_dv_dif_get_track_number_template (MXFMetadataFileDescriptor * a,
    GstCaps * caps, gpointer mapping_data)
{
  return (0x18 << 24) | (0x01 << 8);
}
+
+ static MXFEssenceElementWriter mxf_dv_dif_essence_element_writer = {
+ mxf_dv_dif_get_descriptor,
+ mxf_dv_dif_update_descriptor,
+ mxf_dv_dif_get_edit_rate,
+ mxf_dv_dif_get_track_number_template,
+ NULL,
+ {{0,}}
+ };
+
/* Register the DV-DIF essence handler and element writer with the MXF
 * plugin core. Called once at plugin load. */
void
mxf_dv_dif_init (void)
{
  GstCaps *tmp = NULL;
  mxf_essence_element_handler_register (&mxf_dv_dif_essence_element_handler);

  /* keep the caps in tmp so they can be unreffed after template creation
   * (NOTE(review): assumes gst_pad_template_new() takes its own reference
   * to the caps — confirm against the GstPadTemplate documentation) */
  mxf_dv_dif_essence_element_writer.pad_template =
      gst_pad_template_new ("dv_dif_video_sink_%u", GST_PAD_SINK,
      GST_PAD_REQUEST,
      tmp = gst_caps_from_string ("video/x-dv, width = "
          GST_VIDEO_SIZE_RANGE ", height = " GST_VIDEO_SIZE_RANGE
          ", framerate = " GST_VIDEO_FPS_RANGE ", systemstream = true"));
  gst_caps_unref (tmp);
  memcpy (&mxf_dv_dif_essence_element_writer.data_definition,
      mxf_metadata_track_identifier_get (MXF_METADATA_TRACK_PICTURE_ESSENCE),
      16);
  mxf_essence_element_writer_register (&mxf_dv_dif_essence_element_writer);
}
--- /dev/null
- gst_caps_from_string ("image/x-jpc, width = "
+ /* GStreamer
+ * Copyright (C) 2008-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* Implementation of SMPTE 422M - Mapping JPEG2000 codestreams into the MXF
+ * Generic Container
+ */
+
+ /* TODO:
+ * - parse the jpeg2000 sub-descriptor, see SMPTE 422M 7.2
+ * - Add support for XYZ colorspace
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <string.h>
+
+ #include "mxfjpeg2000.h"
+ #include "mxfessence.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (mxf_debug);
+ #define GST_CAT_DEFAULT mxf_debug
+
+ static gboolean
+ mxf_is_jpeg2000_essence_track (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, FALSE);
+
+ if (track->parent.descriptor == NULL)
+ return FALSE;
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ MXFMetadataFileDescriptor *d = track->parent.descriptor[i];
+ MXFUL *key;
+
+ if (!d)
+ continue;
+
+ key = &d->essence_container;
+ /* SMPTE 422M 5.4 */
+ if (mxf_is_generic_container_essence_container_label (key) &&
+ key->u[12] == 0x02 && key->u[13] == 0x0c &&
+ (key->u[14] == 0x01 || key->u[14] == 0x02))
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ mxf_jpeg2000_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
+ GstCaps * caps,
+ MXFMetadataTimelineTrack * track,
+ gpointer mapping_data, GstBuffer ** outbuf)
+ {
+ *outbuf = buffer;
+
+ /* SMPTE 422M 5.1 */
+ if (key->u[12] != 0x15 || (key->u[14] != 0x08 && key->u[14] != 0x09)) {
+ GST_ERROR ("Invalid JPEG2000 essence element");
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_FLOW_OK;
+ }
+
/* Determine the wrapping (frame/clip/custom) of a JPEG2000 track from
 * byte 14 of the essence container label (SMPTE 422M). */
static MXFEssenceWrapping
mxf_jpeg2000_get_track_wrapping (const MXFMetadataTimelineTrack * track)
{
  guint i;

  g_return_val_if_fail (track != NULL, MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING);

  if (track->parent.descriptor == NULL) {
    GST_ERROR ("No descriptor found for this track");
    return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
  }

  for (i = 0; i < track->parent.n_descriptor; i++) {
    if (!track->parent.descriptor[i])
      continue;

    /* accept a picture essence descriptor, or any plain (non-multiple)
     * file descriptor */
    if (!MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
            parent.descriptor[i])
        && !(MXF_IS_METADATA_FILE_DESCRIPTOR (track->parent.descriptor[i])
            && !MXF_IS_METADATA_MULTIPLE_DESCRIPTOR (track->
                parent.descriptor[i])))
      continue;

    switch (track->parent.descriptor[i]->essence_container.u[14]) {
      case 0x01:
        return MXF_ESSENCE_WRAPPING_FRAME_WRAPPING;
        break;
      case 0x02:
        return MXF_ESSENCE_WRAPPING_CLIP_WRAPPING;
        break;
      default:
        return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
        break;
    }
  }

  return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
}
+
/* Build the pad caps for a JPEG2000 track.
 *
 * The colorspace is derived from the descriptor: CDCI -> sYUV; RGBA ->
 * inspect the pixel layout for R/G/B vs Y/U/V vs X/Y/Z components;
 * default sRGB. XYZ codestreams are rejected (not supported yet).
 */
static GstCaps *
mxf_jpeg2000_create_caps (MXFMetadataTimelineTrack * track, GstTagList ** tags,
    gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
    gpointer * mapping_data)
{
  MXFMetadataFileDescriptor *f = NULL;
  MXFMetadataGenericPictureEssenceDescriptor *p = NULL;
  guint i;
  GstCaps *caps = NULL;
  const gchar *colorspace;

  g_return_val_if_fail (track != NULL, NULL);

  if (track->parent.descriptor == NULL) {
    GST_ERROR ("No descriptor found for this track");
    return NULL;
  }

  /* prefer a generic picture essence descriptor, fall back to any
   * plain (non-multiple) file descriptor */
  for (i = 0; i < track->parent.n_descriptor; i++) {
    if (!track->parent.descriptor[i])
      continue;

    if (MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
            parent.descriptor[i])) {
      p = (MXFMetadataGenericPictureEssenceDescriptor *) track->parent.
          descriptor[i];
      f = track->parent.descriptor[i];
      break;
    } else if (MXF_IS_METADATA_FILE_DESCRIPTOR (track->parent.descriptor[i]) &&
        !MXF_IS_METADATA_MULTIPLE_DESCRIPTOR (track->parent.descriptor[i])) {
      f = track->parent.descriptor[i];
    }
  }

  if (!f) {
    GST_ERROR ("No descriptor found for this track");
    return NULL;
  }

  colorspace = "sRGB";
  if (p && MXF_IS_METADATA_CDCI_PICTURE_ESSENCE_DESCRIPTOR (p)) {
    colorspace = "sYUV";
  } else if (p && MXF_IS_METADATA_RGBA_PICTURE_ESSENCE_DESCRIPTOR (p)) {
    MXFMetadataRGBAPictureEssenceDescriptor *r =
        (MXFMetadataRGBAPictureEssenceDescriptor *) p;
    gboolean rgb = TRUE;
    gboolean xyz = TRUE;
    gboolean yuv = TRUE;
    guint i;

    /* the pixel layout is stored as (component, depth) byte pairs:
     * rule out each colorspace whose components don't all fit */
    if (r->pixel_layout) {
      for (i = 0; i < r->n_pixel_layout; i++) {
        guint8 c = r->pixel_layout[2 * i];

        switch (c) {
          case 'R':
          case 'r':
          case 'G':
          case 'g':
          case 'B':
          case 'b':
            xyz = yuv = FALSE;
            break;
          case 'Y':
          case 'y':
            /* 'Y' occurs in both YUV and XYZ */
            rgb = FALSE;
            break;
          case 'U':
          case 'u':
          case 'V':
          case 'v':
            xyz = rgb = FALSE;
            break;
          case 'X':
          case 'x':
          case 'Z':
          case 'z':
            rgb = yuv = FALSE;
            break;
          default:
            break;
        }
      }
      if (rgb) {
        colorspace = "sRGB";
      } else if (yuv) {
        colorspace = "sYUV";
      } else if (xyz) {
        GST_ERROR ("JPEG2000 with XYZ colorspace not supported yet");
        return NULL;
      }
    }
  }

  *handler = mxf_jpeg2000_handle_essence_element;

  /* TODO: What about other field values? */
  caps =
      gst_caps_new_simple ("image/x-jpc", "colorspace",
      G_TYPE_STRING, colorspace, NULL);
  if (p) {
    mxf_metadata_generic_picture_essence_descriptor_set_caps (p, caps);
  } else {
    GST_WARNING ("Only a generic file descriptor found");
  }

  if (!*tags)
    *tags = gst_tag_list_new_empty ();
  gst_tag_list_add (*tags, GST_TAG_MERGE_APPEND, GST_TAG_VIDEO_CODEC,
      "JPEG 2000", NULL);

  *intra_only = TRUE;

  return caps;
}
+
/* vmethod table for recognizing and demuxing JPEG2000 essence tracks */
static const MXFEssenceElementHandler mxf_jpeg2000_essence_element_handler = {
  mxf_is_jpeg2000_essence_track,
  mxf_jpeg2000_get_track_wrapping,
  mxf_jpeg2000_create_caps
};
+
/* Essence write function: JPEG2000 buffers are written out unchanged. */
static GstFlowReturn
mxf_jpeg2000_write_func (GstBuffer * buffer,
    gpointer mapping_data, GstAdapter * adapter, GstBuffer ** outbuf,
    gboolean flush)
{
  *outbuf = buffer;
  return GST_FLOW_OK;
}
+
/* Essence container UL written into the descriptor for muxed JPEG2000
 * essence; copied verbatim (16 raw bytes) in mxf_jpeg2000_get_descriptor() */
static const guint8 jpeg2000_essence_container_ul[] = {
  0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x07,
  0x0d, 0x01, 0x03, 0x01, 0x02, 0x0c, 0x01, 0x00
};
+
+ static const guint jpeg2000_picture_essence_coding[] = {
+ 0x06, 0x0E, 0x2B, 0x34, 0x04, 0x01, 0x01, 0x07,
+ 0x04, 0x01, 0x02, 0x02, 0x03, 0x01, 0x01, 0x00
+ };
+
/* Create an RGBA picture essence descriptor for muxing JPEG2000.
 *
 * The caps must be image/x-jpc with a "colorspace" field of "sRGB" or
 * "sYUV"; the pixel layout is filled with 8-bit components accordingly.
 * Returns NULL on invalid caps.
 */
static MXFMetadataFileDescriptor *
mxf_jpeg2000_get_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
    MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
{
  MXFMetadataRGBAPictureEssenceDescriptor *ret;
  GstStructure *s;
  const gchar *colorspace;

  s = gst_caps_get_structure (caps, 0);
  if (strcmp (gst_structure_get_name (s), "image/x-jpc") != 0 ||
      !gst_structure_get_string (s, "colorspace")) {
    GST_ERROR ("Invalid caps %" GST_PTR_FORMAT, caps);
    return NULL;
  }

  colorspace = gst_structure_get_string (s, "colorspace");

  ret = (MXFMetadataRGBAPictureEssenceDescriptor *)
      g_object_new (MXF_TYPE_METADATA_RGBA_PICTURE_ESSENCE_DESCRIPTOR, NULL);

  memcpy (&ret->parent.parent.essence_container, &jpeg2000_essence_container_ul,
      16);
  memcpy (&ret->parent.picture_essence_coding, &jpeg2000_picture_essence_coding,
      16);

  /* pixel layout: (component, bit-depth) pairs, 8 bits per component */
  if (g_str_equal (colorspace, "sRGB")) {
    ret->n_pixel_layout = 3;
    ret->pixel_layout = g_new0 (guint8, 6);
    ret->pixel_layout[0] = 'R';
    ret->pixel_layout[1] = 8;
    ret->pixel_layout[2] = 'G';
    ret->pixel_layout[3] = 8;
    ret->pixel_layout[4] = 'B';
    ret->pixel_layout[5] = 8;
  } else if (g_str_equal (colorspace, "sYUV")) {
    ret->n_pixel_layout = 3;
    ret->pixel_layout = g_new0 (guint8, 6);
    ret->pixel_layout[0] = 'Y';
    ret->pixel_layout[1] = 8;
    ret->pixel_layout[2] = 'U';
    ret->pixel_layout[3] = 8;
    ret->pixel_layout[4] = 'V';
    ret->pixel_layout[5] = 8;
  } else {
    /* the colorspace was validated above, so this is unreachable */
    g_assert_not_reached ();
  }

  if (!mxf_metadata_generic_picture_essence_descriptor_from_caps (&ret->parent,
          caps)) {
    g_object_unref (ret);
    return NULL;
  }

  *handler = mxf_jpeg2000_write_func;

  return (MXFMetadataFileDescriptor *) ret;
}
+
+ static void
+ mxf_jpeg2000_update_descriptor (MXFMetadataFileDescriptor * d, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf)
+ {
+ return;
+ }
+
+ static void
+ mxf_jpeg2000_get_edit_rate (MXFMetadataFileDescriptor * a, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf, MXFMetadataSourcePackage * package,
+ MXFMetadataTimelineTrack * track, MXFFraction * edit_rate)
+ {
+ edit_rate->n = a->sample_rate.n;
+ edit_rate->d = a->sample_rate.d;
+ }
+
+ static guint32
+ mxf_jpeg2000_get_track_number_template (MXFMetadataFileDescriptor * a,
+ GstCaps * caps, gpointer mapping_data)
+ {
+ return (0x15 << 24) | (0x08 << 8);
+ }
+
+ static MXFEssenceElementWriter mxf_jpeg2000_essence_element_writer = {
+ mxf_jpeg2000_get_descriptor,
+ mxf_jpeg2000_update_descriptor,
+ mxf_jpeg2000_get_edit_rate,
+ mxf_jpeg2000_get_track_number_template,
+ NULL,
+ {{0,}}
+ };
+
+ void
+ mxf_jpeg2000_init (void)
+ {
++ GstCaps *tmp = NULL;
+ mxf_essence_element_handler_register (&mxf_jpeg2000_essence_element_handler);
+
+ mxf_jpeg2000_essence_element_writer.pad_template =
+ gst_pad_template_new ("jpeg2000_video_sink_%u", GST_PAD_SINK,
+ GST_PAD_REQUEST,
++ tmp = gst_caps_from_string ("image/x-jpc, width = "
+ GST_VIDEO_SIZE_RANGE ", height = " GST_VIDEO_SIZE_RANGE
+ ", framerate = " GST_VIDEO_FPS_RANGE
+ ", colorspace = (string) { \"sRGB\", \"sYUV\" }"));
++ gst_caps_unref (tmp);
+ memcpy (&mxf_jpeg2000_essence_element_writer.data_definition,
+ mxf_metadata_track_identifier_get (MXF_METADATA_TRACK_PICTURE_ESSENCE),
+ 16);
+ mxf_essence_element_writer_register (&mxf_jpeg2000_essence_element_writer);
+ }
--- /dev/null
-/* TODO:
+ /* GStreamer
+ * Copyright (C) 2008-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* Implementation of SMPTE 384M - Mapping of Uncompressed Pictures into the MXF
+ * Generic Container
+ */
+
- gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGB") "; "
++/* TODO:
+ * - Handle CDCI essence
+ * - Handle more formats with RGBA descriptor (4:4:4 / 4:4:4:4 YUV, RGB565, ...)
+ * - Handle all the dimensions and other properties in the picture
+ * essence descriptors correctly according to S377M Annex E
+ * - Handle interlaced correctly, i.e. weave until we support one-field-per-buffer
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <string.h>
+
+ #include <gst/video/video.h>
+
+ #include "mxfup.h"
+ #include "mxfessence.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (mxf_debug);
+ #define GST_CAT_DEFAULT mxf_debug
+
+ static const struct
+ {
+ const gchar *format;
+ guint32 n_pixel_layout;
+ guint8 pixel_layout[10];
+ const gchar *caps_string;
+ } _rgba_mapping_table[] = {
+ {
+ "RGB", 3, {
+ 'R', 8, 'G', 8, 'B', 8}, GST_VIDEO_CAPS_MAKE ("RGB")}, {
+ "BGR", 3, {
+ 'B', 8, 'G', 8, 'R', 8}, GST_VIDEO_CAPS_MAKE ("BGR")}, {
+ "v308", 3, {
+ 'Y', 8, 'U', 8, 'V', 8}, GST_VIDEO_CAPS_MAKE ("v308")}, {
+ "xRGB", 4, {
+ 'F', 8, 'R', 8, 'G', 8, 'B', 8}, GST_VIDEO_CAPS_MAKE ("xRGB")}, {
+ "RGBx", 4, {
+ 'R', 8, 'G', 8, 'B', 8, 'F', 8}, GST_VIDEO_CAPS_MAKE ("RGBx")}, {
+ "xBGR", 4, {
+ 'F', 8, 'B', 8, 'G', 8, 'R', 8}, GST_VIDEO_CAPS_MAKE ("xBGR")}, {
+ "BGRx", 4, {
+ 'B', 8, 'G', 8, 'R', 8, 'F', 8}, GST_VIDEO_CAPS_MAKE ("BGRx")}, {
+ "RGBA", 4, {
+ 'R', 8, 'G', 8, 'B', 8, 'A', 8}, GST_VIDEO_CAPS_MAKE ("RGBA")}, {
+ "ARGB", 4, {
+ 'A', 8, 'R', 8, 'G', 8, 'B', 8}, GST_VIDEO_CAPS_MAKE ("ARGB")}, {
+ "BGRA", 4, {
+ 'B', 8, 'G', 8, 'R', 8, 'A', 8}, GST_VIDEO_CAPS_MAKE ("BGRA")}, {
+ "ABGR", 4, {
+ 'A', 8, 'B', 8, 'G', 8, 'R', 8}, GST_VIDEO_CAPS_MAKE ("ABGR")}, {
+ "AYUV", 4, {
+ 'A', 8, 'Y', 8, 'U', 8, 'V', 8}, GST_VIDEO_CAPS_MAKE ("AYUV")}
+ };
+
+ static const struct
+ {
+ const gchar *format;
+ guint bpp;
+ guint horizontal_subsampling;
+ guint vertical_subsampling;
+ gboolean reversed_byte_order;
+ const gchar *caps_string;
+ } _cdci_mapping_table[] = {
+ {
+ "YUY2", 2, 2, 1, TRUE, GST_VIDEO_CAPS_MAKE ("YUY2")}, {
+ "UYVY", 2, 2, 1, FALSE, GST_VIDEO_CAPS_MAKE ("UYVY")},};
+
+ typedef struct
+ {
+ const gchar *format; /* video format string */
+ gint width, height;
+ guint bpp;
+ guint32 image_start_offset;
+ guint32 image_end_offset;
+ } MXFUPMappingData;
+
+ static gboolean
+ mxf_is_up_essence_track (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, FALSE);
+
+ if (track->parent.descriptor == NULL)
+ return FALSE;
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ MXFMetadataFileDescriptor *d = track->parent.descriptor[i];
+ MXFUL *key;
+
+ if (!d)
+ continue;
+
+ key = &d->essence_container;
+ /* SMPTE 384M 8 */
+ if (mxf_is_generic_container_essence_container_label (key) &&
+ key->u[12] == 0x02 && key->u[13] == 0x05 && key->u[15] <= 0x03)
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ mxf_up_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
+ GstCaps * caps,
+ MXFMetadataTimelineTrack * track,
+ gpointer mapping_data, GstBuffer ** outbuf)
+ {
+ MXFUPMappingData *data = mapping_data;
+
+ /* SMPTE 384M 7.1 */
+ if (key->u[12] != 0x15 || (key->u[14] != 0x01 && key->u[14] != 0x02
+ && key->u[14] != 0x03 && key->u[14] != 0x04)) {
+ GST_ERROR ("Invalid uncompressed picture essence element");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ if (!data) {
+ GST_ERROR ("Invalid mapping data");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ if (data->image_start_offset == 0 && data->image_end_offset == 0) {
+ } else {
+ if (data->image_start_offset + data->image_end_offset
+ > gst_buffer_get_size (buffer)) {
+ gst_buffer_unref (buffer);
+ GST_ERROR ("Invalid buffer size");
+ return GST_FLOW_ERROR;
+ } else {
+ gst_buffer_resize (buffer, data->image_start_offset,
+ data->image_end_offset - data->image_start_offset);
+ }
+ }
+
+ if (gst_buffer_get_size (buffer) != data->bpp * data->width * data->height) {
+ GST_ERROR ("Invalid buffer size");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ if (data->bpp != 4
+ || GST_ROUND_UP_4 (data->width * data->bpp) != data->width * data->bpp) {
+ guint y;
+ GstBuffer *ret;
+ GstMapInfo inmap, outmap;
+ guint8 *indata, *outdata;
+
+ ret =
+ gst_buffer_new_and_alloc (GST_ROUND_UP_4 (data->width * data->bpp) *
+ data->height);
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ gst_buffer_map (ret, &outmap, GST_MAP_WRITE);
+ indata = inmap.data;
+ outdata = outmap.data;
+
+ for (y = 0; y < data->height; y++) {
+ memcpy (outdata, indata, data->width * data->bpp);
+ outdata += GST_ROUND_UP_4 (data->width * data->bpp);
+ indata += data->width * data->bpp;
+ }
+
+ gst_buffer_unmap (buffer, &inmap);
+ gst_buffer_unmap (ret, &outmap);
+
+ gst_buffer_unref (buffer);
+ *outbuf = ret;
+ } else {
+ *outbuf = buffer;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static MXFEssenceWrapping
+ mxf_up_get_track_wrapping (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (!track->parent.descriptor[i])
+ continue;
+
+ if (!MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i]))
+ continue;
+
+ switch (track->parent.descriptor[i]->essence_container.u[15]) {
+ case 0x01:
+ return MXF_ESSENCE_WRAPPING_FRAME_WRAPPING;
+ break;
+ case 0x02:
+ return MXF_ESSENCE_WRAPPING_CLIP_WRAPPING;
+ break;
+ default:
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ break;
+ }
+ }
+
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ static GstCaps *
+ mxf_up_rgba_create_caps (MXFMetadataTimelineTrack * track,
+ MXFMetadataRGBAPictureEssenceDescriptor * d, GstTagList ** tags,
+ gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
+ gpointer * mapping_data)
+ {
+ GstCaps *caps = NULL;
+ guint i;
+ const gchar *format = NULL;
+ guint bpp;
+
+ if (!d->pixel_layout) {
+ GST_ERROR ("No pixel layout");
+ return NULL;
+ }
+
+ for (i = 0; i < G_N_ELEMENTS (_rgba_mapping_table); i++) {
+ if (d->n_pixel_layout != _rgba_mapping_table[i].n_pixel_layout)
+ continue;
+
+ if (memcmp (d->pixel_layout, &_rgba_mapping_table[i].pixel_layout,
+ _rgba_mapping_table[i].n_pixel_layout * 2) == 0) {
+ caps = gst_caps_from_string (_rgba_mapping_table[i].caps_string);
+ format = _rgba_mapping_table[i].format;
+ bpp = _rgba_mapping_table[i].n_pixel_layout;
+ break;
+ }
+ }
+
+ if (caps) {
+ MXFUPMappingData *data = g_new0 (MXFUPMappingData, 1);
+
+ mxf_metadata_generic_picture_essence_descriptor_set_caps (&d->parent, caps);
+
+ data->width = d->parent.stored_width;
+ data->height = d->parent.stored_height;
+ data->format = format;
+ data->bpp = bpp;
+ data->image_start_offset =
+ ((MXFMetadataGenericPictureEssenceDescriptor *) d)->image_start_offset;
+ data->image_end_offset =
+ ((MXFMetadataGenericPictureEssenceDescriptor *) d)->image_end_offset;
+
+ *mapping_data = data;
+ *intra_only = TRUE;
+ } else {
+ GST_WARNING ("Unsupported pixel layout");
+ }
+
+ return caps;
+ }
+
+ static GstCaps *
+ mxf_up_cdci_create_caps (MXFMetadataTimelineTrack * track,
+ MXFMetadataCDCIPictureEssenceDescriptor * d, GstTagList ** tags,
+ gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
+ gpointer * mapping_data)
+ {
+ GstCaps *caps = NULL;
+ guint i;
+ const gchar *format;
+ guint bpp;
+
+ for (i = 0; i < G_N_ELEMENTS (_cdci_mapping_table); i++) {
+ if (_cdci_mapping_table[i].horizontal_subsampling ==
+ d->horizontal_subsampling
+ && _cdci_mapping_table[i].vertical_subsampling ==
+ d->vertical_subsampling
+ && _cdci_mapping_table[i].reversed_byte_order ==
+ d->reversed_byte_order) {
+ caps = gst_caps_from_string (_cdci_mapping_table[i].caps_string);
+ format = _cdci_mapping_table[i].format;
+ bpp = _cdci_mapping_table[i].bpp;
+ break;
+ }
+ }
+
+ if (caps) {
+ MXFUPMappingData *data = g_new0 (MXFUPMappingData, 1);
+
+ mxf_metadata_generic_picture_essence_descriptor_set_caps (&d->parent, caps);
+
+ data->width = d->parent.stored_width;
+ data->height = d->parent.stored_height;
+ data->format = format;
+ data->bpp = bpp;
+ data->image_start_offset =
+ ((MXFMetadataGenericPictureEssenceDescriptor *) d)->image_start_offset;
+ data->image_end_offset =
+ ((MXFMetadataGenericPictureEssenceDescriptor *) d)->image_end_offset;
+
+ *mapping_data = data;
+ *intra_only = TRUE;
+ } else {
+ GST_WARNING ("Unsupported CDCI format");
+ }
+
+ return caps;
+ }
+
+ static GstCaps *
+ mxf_up_create_caps (MXFMetadataTimelineTrack * track, GstTagList ** tags,
+ gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
+ gpointer * mapping_data)
+ {
+ MXFMetadataGenericPictureEssenceDescriptor *p = NULL;
+ MXFMetadataCDCIPictureEssenceDescriptor *c = NULL;
+ MXFMetadataRGBAPictureEssenceDescriptor *r = NULL;
+ guint i;
+ GstCaps *caps = NULL;
+
+ g_return_val_if_fail (track != NULL, NULL);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return NULL;
+ }
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (!track->parent.descriptor[i])
+ continue;
+
+ if (MXF_IS_METADATA_RGBA_PICTURE_ESSENCE_DESCRIPTOR (track->parent.
+ descriptor[i])) {
+ p = (MXFMetadataGenericPictureEssenceDescriptor *) track->parent.
+ descriptor[i];
+ r = (MXFMetadataRGBAPictureEssenceDescriptor *) track->parent.
+ descriptor[i];
+ break;
+ } else if (MXF_IS_METADATA_CDCI_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i])) {
+ p = (MXFMetadataGenericPictureEssenceDescriptor *) track->parent.
+ descriptor[i];
+ c = (MXFMetadataCDCIPictureEssenceDescriptor *) track->parent.
+ descriptor[i];
+ }
+ }
+
+ if (!p) {
+ GST_ERROR ("No picture essence descriptor found for this track");
+ return NULL;
+ }
+
+ *handler = mxf_up_handle_essence_element;
+
+ if (r) {
+ caps =
+ mxf_up_rgba_create_caps (track, r, tags, intra_only, handler,
+ mapping_data);
+ } else if (c) {
+ caps =
+ mxf_up_cdci_create_caps (track, c, tags, intra_only, handler,
+ mapping_data);
+ } else {
+ return NULL;
+ }
+
+ return caps;
+ }
+
+ static const MXFEssenceElementHandler mxf_up_essence_element_handler = {
+ mxf_is_up_essence_track,
+ mxf_up_get_track_wrapping,
+ mxf_up_create_caps
+ };
+
+ static GstFlowReturn
+ mxf_up_write_func (GstBuffer * buffer, gpointer mapping_data,
+ GstAdapter * adapter, GstBuffer ** outbuf, gboolean flush)
+ {
+ MXFUPMappingData *data = mapping_data;
+
+ if (!buffer)
+ return GST_FLOW_OK;
+
+ if (gst_buffer_get_size (buffer) !=
+ GST_ROUND_UP_4 (data->bpp * data->width) * data->height) {
+ GST_ERROR ("Invalid buffer size");
+ return GST_FLOW_ERROR;
+ }
+
+ if (data->bpp != 4
+ || GST_ROUND_UP_4 (data->width * data->bpp) != data->width * data->bpp) {
+ guint y;
+ GstBuffer *ret;
+ GstMapInfo inmap, outmap;
+ guint8 *indata, *outdata;
+
+ ret = gst_buffer_new_and_alloc (data->width * data->bpp * data->height);
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ gst_buffer_map (ret, &outmap, GST_MAP_WRITE);
+ indata = inmap.data;
+ outdata = outmap.data;
+
+ for (y = 0; y < data->height; y++) {
+ memcpy (outdata, indata, data->width * data->bpp);
+ indata += GST_ROUND_UP_4 (data->width * data->bpp);
+ outdata += data->width * data->bpp;
+ }
+
+ gst_buffer_unmap (buffer, &inmap);
+ gst_buffer_unmap (ret, &outmap);
+ gst_buffer_unref (buffer);
+
+ *outbuf = ret;
+ } else {
+ *outbuf = buffer;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static const guint8 up_essence_container_ul[] = {
+ 0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x01,
+ 0x0D, 0x01, 0x03, 0x01, 0x02, 0x05, 0x7F, 0x01
+ };
+
+ static MXFMetadataFileDescriptor *
+ mxf_up_get_rgba_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
+ MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
+ {
+ MXFMetadataRGBAPictureEssenceDescriptor *ret;
+ guint i;
+ GstCaps *tmp, *intersection;
+ MXFUPMappingData *md = g_new0 (MXFUPMappingData, 1);
+
+ *mapping_data = md;
+
+ ret = (MXFMetadataRGBAPictureEssenceDescriptor *)
+ g_object_new (MXF_TYPE_METADATA_RGBA_PICTURE_ESSENCE_DESCRIPTOR, NULL);
+
+ for (i = 0; i < G_N_ELEMENTS (_rgba_mapping_table); i++) {
+ tmp = gst_caps_from_string (_rgba_mapping_table[i].caps_string);
+ intersection = gst_caps_intersect (caps, tmp);
+ gst_caps_unref (tmp);
+
+ if (!gst_caps_is_empty (intersection)) {
+ gst_caps_unref (intersection);
+ ret->n_pixel_layout = _rgba_mapping_table[i].n_pixel_layout;
+ ret->pixel_layout = g_new0 (guint8, ret->n_pixel_layout * 2);
+ md->format = _rgba_mapping_table[i].format;
+ md->bpp = _rgba_mapping_table[i].n_pixel_layout;
+ memcpy (ret->pixel_layout, _rgba_mapping_table[i].pixel_layout,
+ ret->n_pixel_layout * 2);
+ break;
+ }
+ gst_caps_unref (intersection);
+ }
+
+ if (md->format == NULL) {
+ GST_ERROR ("Invalid caps %" GST_PTR_FORMAT, caps);
+ g_object_unref (ret);
+ return NULL;
+ }
+
+
+ memcpy (&ret->parent.parent.essence_container, &up_essence_container_ul, 16);
+
+ if (!mxf_metadata_generic_picture_essence_descriptor_from_caps (&ret->parent,
+ caps)) {
+ g_object_unref (ret);
+ return NULL;
+ }
+
+ md->width = ret->parent.stored_width;
+ md->height = ret->parent.stored_height;
+
+ *handler = mxf_up_write_func;
+
+ return (MXFMetadataFileDescriptor *) ret;
+ }
+
+ static MXFMetadataFileDescriptor *
+ mxf_up_get_cdci_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
+ MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
+ {
+ MXFMetadataCDCIPictureEssenceDescriptor *ret;
+ guint i;
+ GstCaps *tmp, *intersection;
+ MXFUPMappingData *md = g_new0 (MXFUPMappingData, 1);
+
+ *mapping_data = md;
+
+ ret = (MXFMetadataCDCIPictureEssenceDescriptor *)
+ g_object_new (MXF_TYPE_METADATA_CDCI_PICTURE_ESSENCE_DESCRIPTOR, NULL);
+
+ for (i = 0; i < G_N_ELEMENTS (_cdci_mapping_table); i++) {
+ tmp = gst_caps_from_string (_cdci_mapping_table[i].caps_string);
+ intersection = gst_caps_intersect (caps, tmp);
+ gst_caps_unref (tmp);
+
+ if (!gst_caps_is_empty (intersection)) {
+ gst_caps_unref (intersection);
+ ret->horizontal_subsampling =
+ _cdci_mapping_table[i].horizontal_subsampling;
+ ret->vertical_subsampling = _cdci_mapping_table[i].vertical_subsampling;
+ ret->reversed_byte_order = _cdci_mapping_table[i].reversed_byte_order;
+ md->format = _cdci_mapping_table[i].format;
+ md->bpp = _cdci_mapping_table[i].bpp;
+ break;
+ }
+ gst_caps_unref (intersection);
+ }
+
+ if (md->format == NULL) {
+ GST_ERROR ("Invalid caps %" GST_PTR_FORMAT, caps);
+ g_object_unref (ret);
+ return NULL;
+ }
+
+ memcpy (&ret->parent.parent.essence_container, &up_essence_container_ul, 16);
+
+ if (!mxf_metadata_generic_picture_essence_descriptor_from_caps (&ret->parent,
+ caps)) {
+ g_object_unref (ret);
+ return NULL;
+ }
+
+ md->width = ret->parent.stored_width;
+ md->height = ret->parent.stored_height;
+
+ *handler = mxf_up_write_func;
+
+ return (MXFMetadataFileDescriptor *) ret;
+ }
+
+ static MXFMetadataFileDescriptor *
+ mxf_up_get_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
+ MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
+ {
+ GstStructure *s;
+
+ s = gst_caps_get_structure (caps, 0);
+ if (strcmp (gst_structure_get_name (s), "video/x-raw") == 0) {
+ const gchar *format;
+
+ format = gst_structure_get_string (s, "format");
+ if (format == NULL)
+ return NULL;
+
+ if (g_str_equal (format, "YUY2") || g_str_equal (format, "UYVY"))
+ return mxf_up_get_cdci_descriptor (tmpl, caps, handler, mapping_data);
+ else
+ return mxf_up_get_rgba_descriptor (tmpl, caps, handler, mapping_data);
+ }
+
+ g_assert_not_reached ();
+ return NULL;
+ }
+
+ static void
+ mxf_up_update_descriptor (MXFMetadataFileDescriptor * d, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf)
+ {
+ return;
+ }
+
+ static void
+ mxf_up_get_edit_rate (MXFMetadataFileDescriptor * a, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf, MXFMetadataSourcePackage * package,
+ MXFMetadataTimelineTrack * track, MXFFraction * edit_rate)
+ {
+ edit_rate->n = a->sample_rate.n;
+ edit_rate->d = a->sample_rate.d;
+ }
+
+ static guint32
+ mxf_up_get_track_number_template (MXFMetadataFileDescriptor * a,
+ GstCaps * caps, gpointer mapping_data)
+ {
+ return (0x15 << 24) | (0x02 << 8);
+ }
+
+ static MXFEssenceElementWriter mxf_up_essence_element_writer = {
+ mxf_up_get_descriptor,
+ mxf_up_update_descriptor,
+ mxf_up_get_edit_rate,
+ mxf_up_get_track_number_template,
+ NULL,
+ {{0,}}
+ };
+
+ void
+ mxf_up_init (void)
+ {
++ GstCaps *tmp = NULL;
+ mxf_essence_element_handler_register (&mxf_up_essence_element_handler);
+ mxf_up_essence_element_writer.pad_template =
+ gst_pad_template_new ("up_video_sink_%u", GST_PAD_SINK, GST_PAD_REQUEST,
++ tmp = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("RGB") "; "
+ GST_VIDEO_CAPS_MAKE ("BGR") "; "
+ GST_VIDEO_CAPS_MAKE ("RGBx") "; "
+ GST_VIDEO_CAPS_MAKE ("xRGB") "; "
+ GST_VIDEO_CAPS_MAKE ("BGRx") "; "
+ GST_VIDEO_CAPS_MAKE ("xBGR") "; "
+ GST_VIDEO_CAPS_MAKE ("ARGB") "; "
+ GST_VIDEO_CAPS_MAKE ("RGBA") "; "
+ GST_VIDEO_CAPS_MAKE ("ABGR") "; "
+ GST_VIDEO_CAPS_MAKE ("BGRA") "; "
+ GST_VIDEO_CAPS_MAKE ("AYUV") "; "
+ GST_VIDEO_CAPS_MAKE ("v308") "; "
+ GST_VIDEO_CAPS_MAKE ("UYVY") "; " GST_VIDEO_CAPS_MAKE ("YUY2")));
++ gst_caps_unref (tmp);
+
+ memcpy (&mxf_up_essence_element_writer.data_definition,
+ mxf_metadata_track_identifier_get (MXF_METADATA_TRACK_PICTURE_ESSENCE),
+ 16);
+ mxf_essence_element_writer_register (&mxf_up_essence_element_writer);
+ }
--- /dev/null
- gst_caps_from_string ("video/x-dnxhd, width = " GST_VIDEO_SIZE_RANGE
+ /* GStreamer
+ * Copyright (C) 2008-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* Implementation of SMPTE S2019-4 - Mapping VC-3 coding units into the MXF
+ * Generic Container
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <string.h>
+
+ #include "mxfvc3.h"
+ #include "mxfessence.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (mxf_debug);
+ #define GST_CAT_DEFAULT mxf_debug
+
+ static const MXFUL picture_essence_coding_vc3_avid = { {
+ 0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x01, 0x0e, 0x04, 0x02, 0x01,
+ 0x02,
+ 0x04, 0x01, 0x00}
+ };
+
+ static gboolean
+ mxf_is_vc3_essence_track (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, FALSE);
+
+ if (track->parent.descriptor == NULL)
+ return FALSE;
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ MXFMetadataFileDescriptor *d = track->parent.descriptor[i];
+ MXFUL *key;
+
+ if (!d)
+ continue;
+
+ key = &d->essence_container;
+ /* SMPTE S2019-4 7 */
+ if (mxf_is_generic_container_essence_container_label (key) &&
+ key->u[12] == 0x02 && key->u[13] == 0x11 &&
+ (key->u[14] == 0x01 || key->u[14] == 0x02)) {
+ return TRUE;
+ } else if (mxf_is_avid_essence_container_label (key)) {
+ MXFMetadataGenericPictureEssenceDescriptor *p;
+
+ if (!MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (d))
+ return FALSE;
+ p = MXF_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (d);
+
+ key = &p->picture_essence_coding;
+ if (mxf_ul_is_subclass (&picture_essence_coding_vc3_avid, key))
+ return TRUE;
+ }
+ }
+
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ mxf_vc3_handle_essence_element (const MXFUL * key, GstBuffer * buffer,
+ GstCaps * caps,
+ MXFMetadataTimelineTrack * track,
+ gpointer mapping_data, GstBuffer ** outbuf)
+ {
+ *outbuf = buffer;
+
+ /* SMPTE 2019-4 6.1 */
+ if (key->u[12] != 0x15 || (key->u[14] != 0x05 && key->u[14] != 0x06
+ && key->u[14] != 0x0C && key->u[14] != 0x0D)) {
+ GST_ERROR ("Invalid VC-3 essence element");
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static MXFEssenceWrapping
+ mxf_vc3_get_track_wrapping (const MXFMetadataTimelineTrack * track)
+ {
+ guint i;
+
+ g_return_val_if_fail (track != NULL, MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (!track->parent.descriptor[i])
+ continue;
+
+ if (!MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i])
+ && !(MXF_IS_METADATA_FILE_DESCRIPTOR (track->parent.descriptor[i])
+ && !MXF_IS_METADATA_MULTIPLE_DESCRIPTOR (track->
+ parent.descriptor[i])))
+ continue;
+
+ switch (track->parent.descriptor[i]->essence_container.u[14]) {
+ case 0x01:
+ return MXF_ESSENCE_WRAPPING_FRAME_WRAPPING;
+ break;
+ case 0x02:
+ return MXF_ESSENCE_WRAPPING_CLIP_WRAPPING;
+ break;
+ default:
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ break;
+ }
+ }
+
+ return MXF_ESSENCE_WRAPPING_CUSTOM_WRAPPING;
+ }
+
+ static GstCaps *
+ mxf_vc3_create_caps (MXFMetadataTimelineTrack * track, GstTagList ** tags,
+ gboolean * intra_only, MXFEssenceElementHandleFunc * handler,
+ gpointer * mapping_data)
+ {
+ MXFMetadataFileDescriptor *f = NULL;
+ MXFMetadataGenericPictureEssenceDescriptor *p = NULL;
+ guint i;
+ GstCaps *caps = NULL;
+
+ g_return_val_if_fail (track != NULL, NULL);
+
+ if (track->parent.descriptor == NULL) {
+ GST_ERROR ("No descriptor found for this track");
+ return NULL;
+ }
+
+ for (i = 0; i < track->parent.n_descriptor; i++) {
+ if (!track->parent.descriptor[i])
+ continue;
+
+ if (MXF_IS_METADATA_GENERIC_PICTURE_ESSENCE_DESCRIPTOR (track->
+ parent.descriptor[i])) {
+ p = (MXFMetadataGenericPictureEssenceDescriptor *) track->parent.
+ descriptor[i];
+ f = track->parent.descriptor[i];
+ break;
+ } else if (MXF_IS_METADATA_FILE_DESCRIPTOR (track->parent.descriptor[i]) &&
+ !MXF_IS_METADATA_MULTIPLE_DESCRIPTOR (track->parent.descriptor[i])) {
+ f = track->parent.descriptor[i];
+ }
+ }
+
+ if (!f) {
+ GST_ERROR ("No descriptor found for this track");
+ return NULL;
+ }
+
+ *handler = mxf_vc3_handle_essence_element;
+
+ caps = gst_caps_new_empty_simple ("video/x-dnxhd");
+ if (p) {
+ mxf_metadata_generic_picture_essence_descriptor_set_caps (p, caps);
+ } else {
+ GST_WARNING ("Only a generic file descriptor found");
+ }
+
+ if (!*tags)
+ *tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (*tags, GST_TAG_MERGE_APPEND, GST_TAG_VIDEO_CODEC,
+ "VC-3 Video", NULL);
+ *intra_only = TRUE;
+
+ return caps;
+ }
+
+ static const MXFEssenceElementHandler mxf_vc3_essence_element_handler = {
+ mxf_is_vc3_essence_track,
+ mxf_vc3_get_track_wrapping,
+ mxf_vc3_create_caps
+ };
+
+ static GstFlowReturn
+ mxf_vc3_write_func (GstBuffer * buffer, gpointer mapping_data,
+ GstAdapter * adapter, GstBuffer ** outbuf, gboolean flush)
+ {
+ *outbuf = buffer;
+ return GST_FLOW_OK;
+ }
+
+ /* FIXME: In which version was this added? Byte 7, assuming version 10 */
+ static const guint8 vc3_essence_container_ul[] = {
+ 0x06, 0x0e, 0x2b, 0x34, 0x04, 0x01, 0x01, 0x0A,
+ 0x0d, 0x01, 0x03, 0x01, 0x02, 0x11, 0x01, 0x00
+ };
+
+ static MXFMetadataFileDescriptor *
+ mxf_vc3_get_descriptor (GstPadTemplate * tmpl, GstCaps * caps,
+ MXFEssenceElementWriteFunc * handler, gpointer * mapping_data)
+ {
+ MXFMetadataCDCIPictureEssenceDescriptor *ret;
+ GstStructure *s;
+
+ s = gst_caps_get_structure (caps, 0);
+ if (strcmp (gst_structure_get_name (s), "video/x-dnxhd") != 0) {
+ GST_ERROR ("Invalid caps %" GST_PTR_FORMAT, caps);
+ return NULL;
+ }
+
+ ret = (MXFMetadataCDCIPictureEssenceDescriptor *)
+ g_object_new (MXF_TYPE_METADATA_CDCI_PICTURE_ESSENCE_DESCRIPTOR, NULL);
+
+ memcpy (&ret->parent.parent.essence_container, &vc3_essence_container_ul, 16);
+
+ if (!mxf_metadata_generic_picture_essence_descriptor_from_caps (&ret->parent,
+ caps)) {
+ g_object_unref (ret);
+ return NULL;
+ }
+
+ *handler = mxf_vc3_write_func;
+
+ return (MXFMetadataFileDescriptor *) ret;
+ }
+
+ static void
+ mxf_vc3_update_descriptor (MXFMetadataFileDescriptor * d, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf)
+ {
+ return;
+ }
+
+ static void
+ mxf_vc3_get_edit_rate (MXFMetadataFileDescriptor * a, GstCaps * caps,
+ gpointer mapping_data, GstBuffer * buf, MXFMetadataSourcePackage * package,
+ MXFMetadataTimelineTrack * track, MXFFraction * edit_rate)
+ {
+ edit_rate->n = a->sample_rate.n;
+ edit_rate->d = a->sample_rate.d;
+ }
+
+ static guint32
+ mxf_vc3_get_track_number_template (MXFMetadataFileDescriptor * a,
+ GstCaps * caps, gpointer mapping_data)
+ {
+ return (0x15 << 24) | (0x0C << 8);
+ }
+
+ static MXFEssenceElementWriter mxf_vc3_essence_element_writer = {
+ mxf_vc3_get_descriptor,
+ mxf_vc3_update_descriptor,
+ mxf_vc3_get_edit_rate,
+ mxf_vc3_get_track_number_template,
+ NULL,
+ {{0,}}
+ };
+
+ void
+ mxf_vc3_init (void)
+ {
++ GstCaps *tmp = NULL;
+ mxf_essence_element_handler_register (&mxf_vc3_essence_element_handler);
+
+ mxf_vc3_essence_element_writer.pad_template =
+ gst_pad_template_new ("vc3_video_sink_%u", GST_PAD_SINK, GST_PAD_REQUEST,
++ tmp = gst_caps_from_string ("video/x-dnxhd, width = " GST_VIDEO_SIZE_RANGE
+ ", height = " GST_VIDEO_SIZE_RANGE ", framerate = "
+ GST_VIDEO_FPS_RANGE));
++ gst_caps_unref (tmp);
+ memcpy (&mxf_vc3_essence_element_writer.data_definition,
+ mxf_metadata_track_identifier_get (MXF_METADATA_TRACK_PICTURE_ESSENCE),
+ 16);
+ mxf_essence_element_writer_register (&mxf_vc3_essence_element_writer);
+ }
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim dot taymans at gmail dot com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:element-sdpdemux
+ * @title: sdpdemux
+ *
+ * sdpdemux currently understands SDP as the input format of the session description.
+ * For each stream listed in the SDP a new stream_\%u pad will be created
+ * with caps derived from the SDP media description. This is a caps of mime type
+ * "application/x-rtp" that can be connected to any available RTP depayloader
+ * element.
+ *
+ * sdpdemux will internally instantiate an RTP session manager element
+ * that will handle the RTCP messages to and from the server, jitter removal,
+ * packet reordering along with providing a clock for the pipeline.
+ *
+ * sdpdemux acts like a live element and will therefore only generate data in the
+ * PLAYING state.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 souphttpsrc location=http://some.server/session.sdp ! sdpdemux ! fakesink
+ * ]| Establish a connection to an HTTP server that contains an SDP session description
+ * that gets parsed by sdpdemux and send the raw RTP packets to a fakesink.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstsdpdemux.h"
+
+ #include <gst/rtp/gstrtppayloads.h>
+ #include <gst/sdp/gstsdpmessage.h>
+
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+
GST_DEBUG_CATEGORY_STATIC (sdpdemux_debug);
#define GST_CAT_DEFAULT (sdpdemux_debug)

/* always-present sink pad: receives the complete SDP description as a text
 * stream, terminated by EOS */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/sdp"));

/* one sometimes source pad per SDP media entry, exposing raw RTP that a
 * depayloader can consume */
static GstStaticPadTemplate rtptemplate = GST_STATIC_PAD_TEMPLATE ("stream_%u",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS ("application/x-rtp"));

enum
{
  /* FILL ME */
  LAST_SIGNAL
};

/* property defaults */
#define DEFAULT_DEBUG FALSE
#define DEFAULT_TIMEOUT 10000000        /* 10 seconds, in microseconds */
#define DEFAULT_LATENCY_MS 200
#define DEFAULT_REDIRECT TRUE

enum
{
  PROP_0,
  PROP_DEBUG,
  PROP_TIMEOUT,
  PROP_LATENCY,
  PROP_REDIRECT
};

static void gst_sdp_demux_finalize (GObject * object);

static void gst_sdp_demux_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_sdp_demux_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static GstStateChangeReturn gst_sdp_demux_change_state (GstElement * element,
    GstStateChange transition);
static void gst_sdp_demux_handle_message (GstBin * bin, GstMessage * message);

static void gst_sdp_demux_stream_push_event (GstSDPDemux * demux,
    GstSDPStream * stream, GstEvent * event);

static gboolean gst_sdp_demux_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static GstFlowReturn gst_sdp_demux_sink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer);

/*static guint gst_sdp_demux_signals[LAST_SIGNAL] = { 0 }; */

#define gst_sdp_demux_parent_class parent_class
G_DEFINE_TYPE (GstSDPDemux, gst_sdp_demux, GST_TYPE_BIN);
GST_ELEMENT_REGISTER_DEFINE (sdpdemux, "sdpdemux", GST_RANK_NONE,
    GST_TYPE_SDP_DEMUX);
+
/* class init: install properties, pad templates, element metadata and the
 * state-change / bin-message vmethods */
static void
gst_sdp_demux_class_init (GstSDPDemuxClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstBinClass *gstbin_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstbin_class = (GstBinClass *) klass;

  gobject_class->set_property = gst_sdp_demux_set_property;
  gobject_class->get_property = gst_sdp_demux_get_property;

  gobject_class->finalize = gst_sdp_demux_finalize;

  g_object_class_install_property (gobject_class, PROP_DEBUG,
      g_param_spec_boolean ("debug", "Debug",
          "Dump request and response messages to stdout",
          DEFAULT_DEBUG,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TIMEOUT,
      g_param_spec_uint64 ("timeout", "Timeout",
          "Fail transport after UDP timeout microseconds (0 = disabled)",
          0, G_MAXUINT64, DEFAULT_TIMEOUT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_LATENCY,
      g_param_spec_uint ("latency", "Buffer latency in ms",
          "Amount of ms to buffer", 0, G_MAXUINT, DEFAULT_LATENCY_MS,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_REDIRECT,
      g_param_spec_boolean ("redirect", "Redirect",
          "Sends a redirection message instead of using a custom session element",
          DEFAULT_REDIRECT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
  gst_element_class_add_static_pad_template (gstelement_class, &rtptemplate);

  gst_element_class_set_static_metadata (gstelement_class, "SDP session setup",
      "Codec/Demuxer/Network/RTP",
      "Receive data over the network via SDP",
      "Wim Taymans <wim.taymans@gmail.com>");

  gstelement_class->change_state = gst_sdp_demux_change_state;

  /* intercept bin messages so UDP timeouts and udpsrc errors can be handled
   * (see gst_sdp_demux_handle_message) */
  gstbin_class->handle_message = gst_sdp_demux_handle_message;

  GST_DEBUG_CATEGORY_INIT (sdpdemux_debug, "sdpdemux", 0, "SDP demux");
}
+
/* instance init: create the always sink pad and the adapter that accumulates
 * the incoming SDP text until EOS */
static void
gst_sdp_demux_init (GstSDPDemux * demux)
{
  demux->sinkpad = gst_pad_new_from_static_template (&sinktemplate, "sink");
  gst_pad_set_event_function (demux->sinkpad,
      GST_DEBUG_FUNCPTR (gst_sdp_demux_sink_event));
  gst_pad_set_chain_function (demux->sinkpad,
      GST_DEBUG_FUNCPTR (gst_sdp_demux_sink_chain));
  gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);

  /* protects the streaming thread in interleaved mode or the polling
   * thread in UDP mode. */
  g_rec_mutex_init (&demux->stream_rec_lock);

  /* buffers the SDP description until gst_sdp_demux_start parses it */
  demux->adapter = gst_adapter_new ();
}
+
+ static void
+ gst_sdp_demux_finalize (GObject * object)
+ {
+ GstSDPDemux *demux;
+
+ demux = GST_SDP_DEMUX (object);
+
+ /* free locks */
+ g_rec_mutex_clear (&demux->stream_rec_lock);
+
+ g_object_unref (demux->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_sdp_demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstSDPDemux *demux;
+
+ demux = GST_SDP_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_DEBUG:
+ demux->debug = g_value_get_boolean (value);
+ break;
+ case PROP_TIMEOUT:
+ demux->udp_timeout = g_value_get_uint64 (value);
+ break;
+ case PROP_LATENCY:
+ demux->latency = g_value_get_uint (value);
+ break;
+ case PROP_REDIRECT:
+ demux->redirect = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_sdp_demux_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstSDPDemux *demux;
+
+ demux = GST_SDP_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_DEBUG:
+ g_value_set_boolean (value, demux->debug);
+ break;
+ case PROP_TIMEOUT:
+ g_value_set_uint64 (value, demux->udp_timeout);
+ break;
+ case PROP_LATENCY:
+ g_value_set_uint (value, demux->latency);
+ break;
+ case PROP_REDIRECT:
+ g_value_set_boolean (value, demux->redirect);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static gint
+ find_stream_by_id (GstSDPStream * stream, gconstpointer a)
+ {
+ gint id = GPOINTER_TO_INT (a);
+
+ if (stream->id == id)
+ return 0;
+
+ return -1;
+ }
+
+ static gint
+ find_stream_by_pt (GstSDPStream * stream, gconstpointer a)
+ {
+ gint pt = GPOINTER_TO_INT (a);
+
+ if (stream->pt == pt)
+ return 0;
+
+ return -1;
+ }
+
+ static gint
+ find_stream_by_udpsrc (GstSDPStream * stream, gconstpointer a)
+ {
+ GstElement *src = (GstElement *) a;
+
+ if (stream->udpsrc[0] == src)
+ return 0;
+ if (stream->udpsrc[1] == src)
+ return 0;
+
+ return -1;
+ }
+
+ static GstSDPStream *
+ find_stream (GstSDPDemux * demux, gconstpointer data, gconstpointer func)
+ {
+ GList *lstream;
+
+ /* find and get stream */
+ if ((lstream =
+ g_list_find_custom (demux->streams, data, (GCompareFunc) func)))
+ return (GstSDPStream *) lstream->data;
+
+ return NULL;
+ }
+
/* Tear down and free one stream: drop its caps, shut down and remove its
 * udpsrc/udpsink children, and remove its ghost source pad if it was exposed.
 * Elements are set to NULL state before gst_bin_remove drops the bin's ref. */
static void
gst_sdp_demux_stream_free (GstSDPDemux * demux, GstSDPStream * stream)
{
  gint i;

  GST_DEBUG_OBJECT (demux, "free stream %p", stream);

  if (stream->caps)
    gst_caps_unref (stream->caps);

  /* udpsrc[0] carries RTP, udpsrc[1] carries RTCP */
  for (i = 0; i < 2; i++) {
    GstElement *udpsrc = stream->udpsrc[i];

    if (udpsrc) {
      gst_element_set_state (udpsrc, GST_STATE_NULL);
      gst_bin_remove (GST_BIN_CAST (demux), udpsrc);
      stream->udpsrc[i] = NULL;
    }
  }
  if (stream->udpsink) {
    gst_element_set_state (stream->udpsink, GST_STATE_NULL);
    gst_bin_remove (GST_BIN_CAST (demux), stream->udpsink);
    stream->udpsink = NULL;
  }
  if (stream->srcpad) {
    gst_pad_set_active (stream->srcpad, FALSE);
    /* only pads that were exposed need removing from the element */
    if (stream->added) {
      gst_element_remove_pad (GST_ELEMENT_CAST (demux), stream->srcpad);
      stream->added = FALSE;
    }
    stream->srcpad = NULL;
  }
  g_free (stream);
}
+
+ static gboolean
+ is_multicast_address (const gchar * host_name)
+ {
+ GInetAddress *addr;
+ GResolver *resolver = NULL;
+ gboolean ret = FALSE;
+
+ addr = g_inet_address_new_from_string (host_name);
+ if (!addr) {
+ GList *results;
+
+ resolver = g_resolver_get_default ();
+ results = g_resolver_lookup_by_name (resolver, host_name, NULL, NULL);
+ if (!results)
+ goto out;
+ addr = G_INET_ADDRESS (g_object_ref (results->data));
+
+ g_resolver_free_addresses (results);
+ }
+ g_assert (addr != NULL);
+
+ ret = g_inet_address_get_is_multicast (addr);
+
+ out:
+ if (resolver)
+ g_object_unref (resolver);
+ if (addr)
+ g_object_unref (addr);
+ return ret;
+ }
+
/* Create a GstSDPStream for media entry @idx of @sdp and append it to
 * demux->streams. Derives RTP caps from the media description, resolves the
 * destination address and RTP/RTCP ports. Returns NULL (and frees the partial
 * stream) when the media has no usable connection. */
static GstSDPStream *
gst_sdp_demux_create_stream (GstSDPDemux * demux, GstSDPMessage * sdp, gint idx)
{
  GstSDPStream *stream;
  const gchar *payload;
  const GstSDPMedia *media;
  const GstSDPConnection *conn;

  /* get media, should not return NULL */
  media = gst_sdp_message_get_media (sdp, idx);
  if (media == NULL)
    return NULL;

  stream = g_new0 (GstSDPStream, 1);
  stream->parent = demux;
  /* we mark the pad as not linked, we will mark it as OK when we add the pad to
   * the element. */
  stream->last_ret = GST_FLOW_OK;
  stream->added = FALSE;
  stream->disabled = FALSE;
  stream->id = demux->numstreams++;
  stream->eos = FALSE;

  /* we must have a payload. No payload means we cannot create caps */
  /* FIXME, handle multiple formats. */
  if ((payload = gst_sdp_media_get_format (media, 0))) {
    GstStructure *s;

    stream->pt = atoi (payload);
    /* convert caps */
    stream->caps = gst_sdp_media_get_caps_from_media (media, stream->pt);

    s = gst_caps_get_structure (stream->caps, 0);
    gst_structure_set_name (s, "application/x-rtp");

    if (stream->pt >= 96) {
      /* If we have a dynamic payload type, see if we have a stream with the
       * same payload number. If there is one, they are part of the same
       * container and we only need to add one pad. */
      if (find_stream (demux, GINT_TO_POINTER (stream->pt),
              (gpointer) find_stream_by_pt)) {
        stream->container = TRUE;
      }
    }
  }

  /* prefer the media-level connection, fall back to the session-level one */
  if (gst_sdp_media_connections_len (media) > 0) {
    if (!(conn = gst_sdp_media_get_connection (media, 0))) {
      /* We should not reach this based on the check above */
      goto no_connection;
    }
  } else {
    if (!(conn = gst_sdp_message_get_connection (sdp))) {
      goto no_connection;
    }
  }

  if (!conn->address)
    goto no_connection;

  stream->destination = conn->address;
  stream->ttl = conn->ttl;
  stream->multicast = is_multicast_address (stream->destination);

  stream->rtp_port = gst_sdp_media_get_port (media);
  if (gst_sdp_media_get_attribute_val (media, "rtcp")) {
    /* FIXME, RFC 3605: the rtcp attribute value is not parsed yet, so both
     * branches currently use rtp_port + 1 */
    stream->rtcp_port = stream->rtp_port + 1;
  } else {
    stream->rtcp_port = stream->rtp_port + 1;
  }

  GST_DEBUG_OBJECT (demux, "stream %d, (%p)", stream->id, stream);
  GST_DEBUG_OBJECT (demux, " pt: %d", stream->pt);
  GST_DEBUG_OBJECT (demux, " container: %d", stream->container);
  GST_DEBUG_OBJECT (demux, " caps: %" GST_PTR_FORMAT, stream->caps);

  /* we keep track of all streams */
  demux->streams = g_list_append (demux->streams, stream);

  return stream;

  /* ERRORS */
no_connection:
  {
    gst_sdp_demux_stream_free (demux, stream);
    return NULL;
  }
}
+
/* Full teardown: free all streams, disconnect every signal handler from the
 * session manager, shut the session down and remove it from the bin. Called
 * when going back down in state. */
static void
gst_sdp_demux_cleanup (GstSDPDemux * demux)
{
  GList *walk;

  GST_DEBUG_OBJECT (demux, "cleanup");

  for (walk = demux->streams; walk; walk = g_list_next (walk)) {
    GstSDPStream *stream = (GstSDPStream *) walk->data;

    gst_sdp_demux_stream_free (demux, stream);
  }
  g_list_free (demux->streams);
  demux->streams = NULL;
  if (demux->session) {
    /* disconnect handlers before destroying the session so no callback can
     * fire into a half-torn-down demuxer */
    if (demux->session_sig_id) {
      g_signal_handler_disconnect (demux->session, demux->session_sig_id);
      demux->session_sig_id = 0;
    }
    if (demux->session_nmp_id) {
      g_signal_handler_disconnect (demux->session, demux->session_nmp_id);
      demux->session_nmp_id = 0;
    }
    if (demux->session_ptmap_id) {
      g_signal_handler_disconnect (demux->session, demux->session_ptmap_id);
      demux->session_ptmap_id = 0;
    }
    gst_element_set_state (demux->session, GST_STATE_NULL);
    gst_bin_remove (GST_BIN_CAST (demux), demux->session);
    demux->session = NULL;
  }
  demux->numstreams = 0;
}
+
/* this callback is called when the session manager generated a new src pad with
 * payloaded RTP packets. We simply ghost the pad here. Fires no-more-pads once
 * every enabled, non-container stream has its pad exposed. */
static void
new_session_pad (GstElement * session, GstPad * pad, GstSDPDemux * demux)
{
  gchar *name, *pad_name;
  GstPadTemplate *template;
  guint id, ssrc, pt;
  GList *lstream;
  GstSDPStream *stream;
  gboolean all_added;

  GST_DEBUG_OBJECT (demux, "got new session pad %" GST_PTR_FORMAT, pad);

  GST_SDP_STREAM_LOCK (demux);
  /* find stream: the rtpbin pad name encodes session id, SSRC and PT */
  name = gst_object_get_name (GST_OBJECT_CAST (pad));
  if (sscanf (name, "recv_rtp_src_%u_%u_%u", &id, &ssrc, &pt) != 3)
    goto unknown_stream;

  GST_DEBUG_OBJECT (demux, "stream: %u, SSRC %u, PT %u", id, ssrc, pt);

  stream =
      find_stream (demux, GUINT_TO_POINTER (id), (gpointer) find_stream_by_id);
  if (stream == NULL)
    goto unknown_stream;

  stream->ssrc = ssrc;

  /* no need for a timeout anymore now */
  g_object_set (G_OBJECT (stream->udpsrc[0]), "timeout", (guint64) 0, NULL);

  pad_name = g_strdup_printf ("stream_%u", stream->id);
  /* create a new pad we will use to stream to */
  template = gst_static_pad_template_get (&rtptemplate);
  stream->srcpad = gst_ghost_pad_new_from_template (pad_name, pad, template);
  gst_object_unref (template);
  g_free (name);
  g_free (pad_name);

  stream->added = TRUE;
  gst_pad_set_active (stream->srcpad, TRUE);
  gst_element_add_pad (GST_ELEMENT_CAST (demux), stream->srcpad);

  /* check if we added all streams */
  all_added = TRUE;
  for (lstream = demux->streams; lstream; lstream = g_list_next (lstream)) {
    stream = (GstSDPStream *) lstream->data;
    /* a container stream only needs one pad added. Also disabled streams don't
     * count */
    if (!stream->container && !stream->disabled && !stream->added) {
      all_added = FALSE;
      break;
    }
  }
  /* drop the lock before emitting no-more-pads to avoid re-entrancy issues */
  GST_SDP_STREAM_UNLOCK (demux);

  if (all_added) {
    GST_DEBUG_OBJECT (demux, "We added all streams");
    /* when we get here, all stream are added and we can fire the no-more-pads
     * signal. */
    gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
  }

  return;

  /* ERRORS */
unknown_stream:
  {
    GST_DEBUG_OBJECT (demux, "ignoring unknown stream");
    GST_SDP_STREAM_UNLOCK (demux);
    g_free (name);
    return;
  }
}
+
+ static void
+ rtsp_session_pad_added (GstElement * session, GstPad * pad, GstSDPDemux * demux)
+ {
+ GstPad *srcpad = NULL;
+ gchar *name;
+
+ GST_DEBUG_OBJECT (demux, "got new session pad %" GST_PTR_FORMAT, pad);
+
+ name = gst_pad_get_name (pad);
+ srcpad = gst_ghost_pad_new (name, pad);
+ g_free (name);
+
+ gst_pad_set_active (srcpad, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (demux), srcpad);
+ }
+
+ static void
+ rtsp_session_no_more_pads (GstElement * session, GstSDPDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "got no-more-pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ }
+
+ static GstCaps *
+ request_pt_map (GstElement * sess, guint session, guint pt, GstSDPDemux * demux)
+ {
+ GstSDPStream *stream;
+ GstCaps *caps;
+
+ GST_DEBUG_OBJECT (demux, "getting pt map for pt %d in session %d", pt,
+ session);
+
+ GST_SDP_STREAM_LOCK (demux);
+ stream =
+ find_stream (demux, GINT_TO_POINTER (session),
+ (gpointer) find_stream_by_id);
+ if (!stream)
+ goto unknown_stream;
+
+ caps = stream->caps;
+ if (caps)
+ gst_caps_ref (caps);
+ GST_SDP_STREAM_UNLOCK (demux);
+
+ return caps;
+
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (demux, "unknown stream %d", session);
+ GST_SDP_STREAM_UNLOCK (demux);
+ return NULL;
+ }
+ }
+
+ static void
+ gst_sdp_demux_do_stream_eos (GstSDPDemux * demux, guint session, guint32 ssrc)
+ {
+ GstSDPStream *stream;
+
+ GST_DEBUG_OBJECT (demux, "setting stream for session %u to EOS", session);
+
+ /* get stream for session */
+ stream =
+ find_stream (demux, GINT_TO_POINTER (session),
+ (gpointer) find_stream_by_id);
+ if (!stream)
+ goto unknown_stream;
+
+ if (stream->eos)
+ goto was_eos;
+
+ if (stream->ssrc != ssrc)
+ goto wrong_ssrc;
+
+ stream->eos = TRUE;
+ gst_sdp_demux_stream_push_event (demux, stream, gst_event_new_eos ());
+ return;
+
+ /* ERRORS */
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (demux, "unknown stream for session %u", session);
+ return;
+ }
+ was_eos:
+ {
+ GST_DEBUG_OBJECT (demux, "stream for session %u was already EOS", session);
+ return;
+ }
+ wrong_ssrc:
+ {
+ GST_DEBUG_OBJECT (demux, "unkown SSRC %08x for session %u", ssrc, session);
+ return;
+ }
+ }
+
+ static void
+ on_bye_ssrc (GstElement * manager, guint session, guint32 ssrc,
+ GstSDPDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "SSRC %08x in session %u received BYE", ssrc,
+ session);
+
+ gst_sdp_demux_do_stream_eos (demux, session, ssrc);
+ }
+
+ static void
+ on_timeout (GstElement * manager, guint session, guint32 ssrc,
+ GstSDPDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "SSRC %08x in session %u timed out", ssrc, session);
+
+ gst_sdp_demux_do_stream_eos (demux, session, ssrc);
+ }
+
/* try to get and configure a manager. When @rtsp_sdp is non-NULL the SDP
 * described an RTSP session and an rtspsrc is used with that location;
 * otherwise an rtpbin session manager is created and wired up. Returns FALSE
 * when the needed element factory is missing. */
static gboolean
gst_sdp_demux_configure_manager (GstSDPDemux * demux, char *rtsp_sdp)
{
  /* configure the session manager */
  if (rtsp_sdp != NULL) {
    if (!(demux->session = gst_element_factory_make ("rtspsrc", NULL)))
      goto rtspsrc_failed;

    g_object_set (demux->session, "location", rtsp_sdp, NULL);

    GST_DEBUG_OBJECT (demux, "connect to signals on rtspsrc");
    demux->session_sig_id =
        g_signal_connect (demux->session, "pad-added",
        (GCallback) rtsp_session_pad_added, demux);
    demux->session_nmp_id =
        g_signal_connect (demux->session, "no-more-pads",
        (GCallback) rtsp_session_no_more_pads, demux);
  } else {
    if (!(demux->session = gst_element_factory_make ("rtpbin", NULL)))
      goto manager_failed;

    /* connect to signals if we did not already do so */
    GST_DEBUG_OBJECT (demux, "connect to signals on session manager");
    demux->session_sig_id =
        g_signal_connect (demux->session, "pad-added",
        (GCallback) new_session_pad, demux);
    demux->session_ptmap_id =
        g_signal_connect (demux->session, "request-pt-map",
        (GCallback) request_pt_map, demux);
    /* both BYE and plain timeouts end the stream the same way */
    g_signal_connect (demux->session, "on-bye-ssrc", (GCallback) on_bye_ssrc,
        demux);
    g_signal_connect (demux->session, "on-bye-timeout", (GCallback) on_timeout,
        demux);
    g_signal_connect (demux->session, "on-timeout", (GCallback) on_timeout,
        demux);
  }

  g_object_set (demux->session, "latency", demux->latency, NULL);

  /* we manage this element */
  gst_bin_add (GST_BIN_CAST (demux), demux->session);

  return TRUE;

  /* ERRORS */
manager_failed:
  {
    GST_DEBUG_OBJECT (demux, "no session manager element gstrtpbin found");
    return FALSE;
  }
rtspsrc_failed:
  {
    GST_DEBUG_OBJECT (demux, "no manager element rtspsrc found");
    return FALSE;
  }
}
+
/* Create udpsrc elements for the stream's RTP and RTCP ports and link them to
 * freshly requested recv_rtp_sink/recv_rtcp_sink pads on the session manager.
 * Non-multicast destinations listen on 0.0.0.0. Returns FALSE when no udpsrc
 * element can be made. */
static gboolean
gst_sdp_demux_stream_configure_udp (GstSDPDemux * demux, GstSDPStream * stream)
{
  gchar *uri, *name;
  const gchar *destination;
  GstPad *pad;

  GST_DEBUG_OBJECT (demux, "creating UDP sources for multicast");

  /* if the destination is not a multicast address, we just want to listen on
   * our local ports */
  if (!stream->multicast)
    destination = "0.0.0.0";
  else
    destination = stream->destination;

  /* creating UDP source */
  if (stream->rtp_port != -1) {
    GST_DEBUG_OBJECT (demux, "receiving RTP from %s:%d", destination,
        stream->rtp_port);

    uri = g_strdup_printf ("udp://%s:%d", destination, stream->rtp_port);
    stream->udpsrc[0] =
        gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
    g_free (uri);
    if (stream->udpsrc[0] == NULL)
      goto no_element;

    /* take ownership */
    gst_bin_add (GST_BIN_CAST (demux), stream->udpsrc[0]);

    GST_DEBUG_OBJECT (demux,
        "setting up UDP source with timeout %" G_GINT64_FORMAT,
        demux->udp_timeout);

    /* configure a timeout on the UDP port. When the timeout message is
     * posted, we assume UDP transport is not possible. */
    g_object_set (G_OBJECT (stream->udpsrc[0]), "timeout",
        demux->udp_timeout * 1000, NULL);

    /* get output pad of the UDP source. */
    pad = gst_element_get_static_pad (stream->udpsrc[0], "src");

    name = g_strdup_printf ("recv_rtp_sink_%u", stream->id);
    stream->channelpad[0] =
        gst_element_request_pad_simple (demux->session, name);
    g_free (name);

    GST_DEBUG_OBJECT (demux, "connecting RTP source 0 to manager");
    /* configure for UDP delivery, we need to connect the UDP pads to
     * the session plugin. */
    gst_pad_link (pad, stream->channelpad[0]);
    gst_object_unref (pad);

    /* change state */
    gst_element_set_state (stream->udpsrc[0], GST_STATE_PAUSED);
  }

  /* creating another UDP source for RTCP */
  if (stream->rtcp_port != -1) {
    GST_DEBUG_OBJECT (demux, "receiving RTCP from %s:%d", destination,
        stream->rtcp_port);
    uri = g_strdup_printf ("udp://%s:%d", destination, stream->rtcp_port);
    stream->udpsrc[1] =
        gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
    g_free (uri);
    if (stream->udpsrc[1] == NULL)
      goto no_element;

    /* take ownership */
    gst_bin_add (GST_BIN_CAST (demux), stream->udpsrc[1]);

    GST_DEBUG_OBJECT (demux, "connecting RTCP source to manager");

    name = g_strdup_printf ("recv_rtcp_sink_%u", stream->id);
    stream->channelpad[1] =
        gst_element_request_pad_simple (demux->session, name);
    g_free (name);

    pad = gst_element_get_static_pad (stream->udpsrc[1], "src");
    gst_pad_link (pad, stream->channelpad[1]);
    gst_object_unref (pad);

    gst_element_set_state (stream->udpsrc[1], GST_STATE_PAUSED);
  }
  return TRUE;

  /* ERRORS */
no_element:
  {
    GST_DEBUG_OBJECT (demux, "no UDP source element found");
    return FALSE;
  }
}
+
+ /* configure the UDP sink back to the server for status reports */
+ static gboolean
+ gst_sdp_demux_stream_configure_udp_sink (GstSDPDemux * demux,
+ GstSDPStream * stream)
+ {
+ GstPad *pad, *sinkpad;
+ gint port;
+ GSocket *socket;
+ gchar *destination, *uri, *name;
+
+ /* get destination and port */
+ port = stream->rtcp_port;
+ destination = stream->destination;
+
+ GST_DEBUG_OBJECT (demux, "configure UDP sink for %s:%d", destination, port);
+
+ uri = g_strdup_printf ("udp://%s:%d", destination, port);
+ stream->udpsink = gst_element_make_from_uri (GST_URI_SINK, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsink == NULL)
+ goto no_sink_element;
+
+ /* we clear all destinations because we don't really know where to send the
+ * RTCP to and we want to avoid sending it to our own ports.
+ * FIXME when we get an RTCP packet from the sender, we could look at its
+ * source port and address and try to send RTCP there. */
+ if (!stream->multicast)
+ g_signal_emit_by_name (stream->udpsink, "clear");
+
+ g_object_set (G_OBJECT (stream->udpsink), "auto-multicast", FALSE, NULL);
+ g_object_set (G_OBJECT (stream->udpsink), "loop", FALSE, NULL);
+ /* no sync needed */
+ g_object_set (G_OBJECT (stream->udpsink), "sync", FALSE, NULL);
+ /* no async state changes needed */
+ g_object_set (G_OBJECT (stream->udpsink), "async", FALSE, NULL);
+
+ if (stream->udpsrc[1]) {
+ /* configure socket, we give it the same UDP socket as the udpsrc for RTCP
+ * because some servers check the port number of where it sends RTCP to identify
+ * the RTCP packets it receives */
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "used_socket", &socket, NULL);
+ GST_DEBUG_OBJECT (demux, "UDP src has socket %p", socket);
+ /* configure socket and make sure udpsink does not close it when shutting
+ * down, it belongs to udpsrc after all. */
+ g_object_set (G_OBJECT (stream->udpsink), "socket", socket, NULL);
+ g_object_set (G_OBJECT (stream->udpsink), "close-socket", FALSE, NULL);
+ g_object_unref (socket);
+ }
+
+ /* we keep this playing always */
+ gst_element_set_locked_state (stream->udpsink, TRUE);
+ gst_element_set_state (stream->udpsink, GST_STATE_PLAYING);
+
+ gst_bin_add (GST_BIN_CAST (demux), stream->udpsink);
+
+ /* get session RTCP pad */
+ name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
+ pad = gst_element_request_pad_simple (demux->session, name);
+ g_free (name);
+
+ /* and link */
+ if (pad) {
+ sinkpad = gst_element_get_static_pad (stream->udpsink, "sink");
+ gst_pad_link (pad, sinkpad);
+ gst_object_unref (pad);
+ gst_object_unref (sinkpad);
+ } else {
+ /* not very fatal, we just won't be able to send RTCP */
+ GST_WARNING_OBJECT (demux, "could not get session RTCP pad");
+ }
++ gst_object_unref(pad);
+
+ return TRUE;
+
+ /* ERRORS */
+ no_sink_element:
+ {
+ GST_DEBUG_OBJECT (demux, "no UDP sink element found");
+ return FALSE;
+ }
+ }
+
+ static GstFlowReturn
+ gst_sdp_demux_combine_flows (GstSDPDemux * demux, GstSDPStream * stream,
+ GstFlowReturn ret)
+ {
+ GList *streams;
+
+ /* store the value */
+ stream->last_ret = ret;
+
+ /* if it's success we can return the value right away */
+ if (ret == GST_FLOW_OK)
+ goto done;
+
+ /* any other error that is not-linked can be returned right
+ * away */
+ if (ret != GST_FLOW_NOT_LINKED)
+ goto done;
+
+ /* only return NOT_LINKED if all other pads returned NOT_LINKED */
+ for (streams = demux->streams; streams; streams = g_list_next (streams)) {
+ GstSDPStream *ostream = (GstSDPStream *) streams->data;
+
+ ret = ostream->last_ret;
+ /* some other return value (must be SUCCESS but we can return
+ * other values as well) */
+ if (ret != GST_FLOW_NOT_LINKED)
+ goto done;
+ }
+ /* if we get here, all other pads were unlinked and we return
+ * NOT_LINKED then */
+ done:
+ return ret;
+ }
+
+ static void
+ gst_sdp_demux_stream_push_event (GstSDPDemux * demux, GstSDPStream * stream,
+ GstEvent * event)
+ {
+ /* only streams that have a connection to the outside world */
+ if (stream->srcpad == NULL)
+ goto done;
+
+ if (stream->channelpad[0]) {
+ gst_event_ref (event);
+ gst_pad_send_event (stream->channelpad[0], event);
+ }
+
+ if (stream->channelpad[1]) {
+ gst_event_ref (event);
+ gst_pad_send_event (stream->channelpad[1], event);
+ }
+
+ done:
+ gst_event_unref (event);
+ }
+
/* GstBin message handler. Turns the first udpsrc timeout into an element
 * error, converts udpsrc errors into combined-flow decisions (a single
 * not-linked stream is tolerated), and forwards everything else to the
 * parent class. Messages that are consumed here are unreffed. */
static void
gst_sdp_demux_handle_message (GstBin * bin, GstMessage * message)
{
  GstSDPDemux *demux;

  demux = GST_SDP_DEMUX (bin);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:
    {
      const GstStructure *s = gst_message_get_structure (message);

      if (gst_structure_has_name (s, "GstUDPSrcTimeout")) {
        gboolean ignore_timeout;

        GST_DEBUG_OBJECT (bin, "timeout on UDP port");

        /* atomically test-and-set the flag under the object lock */
        GST_OBJECT_LOCK (demux);
        ignore_timeout = demux->ignore_timeout;
        demux->ignore_timeout = TRUE;
        GST_OBJECT_UNLOCK (demux);

        /* we only act on the first udp timeout message, others are irrelevant
         * and can be ignored. */
        if (ignore_timeout)
          gst_message_unref (message);
        else {
          GST_ELEMENT_ERROR (demux, RESOURCE, READ, (NULL),
              ("Could not receive any UDP packets for %.4f seconds, maybe your "
                  "firewall is blocking it.",
                  gst_guint64_to_gdouble (demux->udp_timeout / 1000000.0)));
        }
        return;
      }
      GST_BIN_CLASS (parent_class)->handle_message (bin, message);
      break;
    }
    case GST_MESSAGE_ERROR:
    {
      GstObject *udpsrc;
      GstSDPStream *stream;
      GstFlowReturn ret;

      udpsrc = GST_MESSAGE_SRC (message);

      GST_DEBUG_OBJECT (demux, "got error from %s", GST_ELEMENT_NAME (udpsrc));

      stream = find_stream (demux, udpsrc, (gpointer) find_stream_by_udpsrc);
      /* fatal but not our message, forward */
      if (!stream)
        goto forward;

      /* we ignore the RTCP udpsrc */
      if (stream->udpsrc[1] == GST_ELEMENT_CAST (udpsrc))
        goto done;

      /* if we get error messages from the udp sources, that's not a problem as
       * long as not all of them error out. We also don't really know what the
       * problem is, the message does not give enough detail... */
      ret = gst_sdp_demux_combine_flows (demux, stream, GST_FLOW_NOT_LINKED);
      GST_DEBUG_OBJECT (demux, "combined flows: %s", gst_flow_get_name (ret));
      if (ret != GST_FLOW_OK)
        goto forward;

    done:
      /* swallow the message */
      gst_message_unref (message);
      break;

    forward:
      GST_BIN_CLASS (parent_class)->handle_message (bin, message);
      break;
    }
    default:
    {
      GST_BIN_CLASS (parent_class)->handle_message (bin, message);
      break;
    }
  }
}
+
/* Parse the buffered SDP text and set the pipeline up. If the SDP carries a
 * fully-qualified rtsp:// control attribute, either post a redirect message
 * (redirect property, default) or hand the session to an internal rtspsrc.
 * Otherwise create a stream with UDP sources/sink per media entry, wire them
 * into the session manager and bring everything to the target state.
 * Called with the SDP complete (on EOS); returns FALSE on any error. */
static gboolean
gst_sdp_demux_start (GstSDPDemux * demux)
{
  guint8 *data = NULL;
  guint size;
  gint i, n_streams;
  GstSDPMessage sdp = { 0 };
  GstSDPStream *stream = NULL;
  GList *walk;
  gchar *uri = NULL;
  GstStateChangeReturn ret;

  /* grab the lock so that no state change can interfere */
  GST_SDP_STREAM_LOCK (demux);

  GST_DEBUG_OBJECT (demux, "parse SDP...");

  size = gst_adapter_available (demux->adapter);
  if (size == 0)
    goto no_data;

  data = gst_adapter_take (demux->adapter, size);

  gst_sdp_message_init (&sdp);
  if (gst_sdp_message_parse_buffer (data, size, &sdp) != GST_SDP_OK)
    goto could_not_parse;

  if (demux->debug)
    gst_sdp_message_dump (&sdp);

  /* maybe this is plain RTSP DESCRIBE rtsp and we should redirect */
  /* look for rtsp control url */
  {
    const gchar *control;

    /* session-level control attributes first */
    for (i = 0;; i++) {
      control = gst_sdp_message_get_attribute_val_n (&sdp, "control", i);
      if (control == NULL)
        break;

      /* only take fully qualified urls */
      if (g_str_has_prefix (control, "rtsp://"))
        break;
    }
    if (!control) {
      gint idx;

      /* try to find non-aggregate control */
      n_streams = gst_sdp_message_medias_len (&sdp);

      for (idx = 0; idx < n_streams; idx++) {
        const GstSDPMedia *media;

        /* get media, should not return NULL */
        media = gst_sdp_message_get_media (&sdp, idx);
        if (media == NULL)
          break;

        for (i = 0;; i++) {
          control = gst_sdp_media_get_attribute_val_n (media, "control", i);
          if (control == NULL)
            break;

          /* only take fully qualified urls */
          if (g_str_has_prefix (control, "rtsp://"))
            break;
        }
        /* this media has no control, exit */
        if (!control)
          break;
      }
    }

    if (control) {
      /* we have RTSP now */
      uri = gst_sdp_message_as_uri ("rtsp-sdp", &sdp);

      if (demux->redirect) {
        GST_INFO_OBJECT (demux, "redirect to %s", uri);

        gst_element_post_message (GST_ELEMENT_CAST (demux),
            gst_message_new_element (GST_OBJECT_CAST (demux),
                gst_structure_new ("redirect",
                    "new-location", G_TYPE_STRING, uri, NULL)));
        goto sent_redirect;
      }
    }
  }

  /* we get here when we didn't do a redirect */

  /* try to get and configure a manager */
  if (!gst_sdp_demux_configure_manager (demux, uri))
    goto no_manager;
  if (!uri) {
    /* create streams with UDP sources and sinks */
    n_streams = gst_sdp_message_medias_len (&sdp);
    for (i = 0; i < n_streams; i++) {
      stream = gst_sdp_demux_create_stream (demux, &sdp, i);

      if (!stream)
        continue;

      GST_DEBUG_OBJECT (demux, "configuring transport for stream %p", stream);

      if (!gst_sdp_demux_stream_configure_udp (demux, stream))
        goto transport_failed;
      if (!gst_sdp_demux_stream_configure_udp_sink (demux, stream))
        goto transport_failed;
    }

    if (!demux->streams)
      goto no_streams;
  }

  /* set target state on session manager */
  /* setting rtspsrc to PLAYING may cause it to lose that target state
   * along the way due to no-preroll udpsrc elements, so ...
   * do it in two stages here (similar to other elements) */
  if (demux->target > GST_STATE_PAUSED) {
    ret = gst_element_set_state (demux->session, GST_STATE_PAUSED);
    if (ret == GST_STATE_CHANGE_FAILURE)
      goto start_session_failure;
  }
  ret = gst_element_set_state (demux->session, demux->target);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto start_session_failure;

  if (!uri) {
    /* activate all streams */
    for (walk = demux->streams; walk; walk = g_list_next (walk)) {
      stream = (GstSDPStream *) walk->data;

      /* configure target state on udp sources */
      gst_element_set_state (stream->udpsrc[0], demux->target);
      gst_element_set_state (stream->udpsrc[1], demux->target);
    }
  }
  GST_SDP_STREAM_UNLOCK (demux);
  gst_sdp_message_uninit (&sdp);
  g_free (data);

  return TRUE;

  /* ERRORS */
done:
  {
    /* shared error cleanup: every error label below jumps here */
    GST_SDP_STREAM_UNLOCK (demux);
    gst_sdp_message_uninit (&sdp);
    g_free (data);
    return FALSE;
  }
transport_failed:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Could not create RTP stream transport."));
    goto done;
  }
no_manager:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Could not create RTP session manager."));
    goto done;
  }
no_data:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Empty SDP message."));
    goto done;
  }
could_not_parse:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Could not parse SDP message."));
    goto done;
  }
no_streams:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("No streams in SDP message."));
    goto done;
  }
sent_redirect:
  {
    /* avoid hanging if redirect not handled */
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Sent RTSP redirect."));
    goto done;
  }
start_session_failure:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
        ("Could not start RTP session manager."));
    gst_element_set_state (demux->session, GST_STATE_NULL);
    gst_bin_remove (GST_BIN_CAST (demux), demux->session);
    demux->session = NULL;
    goto done;
  }
}
+
+ /* Sink pad event handler.
+  *
+  * The SDP text is only complete once EOS arrives on the sink pad, so EOS is
+  * the trigger to parse the collected data and build the pipeline.  Every
+  * event is consumed here and never forwarded downstream. */
+ static gboolean
+ gst_sdp_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+   GstSDPDemux *demux = GST_SDP_DEMUX (parent);
+   gboolean res = TRUE;
+ 
+   if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+     /* the whole SDP message is now in the adapter: parse it */
+     res = gst_sdp_demux_start (demux);
+   }
+   gst_event_unref (event);
+ 
+   return res;
+ }
+
+ /* Sink pad chain handler.
+  *
+  * Buffers carrying SDP data are simply accumulated in the adapter; all real
+  * processing is deferred until the EOS event is received (see
+  * gst_sdp_demux_sink_event). */
+ static GstFlowReturn
+ gst_sdp_demux_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+   GstSDPDemux *demux = GST_SDP_DEMUX (parent);
+ 
+   gst_adapter_push (demux->adapter, buffer);
+ 
+   return GST_FLOW_OK;
+ }
+
+ /* GstElement state-change handler.
+  *
+  * Before chaining up: records the target state and, on READY->PAUSED,
+  * clears any previously collected SDP data.  After chaining up: forces
+  * NO_PREROLL in the paused states (the udpsrc elements used here are
+  * live/no-preroll, see the comment in the start function above) and tears
+  * everything down on PAUSED->READY.  The whole function runs with the
+  * stream lock held. */
+ static GstStateChangeReturn
+ gst_sdp_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+   GstSDPDemux *demux;
+   GstStateChangeReturn ret;
+ 
+   demux = GST_SDP_DEMUX (element);
+ 
+   GST_SDP_STREAM_LOCK (demux);
+ 
+   switch (transition) {
+     case GST_STATE_CHANGE_NULL_TO_READY:
+       break;
+     case GST_STATE_CHANGE_READY_TO_PAUSED:
+       /* first attempt, don't ignore timeouts */
+       gst_adapter_clear (demux->adapter);
+       demux->ignore_timeout = FALSE;
+       demux->target = GST_STATE_PAUSED;
+       break;
+     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+       demux->target = GST_STATE_PLAYING;
+       break;
+     default:
+       break;
+   }
+ 
+   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+   if (ret == GST_STATE_CHANGE_FAILURE)
+     goto done;
+ 
+   switch (transition) {
+     case GST_STATE_CHANGE_READY_TO_PAUSED:
+       /* report NO_PREROLL: live sources never preroll */
+       ret = GST_STATE_CHANGE_NO_PREROLL;
+       break;
+     case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+       ret = GST_STATE_CHANGE_NO_PREROLL;
+       demux->target = GST_STATE_PAUSED;
+       break;
+     case GST_STATE_CHANGE_PAUSED_TO_READY:
+       /* drop all streams and the session manager */
+       gst_sdp_demux_cleanup (demux);
+       break;
+     case GST_STATE_CHANGE_READY_TO_NULL:
+       break;
+     default:
+       break;
+   }
+ 
+ done:
+   GST_SDP_STREAM_UNLOCK (demux);
+ 
+   return ret;
+ }
--- /dev/null
+ /*
+ * GStreamer
+ * Copyright (C) 2016 Vivia Nikolaidou <vivia@toolsonair.com>
+ *
+ * Based on gstvideoframe-audiolevel.c:
+ * Copyright (C) 2015 Vivia Nikolaidou <vivia@toolsonair.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-avwait
+ * @title: avwait
+ *
+ * This element will drop all buffers until a specific timecode or running
+ * time has been reached. It will then pass-through both audio and video,
+ * starting from that specific timecode or running time, making sure that
+ * audio starts as early as possible after the video (or at the same time as
+ * the video). In the "video-first" mode, it only drops audio buffers until
+ * video has started.
+ *
+ * The "recording" property acts essentially like a valve connected before
+ * everything else. If recording is FALSE, all buffers are dropped regardless
+ * of settings. If recording is TRUE, the other settings (mode,
+ * target-timecode, target-running-time, etc) are taken into account. Audio
+ * will always start and end together with the video, as long as the stream
+ * itself doesn't start too late or end too early.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location="my_file" ! decodebin name=d ! "audio/x-raw" ! avwait name=l target-timecode-str="00:00:04:00" ! autoaudiosink d. ! "video/x-raw" ! timecodestamper ! l. l. ! queue ! timeoverlay time-mode=time-code ! autovideosink
+ * ]|
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstavwait.h"
+
+ #define GST_CAT_DEFAULT gst_avwait_debug
+ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+ /* Static pad templates: one always-present sink/src pair for audio and one
+  * for video.  Caps are bare audio/x-raw and video/x-raw; actual caps are
+  * proxied between the matching sink and src pads (see gst_avwait_init). */
+ static GstStaticPadTemplate audio_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("asink",
+     GST_PAD_SINK,
+     GST_PAD_ALWAYS,
+     GST_STATIC_CAPS ("audio/x-raw")
+     );
+ 
+ static GstStaticPadTemplate audio_src_template =
+ GST_STATIC_PAD_TEMPLATE ("asrc",
+     GST_PAD_SRC,
+     GST_PAD_ALWAYS,
+     GST_STATIC_CAPS ("audio/x-raw")
+     );
+ 
+ static GstStaticPadTemplate video_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("vsink",
+     GST_PAD_SINK,
+     GST_PAD_ALWAYS,
+     GST_STATIC_CAPS ("video/x-raw")
+     );
+ 
+ static GstStaticPadTemplate video_src_template =
+ GST_STATIC_PAD_TEMPLATE ("vsrc",
+     GST_PAD_SRC,
+     GST_PAD_ALWAYS,
+     GST_STATIC_CAPS ("video/x-raw")
+     );
+
+ /* GObject type boilerplate and plugin registration for the avwait element. */
+ #define parent_class gst_avwait_parent_class
+ G_DEFINE_TYPE (GstAvWait, gst_avwait, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE (avwait, "avwait", GST_RANK_NONE, GST_TYPE_AVWAIT);
+ 
+ /* GObject property IDs; PROP_0 is the GObject-reserved placeholder. */
+ enum
+ {
+   PROP_0,
+   PROP_TARGET_TIME_CODE,
+   PROP_TARGET_TIME_CODE_STRING,
+   PROP_TARGET_RUNNING_TIME,
+   PROP_END_TIME_CODE,
+   PROP_END_RUNNING_TIME,
+   PROP_RECORDING,
+   PROP_MODE
+ };
+ 
+ /* Property defaults. */
+ #define DEFAULT_TARGET_TIMECODE_STR "00:00:00:00"
+ #define DEFAULT_TARGET_RUNNING_TIME GST_CLOCK_TIME_NONE
+ #define DEFAULT_END_RUNNING_TIME GST_CLOCK_TIME_NONE
+ #define DEFAULT_MODE MODE_TIMECODE
+ 
+ /* flags for self->must_send_end_message */
+ enum
+ {
+   END_MESSAGE_NORMAL = 0,
+   END_MESSAGE_STREAM_ENDED = 1,
+   END_MESSAGE_VIDEO_PUSHED = 2,
+   END_MESSAGE_AUDIO_PUSHED = 4
+ };
+ 
+ /* Forward declarations for the pad functions and GObject vmethods
+  * installed in class_init/init below. */
+ static void gst_avwait_set_property (GObject * object,
+     guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_avwait_get_property (GObject * object,
+     guint prop_id, GValue * value, GParamSpec * pspec);
+ 
+ static GstFlowReturn gst_avwait_asink_chain (GstPad * pad,
+     GstObject * parent, GstBuffer * inbuf);
+ static GstFlowReturn gst_avwait_vsink_chain (GstPad * pad,
+     GstObject * parent, GstBuffer * inbuf);
+ static gboolean gst_avwait_asink_event (GstPad * pad,
+     GstObject * parent, GstEvent * event);
+ static gboolean gst_avwait_vsink_event (GstPad * pad,
+     GstObject * parent, GstEvent * event);
+ static GstIterator *gst_avwait_iterate_internal_links (GstPad *
+     pad, GstObject * parent);
+ 
+ static void gst_avwait_finalize (GObject * gobject);
+ 
+ static GstStateChangeReturn gst_avwait_change_state (GstElement *
+     element, GstStateChange transition);
+
+ /* Registers (once) and returns the GType for the GstAvWaitMode enum.
+  *
+  * Uses g_once_init_enter()/g_once_init_leave() so the one-shot registration
+  * is safe if two threads request the type concurrently; the previous plain
+  * "if (gtype == 0)" check was a data race that could register the enum
+  * twice. */
+ static GType
+ gst_avwait_mode_get_type (void)
+ {
+   static gsize gtype = 0;
+ 
+   if (g_once_init_enter (&gtype)) {
+     static const GEnumValue values[] = {
+       {MODE_TIMECODE, "time code (default)", "timecode"},
+       {MODE_RUNNING_TIME, "running time", "running-time"},
+       {MODE_VIDEO_FIRST, "video first", "video-first"},
+       {0, NULL, NULL}
+     };
+     GType tmp;
+ 
+     tmp = g_enum_register_static ("GstAvWaitMode", values);
+     g_once_init_leave (&gtype, tmp);
+   }
+   return (GType) gtype;
+ }
+
+ /* Class initializer: installs element metadata, the GObject properties,
+  * the finalize/change_state vmethods and the four static pad templates. */
+ static void
+ gst_avwait_class_init (GstAvWaitClass * klass)
+ {
+   GstElementClass *gstelement_class;
+   GObjectClass *gobject_class = (GObjectClass *) klass;
+ 
+   GST_DEBUG_CATEGORY_INIT (gst_avwait_debug, "avwait", 0, "avwait");
+ 
+   gstelement_class = (GstElementClass *) klass;
+ 
+   gst_element_class_set_static_metadata (gstelement_class,
+       "Timecode Wait", "Filter/Audio/Video",
+       "Drops all audio/video until a specific timecode or running time has been reached",
+       "Vivia Nikolaidou <vivia@toolsonair.com>");
+ 
+   gobject_class->set_property = gst_avwait_set_property;
+   gobject_class->get_property = gst_avwait_get_property;
+ 
+   /* string form of the target timecode; parsed in set_property */
+   g_object_class_install_property (gobject_class, PROP_TARGET_TIME_CODE_STRING,
+       g_param_spec_string ("target-timecode-string", "Target timecode (string)",
+           "Timecode to wait for in timecode mode (string). Must take the form 00:00:00:00",
+           DEFAULT_TARGET_TIMECODE_STR,
+           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ 
+   /* boxed GstVideoTimeCode form of the same target */
+   g_object_class_install_property (gobject_class, PROP_TARGET_TIME_CODE,
+       g_param_spec_boxed ("target-timecode", "Target timecode (object)",
+           "Timecode to wait for in timecode mode (object)",
+           GST_TYPE_VIDEO_TIME_CODE,
+           GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+           G_PARAM_STATIC_STRINGS));
+ 
+   g_object_class_install_property (gobject_class, PROP_TARGET_RUNNING_TIME,
+       g_param_spec_uint64 ("target-running-time", "Target running time",
+           "Running time to wait for in running-time mode",
+           0, G_MAXUINT64,
+           DEFAULT_TARGET_RUNNING_TIME,
+           GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+           G_PARAM_STATIC_STRINGS));
+ 
+   g_object_class_install_property (gobject_class, PROP_MODE,
+       g_param_spec_enum ("mode", "Mode",
+           "Operation mode: What to wait for",
+           GST_TYPE_AVWAIT_MODE,
+           DEFAULT_MODE,
+           GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+           G_PARAM_STATIC_STRINGS));
+ 
+   g_object_class_install_property (gobject_class, PROP_END_TIME_CODE,
+       g_param_spec_boxed ("end-timecode", "End timecode (object)",
+           "Timecode to end at in timecode mode (object)",
+           GST_TYPE_VIDEO_TIME_CODE,
+           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ 
+   g_object_class_install_property (gobject_class, PROP_END_RUNNING_TIME,
+       g_param_spec_uint64 ("end-running-time", "End running time",
+           "Running time to end at in running-time mode",
+           0, G_MAXUINT64,
+           DEFAULT_END_RUNNING_TIME,
+           GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
+           G_PARAM_STATIC_STRINGS));
+ 
+   /* master on/off switch: when FALSE everything is dropped */
+   g_object_class_install_property (gobject_class, PROP_RECORDING,
+       g_param_spec_boolean ("recording",
+           "Recording state",
+           "Whether the element is stopped or recording. "
+           "If set to FALSE, all buffers will be dropped regardless of settings.",
+           TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ 
+   gobject_class->finalize = gst_avwait_finalize;
+   gstelement_class->change_state = gst_avwait_change_state;
+ 
+   gst_element_class_add_static_pad_template (gstelement_class,
+       &audio_src_template);
+   gst_element_class_add_static_pad_template (gstelement_class,
+       &audio_sink_template);
+ 
+   gst_element_class_add_static_pad_template (gstelement_class,
+       &video_src_template);
+   gst_element_class_add_static_pad_template (gstelement_class,
+       &video_sink_template);
+ 
+   /* expose the mode enum to introspection/documentation tooling */
+   gst_type_mark_as_plugin_api (GST_TYPE_AVWAIT_MODE, 0);
+ }
+
+ /* Instance initializer: creates the four pads with their chain/event/
+  * internal-link functions, enables caps/allocation/scheduling proxying,
+  * and sets every state field to its initial value.
+  *
+  * Fix: end_running_time is now initialized from DEFAULT_END_RUNNING_TIME
+  * instead of DEFAULT_TARGET_RUNNING_TIME.  Both expand to
+  * GST_CLOCK_TIME_NONE so behavior is unchanged, but the right constant is
+  * used so the two defaults can diverge safely in the future. */
+ static void
+ gst_avwait_init (GstAvWait * self)
+ {
+   /* audio sink pad */
+   self->asinkpad =
+       gst_pad_new_from_static_template (&audio_sink_template, "asink");
+   gst_pad_set_chain_function (self->asinkpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_asink_chain));
+   gst_pad_set_event_function (self->asinkpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_asink_event));
+   gst_pad_set_iterate_internal_links_function (self->asinkpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
+   gst_element_add_pad (GST_ELEMENT (self), self->asinkpad);
+ 
+   /* video sink pad */
+   self->vsinkpad =
+       gst_pad_new_from_static_template (&video_sink_template, "vsink");
+   gst_pad_set_chain_function (self->vsinkpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_vsink_chain));
+   gst_pad_set_event_function (self->vsinkpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_vsink_event));
+   gst_pad_set_iterate_internal_links_function (self->vsinkpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
+   gst_element_add_pad (GST_ELEMENT (self), self->vsinkpad);
+ 
+   /* audio source pad */
+   self->asrcpad =
+       gst_pad_new_from_static_template (&audio_src_template, "asrc");
+   gst_pad_set_iterate_internal_links_function (self->asrcpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
+   gst_element_add_pad (GST_ELEMENT (self), self->asrcpad);
+ 
+   /* video source pad */
+   self->vsrcpad =
+       gst_pad_new_from_static_template (&video_src_template, "vsrc");
+   gst_pad_set_iterate_internal_links_function (self->vsrcpad,
+       GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
+   gst_element_add_pad (GST_ELEMENT (self), self->vsrcpad);
+ 
+   /* pass caps/allocation/scheduling queries straight through */
+   GST_PAD_SET_PROXY_CAPS (self->asinkpad);
+   GST_PAD_SET_PROXY_ALLOCATION (self->asinkpad);
+ 
+   GST_PAD_SET_PROXY_CAPS (self->asrcpad);
+   GST_PAD_SET_PROXY_SCHEDULING (self->asrcpad);
+ 
+   GST_PAD_SET_PROXY_CAPS (self->vsinkpad);
+   GST_PAD_SET_PROXY_ALLOCATION (self->vsinkpad);
+ 
+   GST_PAD_SET_PROXY_CAPS (self->vsrcpad);
+   GST_PAD_SET_PROXY_SCHEDULING (self->vsrcpad);
+ 
+   self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+   self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
+   self->first_audio_running_time = GST_CLOCK_TIME_NONE;
+   self->last_seen_tc = NULL;
+ 
+   self->video_eos_flag = FALSE;
+   self->audio_eos_flag = FALSE;
+   self->video_flush_flag = FALSE;
+   self->audio_flush_flag = FALSE;
+   self->shutdown_flag = FALSE;
+   /* start out dropping until the target is reached */
+   self->dropping = TRUE;
+   self->tc = gst_video_time_code_new_empty ();
+   self->end_tc = NULL;
+   self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
+   self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+   self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
+   self->recording = TRUE;
+ 
+   self->target_running_time = DEFAULT_TARGET_RUNNING_TIME;
+   self->end_running_time = DEFAULT_END_RUNNING_TIME;
+   self->mode = DEFAULT_MODE;
+ 
+   gst_video_info_init (&self->vinfo);
+   g_mutex_init (&self->mutex);
+   g_cond_init (&self->cond);
+   g_cond_init (&self->audio_cond);
+ }
+
+ /* Posts an "avwait-status" element message on the bus reporting whether we
+  * are currently dropping and the running time at which the change happened.
+  * Posting is expected to always succeed; failure aborts. */
+ static void
+ gst_avwait_send_element_message (GstAvWait * self, gboolean dropping,
+     GstClockTime running_time)
+ {
+   GstStructure *s;
+   GstMessage *msg;
+ 
+   s = gst_structure_new ("avwait-status",
+       "dropping", G_TYPE_BOOLEAN, dropping,
+       "running-time", GST_TYPE_CLOCK_TIME, running_time, NULL);
+   msg = gst_message_new_element (GST_OBJECT (self), s);
+ 
+   if (!gst_element_post_message (GST_ELEMENT (self), msg)) {
+     GST_ERROR_OBJECT (self, "Unable to send element message!");
+     g_assert_not_reached ();
+   }
+ }
+
+ /* GstElement state-change handler.
+  *
+  * Going down (PAUSED->READY) first raises shutdown_flag and wakes both
+  * condition variables so any chain function blocked in a g_cond_wait can
+  * exit; after chaining up, the timing state is reset and a final
+  * "dropping" element message is posted if we were passing data through.
+  * Going up (READY->PAUSED) clears all the flags for a fresh run.
+  *
+  * Fix: the READY_TO_PAUSED case previously fell through into the (empty)
+  * default case without a break statement; the missing break is now
+  * explicit.  Behavior is unchanged. */
+ static GstStateChangeReturn
+ gst_avwait_change_state (GstElement * element, GstStateChange transition)
+ {
+   GstStateChangeReturn ret;
+   GstAvWait *self = GST_AVWAIT (element);
+ 
+   switch (transition) {
+     case GST_STATE_CHANGE_PAUSED_TO_READY:
+       /* unblock any waiting streaming threads */
+       g_mutex_lock (&self->mutex);
+       self->shutdown_flag = TRUE;
+       g_cond_signal (&self->cond);
+       g_cond_signal (&self->audio_cond);
+       g_mutex_unlock (&self->mutex);
+       break;
+     case GST_STATE_CHANGE_READY_TO_PAUSED:
+       g_mutex_lock (&self->mutex);
+       self->shutdown_flag = FALSE;
+       self->video_eos_flag = FALSE;
+       self->audio_eos_flag = FALSE;
+       self->video_flush_flag = FALSE;
+       self->audio_flush_flag = FALSE;
+       self->must_send_end_message = END_MESSAGE_NORMAL;
+       g_mutex_unlock (&self->mutex);
+       break;
+     default:
+       break;
+   }
+ 
+   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ 
+   switch (transition) {
+     case GST_STATE_CHANGE_PAUSED_TO_READY:{
+       gboolean send_message = FALSE;
+ 
+       g_mutex_lock (&self->mutex);
+       /* in running-time mode the targets are properties, keep them */
+       if (self->mode != MODE_RUNNING_TIME) {
+         GST_DEBUG_OBJECT (self, "First time reset in paused to ready");
+         self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+         self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
+         self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+         self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
+       }
+       if (!self->dropping) {
+         self->dropping = TRUE;
+         send_message = TRUE;
+       }
+       gst_segment_init (&self->asegment, GST_FORMAT_UNDEFINED);
+       self->asegment.position = GST_CLOCK_TIME_NONE;
+       gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
+       self->vsegment.position = GST_CLOCK_TIME_NONE;
+       gst_video_info_init (&self->vinfo);
+       self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
+       self->first_audio_running_time = GST_CLOCK_TIME_NONE;
+       if (self->last_seen_tc)
+         gst_video_time_code_free (self->last_seen_tc);
+       self->last_seen_tc = NULL;
+       g_mutex_unlock (&self->mutex);
+ 
+       /* post outside the lock */
+       if (send_message)
+         gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
+       break;
+     }
+     default:
+       break;
+   }
+ 
+   return ret;
+ }
+
+ /* GObject finalize: frees the two boxed timecodes and tears down the
+  * mutex/condition variables created in gst_avwait_init(). */
+ static void
+ gst_avwait_finalize (GObject * object)
+ {
+   GstAvWait *self = GST_AVWAIT (object);
+ 
+   g_clear_pointer (&self->tc, gst_video_time_code_free);
+   g_clear_pointer (&self->end_tc, gst_video_time_code_free);
+ 
+   g_mutex_clear (&self->mutex);
+   g_cond_clear (&self->cond);
+   g_cond_clear (&self->audio_cond);
+ 
+   G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* GObject get_property: every read takes the object mutex because the
+  * fields are also updated from the streaming threads. */
+ static void
+ gst_avwait_get_property (GObject * object, guint prop_id,
+     GValue * value, GParamSpec * pspec)
+ {
+   GstAvWait *self = GST_AVWAIT (object);
+ 
+   switch (prop_id) {
+     case PROP_TARGET_TIME_CODE_STRING:{
+       g_mutex_lock (&self->mutex);
+       if (self->tc)
+         /* take_string: the GValue owns the freshly allocated string */
+         g_value_take_string (value, gst_video_time_code_to_string (self->tc));
+       else
+         g_value_set_string (value, DEFAULT_TARGET_TIMECODE_STR);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_TARGET_TIME_CODE:{
+       g_mutex_lock (&self->mutex);
+       g_value_set_boxed (value, self->tc);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_END_TIME_CODE:{
+       g_mutex_lock (&self->mutex);
+       g_value_set_boxed (value, self->end_tc);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_TARGET_RUNNING_TIME:{
+       g_mutex_lock (&self->mutex);
+       g_value_set_uint64 (value, self->target_running_time);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_END_RUNNING_TIME:{
+       g_mutex_lock (&self->mutex);
+       g_value_set_uint64 (value, self->end_running_time);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_RECORDING:{
+       g_mutex_lock (&self->mutex);
+       g_value_set_boolean (value, self->recording);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_MODE:{
+       g_mutex_lock (&self->mutex);
+       g_value_set_enum (value, self->mode);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+       break;
+   }
+ }
+
+ /* GObject set_property: every write takes the object mutex because the
+  * fields are also read from the streaming threads.
+  *
+  * Fix: the "target-timecode-string" parser checked only parts[3] == NULL
+  * after g_strsplit().  For an input with fewer than four ':'-separated
+  * fields the returned NULL-terminated vector is shorter than four entries,
+  * so reading parts[3] walked past the terminator (out-of-bounds read).
+  * The check now short-circuits at the first missing field. */
+ static void
+ gst_avwait_set_property (GObject * object, guint prop_id,
+     const GValue * value, GParamSpec * pspec)
+ {
+   GstAvWait *self = GST_AVWAIT (object);
+ 
+   switch (prop_id) {
+     case PROP_TARGET_TIME_CODE_STRING:{
+       gchar **parts;
+       const gchar *tc_str;
+       guint hours, minutes, seconds, frames;
+ 
+       tc_str = g_value_get_string (value);
+       parts = g_strsplit (tc_str, ":", 4);
+       /* require all four fields; stop at the first NULL so we never read
+        * past the end of the vector */
+       if (!parts || !parts[0] || !parts[1] || !parts[2] || !parts[3]) {
+         GST_ERROR_OBJECT (self,
+             "Error: Could not parse timecode %s. Please input a timecode in the form 00:00:00:00",
+             tc_str);
+         g_strfreev (parts);
+         return;
+       }
+       hours = g_ascii_strtoll (parts[0], NULL, 10);
+       minutes = g_ascii_strtoll (parts[1], NULL, 10);
+       seconds = g_ascii_strtoll (parts[2], NULL, 10);
+       frames = g_ascii_strtoll (parts[3], NULL, 10);
+       g_mutex_lock (&self->mutex);
+       if (self->tc)
+         gst_video_time_code_free (self->tc);
+       self->tc = gst_video_time_code_new (0, 1, NULL, 0, hours, minutes,
+           seconds, frames, 0);
+       /* inherit the framerate from the video caps if we already have them */
+       if (GST_VIDEO_INFO_FORMAT (&self->vinfo) != GST_VIDEO_FORMAT_UNKNOWN
+           && self->vinfo.fps_n != 0) {
+         self->tc->config.fps_n = self->vinfo.fps_n;
+         self->tc->config.fps_d = self->vinfo.fps_d;
+       }
+       g_mutex_unlock (&self->mutex);
+       g_strfreev (parts);
+       break;
+     }
+     case PROP_TARGET_TIME_CODE:{
+       g_mutex_lock (&self->mutex);
+       if (self->tc)
+         gst_video_time_code_free (self->tc);
+       self->tc = g_value_dup_boxed (value);
+       /* fill in the framerate from the video caps if it is missing */
+       if (self->tc && self->tc->config.fps_n == 0
+           && GST_VIDEO_INFO_FORMAT (&self->vinfo) !=
+           GST_VIDEO_FORMAT_UNKNOWN && self->vinfo.fps_n != 0) {
+         self->tc->config.fps_n = self->vinfo.fps_n;
+         self->tc->config.fps_d = self->vinfo.fps_d;
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_END_TIME_CODE:{
+       g_mutex_lock (&self->mutex);
+       if (self->end_tc)
+         gst_video_time_code_free (self->end_tc);
+       self->end_tc = g_value_dup_boxed (value);
+       /* fill in the framerate from the video caps if it is missing */
+       if (self->end_tc && self->end_tc->config.fps_n == 0
+           && GST_VIDEO_INFO_FORMAT (&self->vinfo) !=
+           GST_VIDEO_FORMAT_UNKNOWN && self->vinfo.fps_n != 0) {
+         self->end_tc->config.fps_n = self->vinfo.fps_n;
+         self->end_tc->config.fps_d = self->vinfo.fps_d;
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_TARGET_RUNNING_TIME:{
+       g_mutex_lock (&self->mutex);
+       self->target_running_time = g_value_get_uint64 (value);
+       if (self->mode == MODE_RUNNING_TIME) {
+         /* target moved into the future: go back to dropping */
+         if (self->target_running_time > self->last_seen_video_running_time) {
+           self->dropping = TRUE;
+         }
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_END_RUNNING_TIME:{
+       g_mutex_lock (&self->mutex);
+       self->end_running_time = g_value_get_uint64 (value);
+       if (self->mode == MODE_RUNNING_TIME) {
+         if (self->end_running_time >= self->last_seen_video_running_time) {
+           self->dropping = TRUE;
+         }
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_MODE:{
+       GstAvWaitMode old_mode;
+ 
+       g_mutex_lock (&self->mutex);
+       old_mode = self->mode;
+       self->mode = g_value_get_enum (value);
+       if (self->mode != old_mode) {
+         switch (self->mode) {
+           case MODE_TIMECODE:
+             if (self->last_seen_tc && self->tc &&
+                 gst_video_time_code_compare (self->last_seen_tc,
+                     self->tc) < 0) {
+               self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+               self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
+               self->dropping = TRUE;
+             }
+             break;
+           case MODE_RUNNING_TIME:
+             self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+             self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
+             if (self->target_running_time > self->last_seen_video_running_time
+                 || self->end_running_time >=
+                 self->last_seen_video_running_time) {
+               self->dropping = TRUE;
+             }
+             break;
+             /* Let the chain functions handle the rest */
+           case MODE_VIDEO_FIRST:
+             /* pass-through */
+           default:
+             break;
+         }
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case PROP_RECORDING:{
+       g_mutex_lock (&self->mutex);
+       self->recording = g_value_get_boolean (value);
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+       break;
+   }
+ }
+
+ /* Video sink pad event handler.
+  *
+  * SEGMENT: stores the segment (must be TIME) and, when it changed, resets
+  * all start/end running times and goes back to dropping.  GAP: swallowed
+  * entirely (never forwarded).  EOS: marks video EOS, synchronises the
+  * audio end point if we were recording, and wakes the audio thread.
+  * FLUSH_START/STOP: toggles the flush flag and resets the timing state.
+  * CAPS: caches the video info and copies the framerate into any timecodes
+  * that lack one.  All events except GAP are forwarded via
+  * gst_pad_event_default(); bus messages are always posted outside the
+  * mutex. */
+ static gboolean
+ gst_avwait_vsink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+   GstAvWait *self = GST_AVWAIT (parent);
+   GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+ 
+   switch (GST_EVENT_TYPE (event)) {
+     case GST_EVENT_SEGMENT:{
+       GstSegment segment;
+       gboolean send_message = FALSE;
+       gboolean segment_changed;
+ 
+       g_mutex_lock (&self->mutex);
+       gst_event_copy_segment (event, &segment);
+       /* keep our position when comparing: only the segment itself counts */
+       segment.position = self->vsegment.position;
+       segment_changed = !gst_segment_is_equal (&segment, &self->vsegment);
+       self->vsegment = segment;
+       if (self->vsegment.format != GST_FORMAT_TIME) {
+         GST_ERROR_OBJECT (self, "Invalid segment format");
+         g_mutex_unlock (&self->mutex);
+         gst_event_unref (event);
+         return FALSE;
+       }
+       if (segment_changed) {
+         GST_DEBUG_OBJECT (self, "First time reset in video segment");
+         self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+         self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
+         self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+         self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
+         if (!self->dropping) {
+           self->dropping = TRUE;
+           send_message = TRUE;
+         }
+         self->vsegment.position = GST_CLOCK_TIME_NONE;
+       }
+       g_mutex_unlock (&self->mutex);
+ 
+       if (send_message)
+         gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
+       break;
+     }
+     case GST_EVENT_GAP:
+       /* gaps are dropped, not forwarded downstream */
+       gst_event_unref (event);
+       return TRUE;
+     case GST_EVENT_EOS:{
+       GstClockTime running_time;
+       gboolean send_message = FALSE;
+       GstClockTime audio_running_time_to_end_at;
+ 
+       g_mutex_lock (&self->mutex);
+       self->video_eos_flag = TRUE;
+ 
+       /* If we were recording then we'd be done with it at EOS of the video
+        * pad once the audio has caught up, if it has to */
+       running_time = self->last_seen_video_running_time;
+       if (self->was_recording) {
+         GST_INFO_OBJECT (self, "Recording stopped at EOS at %" GST_TIME_FORMAT,
+             GST_TIME_ARGS (running_time));
+ 
+         if (running_time > self->running_time_to_wait_for
+             && running_time <= self->running_time_to_end_at) {
+           /* We just stopped recording: synchronise the audio */
+           self->audio_running_time_to_end_at = running_time;
+           self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
+         } else if (running_time < self->running_time_to_wait_for
+             && self->running_time_to_wait_for != GST_CLOCK_TIME_NONE) {
+           self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+         }
+       }
+ 
+       /* wake up an audio chain possibly waiting for video */
+       g_cond_signal (&self->cond);
+ 
+       /* only post the end message once both sides have pushed */
+       if (self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED) {
+         self->must_send_end_message = END_MESSAGE_NORMAL;
+         send_message = TRUE;
+         audio_running_time_to_end_at = self->audio_running_time_to_end_at;
+       } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
+         self->must_send_end_message |= END_MESSAGE_VIDEO_PUSHED;
+       }
+       g_mutex_unlock (&self->mutex);
+ 
+       if (send_message)
+         gst_avwait_send_element_message (self, TRUE,
+             audio_running_time_to_end_at);
+       break;
+     }
+     case GST_EVENT_FLUSH_START:
+       g_mutex_lock (&self->mutex);
+       self->video_flush_flag = TRUE;
+       /* unblock a video chain waiting for the first audio buffer */
+       g_cond_signal (&self->audio_cond);
+       g_mutex_unlock (&self->mutex);
+       break;
+     case GST_EVENT_FLUSH_STOP:{
+       gboolean send_message = FALSE;
+ 
+       g_mutex_lock (&self->mutex);
+       self->video_flush_flag = FALSE;
+       GST_DEBUG_OBJECT (self, "First time reset in video flush");
+       self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+       self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
+       self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+       self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
+       if (!self->dropping) {
+         self->dropping = TRUE;
+         send_message = TRUE;
+       }
+       gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
+       self->vsegment.position = GST_CLOCK_TIME_NONE;
+       g_mutex_unlock (&self->mutex);
+ 
+       if (send_message)
+         gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
+       break;
+     }
+     case GST_EVENT_CAPS:{
+       GstCaps *caps;
+       gst_event_parse_caps (event, &caps);
+       GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
+       g_mutex_lock (&self->mutex);
+       if (!gst_video_info_from_caps (&self->vinfo, caps)) {
+         gst_event_unref (event);
+         g_mutex_unlock (&self->mutex);
+         return FALSE;
+       }
+       /* propagate the negotiated framerate into timecodes lacking one */
+       if (self->tc && self->tc->config.fps_n == 0 && self->vinfo.fps_n != 0) {
+         self->tc->config.fps_n = self->vinfo.fps_n;
+         self->tc->config.fps_d = self->vinfo.fps_d;
+       }
+       if (self->end_tc && self->end_tc->config.fps_n == 0
+           && self->vinfo.fps_n != 0) {
+         self->end_tc->config.fps_n = self->vinfo.fps_n;
+         self->end_tc->config.fps_d = self->vinfo.fps_d;
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     default:
+       break;
+   }
+   return gst_pad_event_default (pad, parent, event);
+ }
+
+ /* Audio sink pad event handler.
+  *
+  * SEGMENT: stores the segment (must be TIME).  EOS: marks audio EOS, wakes
+  * a video chain waiting for audio, and posts the pending end message once
+  * the video side has also finished.  FLUSH_START/STOP: toggles the flush
+  * flag and resets the audio segment.  CAPS: caches the audio info.  All
+  * events are forwarded via gst_pad_event_default(); bus messages are
+  * posted outside the mutex. */
+ static gboolean
+ gst_avwait_asink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+   GstAvWait *self = GST_AVWAIT (parent);
+   GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+ 
+   switch (GST_EVENT_TYPE (event)) {
+     case GST_EVENT_SEGMENT:{
+       GstSegment segment;
+       gboolean segment_changed;
+ 
+       g_mutex_lock (&self->mutex);
+       gst_event_copy_segment (event, &segment);
+       /* keep our position when comparing: only the segment itself counts */
+       segment.position = self->asegment.position;
+       segment_changed = !gst_segment_is_equal (&segment, &self->asegment);
+       self->asegment = segment;
+ 
+       if (self->asegment.format != GST_FORMAT_TIME) {
+         GST_ERROR_OBJECT (self, "Invalid segment format");
+         g_mutex_unlock (&self->mutex);
+         gst_event_unref (event);
+         return FALSE;
+       }
+ 
+       if (segment_changed) {
+         self->asegment.position = GST_CLOCK_TIME_NONE;
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     case GST_EVENT_FLUSH_START:
+       g_mutex_lock (&self->mutex);
+       self->audio_flush_flag = TRUE;
+       /* unblock an audio chain waiting for video */
+       g_cond_signal (&self->cond);
+       g_mutex_unlock (&self->mutex);
+       break;
+     case GST_EVENT_EOS:{
+       gboolean send_message = FALSE;
+       GstClockTime audio_running_time_to_end_at;
+ 
+       g_mutex_lock (&self->mutex);
+       self->audio_eos_flag = TRUE;
+       /* unblock a video chain waiting for the first audio buffer */
+       g_cond_signal (&self->audio_cond);
+ 
+       /* only post the end message once both sides have pushed */
+       if ((self->must_send_end_message & END_MESSAGE_VIDEO_PUSHED)) {
+         self->must_send_end_message = END_MESSAGE_NORMAL;
+         audio_running_time_to_end_at = self->audio_running_time_to_end_at;
+         send_message = TRUE;
+       } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
+         self->must_send_end_message |= END_MESSAGE_AUDIO_PUSHED;
+       } else {
+         self->must_send_end_message = END_MESSAGE_NORMAL;
+       }
+       g_mutex_unlock (&self->mutex);
+ 
+       if (send_message)
+         gst_avwait_send_element_message (self, TRUE,
+             audio_running_time_to_end_at);
+       break;
+     }
+     case GST_EVENT_FLUSH_STOP:
+       g_mutex_lock (&self->mutex);
+       self->audio_flush_flag = FALSE;
+       gst_segment_init (&self->asegment, GST_FORMAT_UNDEFINED);
+       self->asegment.position = GST_CLOCK_TIME_NONE;
+       g_mutex_unlock (&self->mutex);
+       break;
+     case GST_EVENT_CAPS:{
+       GstCaps *caps;
+       gst_event_parse_caps (event, &caps);
+       GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
+       g_mutex_lock (&self->mutex);
+       if (!gst_audio_info_from_caps (&self->ainfo, caps)) {
+         g_mutex_unlock (&self->mutex);
+         gst_event_unref (event);
+         return FALSE;
+       }
+       g_mutex_unlock (&self->mutex);
+       break;
+     }
+     default:
+       break;
+   }
+ 
+   return gst_pad_event_default (pad, parent, event);
+ }
+
+ static GstFlowReturn
+ gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
+ {
+ GstClockTime timestamp;
+ GstAvWait *self = GST_AVWAIT (parent);
+ GstClockTime running_time;
+ GstVideoTimeCode *tc = NULL;
+ GstVideoTimeCodeMeta *tc_meta;
+ gboolean retry = FALSE;
+ gboolean ret = GST_FLOW_OK;
+ gboolean send_message = FALSE;
+ GstClockTime message_running_time;
+ gboolean message_dropping;
+
+ timestamp = GST_BUFFER_TIMESTAMP (inbuf);
+ if (timestamp == GST_CLOCK_TIME_NONE) {
+ gst_buffer_unref (inbuf);
+ return GST_FLOW_ERROR;
+ }
+
+ g_mutex_lock (&self->mutex);
+ self->vsegment.position = timestamp;
+ running_time =
+ gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
+ self->vsegment.position);
+ self->last_seen_video_running_time = running_time;
+
+ tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
+ if (tc_meta) {
+ tc = gst_video_time_code_copy (&tc_meta->tc);
+ if (self->last_seen_tc) {
+ gst_video_time_code_free (self->last_seen_tc);
+ }
+ self->last_seen_tc = tc;
+ }
+
+ while (self->mode == MODE_VIDEO_FIRST
+ && self->first_audio_running_time == GST_CLOCK_TIME_NONE
+ && !self->audio_eos_flag
+ && !self->shutdown_flag && !self->video_flush_flag) {
+ GST_DEBUG_OBJECT (self, "Waiting for first audio buffer");
+ g_cond_wait (&self->audio_cond, &self->mutex);
+ }
+
+ if (self->video_flush_flag || self->shutdown_flag) {
+ GST_DEBUG_OBJECT (self, "Shutting down, ignoring buffer");
+ gst_buffer_unref (inbuf);
+ g_mutex_unlock (&self->mutex);
+ return GST_FLOW_FLUSHING;
+ }
+
+ switch (self->mode) {
+ case MODE_TIMECODE:{
+ if (self->tc && self->end_tc
+ && gst_video_time_code_compare (self->tc, self->end_tc) != -1) {
+ gchar *tc_str, *end_tc;
+
+ tc_str = gst_video_time_code_to_string (self->tc);
+ end_tc = gst_video_time_code_to_string (self->end_tc);
+ GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
+ ("End timecode %s must be after start timecode %s. Start timecode rejected",
+ end_tc, tc_str));
+ g_free (end_tc);
+ g_free (tc_str);
+ gst_buffer_unref (inbuf);
+ g_mutex_unlock (&self->mutex);
+ return GST_FLOW_ERROR;
+ }
+
+ if (self->tc != NULL && tc != NULL) {
+ gboolean emit_passthrough_signal = FALSE;
+
+ if (gst_video_time_code_compare (tc, self->tc) < 0
+ && self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (self, "Timecode not yet reached, ignoring frame");
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ } else if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
+ GST_INFO_OBJECT (self, "Target timecode reached at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (self->vsegment.position));
+ /* Don't emit a signal if we weren't dropping (e.g. settings changed
+ * mid-flight) */
+ emit_passthrough_signal = self->dropping;
+ self->dropping = FALSE;
+ self->running_time_to_wait_for = running_time;
+ if (self->recording) {
+ self->audio_running_time_to_wait_for =
+ self->running_time_to_wait_for;
+ }
+ }
+
+ if (self->end_tc && gst_video_time_code_compare (tc, self->end_tc) >= 0) {
+ if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
+ GST_INFO_OBJECT (self, "End timecode reached at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (self->vsegment.position));
+ self->dropping = TRUE;
+ self->running_time_to_end_at = running_time;
+ if (self->recording) {
+ self->audio_running_time_to_end_at = self->running_time_to_end_at;
+ self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
+ }
+ }
+
+ if (inbuf) {
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ }
+ } else if (emit_passthrough_signal && self->recording) {
+ send_message = TRUE;
+ message_running_time = self->running_time_to_wait_for;
+ message_dropping = FALSE;
+ }
+ }
+ break;
+ }
+ case MODE_RUNNING_TIME:{
+ gboolean emit_passthrough_signal = FALSE;
+
+ if (self->target_running_time != GST_CLOCK_TIME_NONE
+ && running_time < self->target_running_time) {
+ GST_DEBUG_OBJECT (self,
+ "Have %" GST_TIME_FORMAT ", waiting for %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time),
+ GST_TIME_ARGS (self->target_running_time));
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ } else if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
+ /* Don't emit a signal if we weren't dropping (e.g. settings changed
+ * mid-flight) */
+ emit_passthrough_signal = self->dropping;
+ self->dropping = FALSE;
+ self->running_time_to_wait_for = running_time;
+ if (self->recording) {
+ self->audio_running_time_to_wait_for = running_time;
+ }
+ if (self->recording) {
+ send_message = TRUE;
+ message_running_time = running_time;
+ message_dropping = FALSE;
+ }
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (self->end_running_time)
+ && running_time >= self->end_running_time) {
+ if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
+ GST_INFO_OBJECT (self,
+ "End running time %" GST_TIME_FORMAT " reached at %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (self->end_running_time),
+ GST_TIME_ARGS (self->vsegment.position));
+ self->dropping = TRUE;
+ self->running_time_to_end_at = running_time;
+ if (self->recording) {
+ self->audio_running_time_to_end_at = running_time;
+ self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
+ }
+ }
+
+ if (inbuf) {
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ }
+ } else if (emit_passthrough_signal && self->recording) {
+ send_message = TRUE;
+ message_running_time = self->running_time_to_wait_for;
+ message_dropping = FALSE;
+ }
+
+ break;
+ }
+ case MODE_VIDEO_FIRST:{
+ if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
+ self->running_time_to_wait_for = running_time;
+ GST_DEBUG_OBJECT (self, "First video running time is %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (self->running_time_to_wait_for));
+ if (self->recording) {
+ self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
+ }
+ if (self->dropping) {
+ self->dropping = FALSE;
+ if (self->recording) {
+ send_message = TRUE;
+ message_running_time = self->running_time_to_wait_for;
+ message_dropping = FALSE;
+ }
+ }
+ }
+ break;
+ }
+ }
+
+ if (!self->recording) {
+ if (self->was_recording) {
+ GST_INFO_OBJECT (self, "Recording stopped at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time));
+
+ if (running_time > self->running_time_to_wait_for
+ && (running_time <= self->running_time_to_end_at
+ || self->running_time_to_end_at == GST_CLOCK_TIME_NONE)) {
+ /* We just stopped recording: synchronise the audio */
+ if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE)
+ self->running_time_to_end_at = running_time;
+ self->audio_running_time_to_end_at = running_time;
+ self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
+ } else if (running_time < self->running_time_to_wait_for
+ && self->running_time_to_wait_for != GST_CLOCK_TIME_NONE) {
+ self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ /* Recording is FALSE: we drop all buffers */
+ if (inbuf) {
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ }
+ } else {
+ if (!self->was_recording) {
+ GST_INFO_OBJECT (self,
+ "Recording started at %" GST_TIME_FORMAT " waiting for %"
+ GST_TIME_FORMAT " inbuf %p", GST_TIME_ARGS (running_time),
+ GST_TIME_ARGS (self->running_time_to_wait_for), inbuf);
+
+ if (self->mode != MODE_VIDEO_FIRST ||
+ self->first_audio_running_time <= running_time ||
+ self->audio_eos_flag) {
+ if (running_time < self->running_time_to_end_at ||
+ self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
+ /* We are before the end of the recording. Check if we just actually
+ * started */
+ if (self->running_time_to_wait_for != GST_CLOCK_TIME_NONE
+ && running_time > self->running_time_to_wait_for) {
+ /* We just started recording: synchronise the audio */
+ self->audio_running_time_to_wait_for = running_time;
+ send_message = TRUE;
+ message_running_time = running_time;
+ message_dropping = FALSE;
+ } else {
+ /* We will start in the future when running_time_to_wait_for is
+ * reached */
+ self->audio_running_time_to_wait_for =
+ self->running_time_to_wait_for;
+ }
+ self->audio_running_time_to_end_at = self->running_time_to_end_at;
+ }
+ } else {
+ /* We are in video-first mode and behind the first audio timestamp. We
+ * should drop all video buffers until the first audio timestamp, so
+ * we can catch up with it. (In timecode mode and running-time mode, we
+ * don't care about when the audio starts, we start as soon as the
+ * target timecode or running time has been reached) */
+ if (inbuf) {
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ }
+ retry = TRUE;
+ }
+ }
+ }
+
+ if (!retry)
+ self->was_recording = self->recording;
+ g_cond_signal (&self->cond);
+ g_mutex_unlock (&self->mutex);
+
+ if (send_message)
+ gst_avwait_send_element_message (self, message_dropping,
+ message_running_time);
+ send_message = FALSE;
+
+ if (inbuf) {
+ GST_DEBUG_OBJECT (self,
+ "Pass video buffer %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (gst_segment_to_running_time (&self->vsegment,
+ GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (inbuf))),
+ GST_TIME_ARGS (gst_segment_to_running_time (&self->vsegment,
+ GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (inbuf) + GST_BUFFER_DURATION (inbuf))));
+ ret = gst_pad_push (self->vsrcpad, inbuf);
+ }
+
+ g_mutex_lock (&self->mutex);
+ if (self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED) {
+ self->must_send_end_message = END_MESSAGE_NORMAL;
+ send_message = TRUE;
+ message_dropping = TRUE;
+ message_running_time = self->audio_running_time_to_end_at;
+ } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
+ if (self->audio_eos_flag) {
+ self->must_send_end_message = END_MESSAGE_NORMAL;
+ send_message = TRUE;
+ message_dropping = TRUE;
+ message_running_time = self->audio_running_time_to_end_at;
+ } else {
+ self->must_send_end_message |= END_MESSAGE_VIDEO_PUSHED;
+ }
+ }
+ g_mutex_unlock (&self->mutex);
+
+ if (send_message)
+ gst_avwait_send_element_message (self, message_dropping,
+ message_running_time);
+
+ return ret;
+ }
+
+ /*
+ * assumes sign1 and sign2 are either 1 or -1
+ * returns 0 if sign1*num1 == sign2*num2
+ * -1 if sign1*num1 < sign2*num2
+ * 1 if sign1*num1 > sign2*num2
+ */
+ static gint
+ gst_avwait_compare_guint64_with_signs (gint sign1,
+ guint64 num1, gint sign2, guint64 num2)
+ {
+ if (sign1 != sign2)
+ return sign1;
+ else if (num1 == num2)
+ return 0;
+ else
+ return num1 > num2 ? sign1 : -sign1;
+ }
+
+ static GstFlowReturn
+ gst_avwait_asink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
+ {
+ GstClockTime timestamp;
+ GstAvWait *self = GST_AVWAIT (parent);
+ GstClockTime current_running_time;
+ GstClockTime video_running_time = GST_CLOCK_TIME_NONE;
+ GstClockTime duration;
+ GstClockTime running_time_at_end = GST_CLOCK_TIME_NONE;
+ gint asign, vsign = 1, esign = 1;
+ GstFlowReturn ret = GST_FLOW_OK;
+ /* Make sure the video thread doesn't send the element message before we
+ * actually call gst_pad_push */
+ gboolean send_element_message = FALSE;
+
+ timestamp = GST_BUFFER_TIMESTAMP (inbuf);
+ if (timestamp == GST_CLOCK_TIME_NONE) {
+ gst_buffer_unref (inbuf);
+ return GST_FLOW_ERROR;
+ }
+
+ g_mutex_lock (&self->mutex);
+ self->asegment.position = timestamp;
+ asign =
+ gst_segment_to_running_time_full (&self->asegment, GST_FORMAT_TIME,
+ self->asegment.position, ¤t_running_time);
+ if (asign == 0) {
+ g_mutex_unlock (&self->mutex);
+ gst_buffer_unref (inbuf);
+ GST_ERROR_OBJECT (self, "Could not get current running time");
+ return GST_FLOW_ERROR;
+ }
+
+ if (self->first_audio_running_time == GST_CLOCK_TIME_NONE) {
+ self->first_audio_running_time = current_running_time;
+ }
+
+ g_cond_signal (&self->audio_cond);
+ if (self->vsegment.format == GST_FORMAT_TIME) {
+ vsign =
+ gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
+ self->vsegment.position, &video_running_time);
+ if (vsign == 0) {
+ video_running_time = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ duration =
+ gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
+ GST_SECOND, self->ainfo.rate);
+ if (duration != GST_CLOCK_TIME_NONE) {
+ esign =
+ gst_segment_to_running_time_full (&self->asegment, GST_FORMAT_TIME,
+ self->asegment.position + duration, &running_time_at_end);
+ if (esign == 0) {
+ g_mutex_unlock (&self->mutex);
+ GST_ERROR_OBJECT (self, "Could not get running time at end");
+ gst_buffer_unref (inbuf);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ while (!(self->video_eos_flag || self->audio_flush_flag
+ || self->shutdown_flag) &&
+ /* Start at timecode */
+ /* Wait if we haven't received video yet */
+ (video_running_time == GST_CLOCK_TIME_NONE
+ /* Wait if audio is after the video: dunno what to do */
+ || gst_avwait_compare_guint64_with_signs (asign,
+ running_time_at_end, vsign, video_running_time) == 1)) {
+ GST_DEBUG_OBJECT (self,
+ "Waiting for video: audio at %s%" GST_TIME_FORMAT ", video at %s%"
+ GST_TIME_FORMAT, asign < 0 ? "-" : "+",
+ GST_TIME_ARGS (running_time_at_end), vsign < 0 ? "-" : "+",
+ GST_TIME_ARGS (video_running_time));
+ g_cond_wait (&self->cond, &self->mutex);
+ vsign =
+ gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
+ self->vsegment.position, &video_running_time);
+ if (vsign == 0) {
+ video_running_time = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ if (self->audio_flush_flag || self->shutdown_flag) {
+ GST_DEBUG_OBJECT (self, "Shutting down, ignoring frame");
+ gst_buffer_unref (inbuf);
+ g_mutex_unlock (&self->mutex);
+ return GST_FLOW_FLUSHING;
+ }
+
+ if (self->audio_running_time_to_wait_for == GST_CLOCK_TIME_NONE
+ /* Audio ends before start : drop */
+ || gst_avwait_compare_guint64_with_signs (esign,
+ running_time_at_end, 1, self->audio_running_time_to_wait_for) == -1
+ /* Audio starts after end: drop */
+ || current_running_time >= self->audio_running_time_to_end_at) {
+ GST_DEBUG_OBJECT (self,
+ "Dropped an audio buf at %" GST_TIME_FORMAT " waiting for %"
+ GST_TIME_FORMAT " video time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (current_running_time),
+ GST_TIME_ARGS (self->audio_running_time_to_wait_for),
+ GST_TIME_ARGS (video_running_time));
+ GST_DEBUG_OBJECT (self, "Would have ended at %i %" GST_TIME_FORMAT,
+ esign, GST_TIME_ARGS (running_time_at_end));
+ gst_buffer_unref (inbuf);
+ inbuf = NULL;
+ if (current_running_time >= self->audio_running_time_to_end_at &&
+ (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) &&
+ !(self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED)) {
+ send_element_message = TRUE;
+ }
+ } else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
+ 1, self->audio_running_time_to_wait_for) >= 0
+ && gst_avwait_compare_guint64_with_signs (esign, running_time_at_end, 1,
+ self->audio_running_time_to_end_at) == -1) {
+ /* Audio ends after start, but before end: clip */
+ GstSegment asegment2 = self->asegment;
+ guint64 start;
+ gint ssign;
+
+ ssign = gst_segment_position_from_running_time_full (&asegment2,
+ GST_FORMAT_TIME, self->audio_running_time_to_wait_for, &start);
+ if (ssign > 0) {
+ asegment2.start = start;
+ } else {
+ /* Starting before the start of the audio segment?! */
+ /* This shouldn't happen: we already know that the current audio is
+ * inside the segment, and that the end is after the current audio
+ * position */
+ GST_ELEMENT_ERROR (self, CORE, FAILED,
+ ("Failed to clip audio: it should have started before the current segment"),
+ NULL);
+ }
+
+ inbuf =
+ gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
+ self->ainfo.bpf);
+ } else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
+ 1, self->audio_running_time_to_end_at) >= 0) {
+ /* Audio starts after start, but before end: clip from the other side */
+ GstSegment asegment2 = self->asegment;
+ guint64 stop;
+ gint ssign;
+
+ ssign =
+ gst_segment_position_from_running_time_full (&asegment2,
+ GST_FORMAT_TIME, self->audio_running_time_to_end_at, &stop);
+ if (ssign > 0) {
+ asegment2.stop = stop;
+ } else {
+ /* Stopping before the start of the audio segment?! */
+ /* This shouldn't happen: we already know that the current audio is
+ * inside the segment, and that the end is after the current audio
+ * position */
+ GST_ELEMENT_ERROR (self, CORE, FAILED,
+ ("Failed to clip audio: it should have ended before the current segment"),
+ NULL);
+ }
+ inbuf =
+ gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
+ self->ainfo.bpf);
+ if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
+ send_element_message = TRUE;
+ }
+ } else {
+ /* Programming error? Shouldn't happen */
+ g_assert_not_reached ();
+ }
+ g_mutex_unlock (&self->mutex);
+
+ if (inbuf) {
+ GstClockTime new_duration =
+ gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
+ GST_SECOND, self->ainfo.rate);
+ GstClockTime new_running_time_at_end =
+ gst_segment_to_running_time (&self->asegment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (inbuf) + new_duration);
+
+ GST_DEBUG_OBJECT (self,
+ "Pass audio buffer %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (gst_segment_to_running_time (&self->asegment,
+ GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (inbuf))),
+ GST_TIME_ARGS (new_running_time_at_end));
+ ret = gst_pad_push (self->asrcpad, inbuf);
+ }
+
+ if (send_element_message) {
+ gboolean send_message = FALSE;
+ GstClockTime audio_running_time_to_end_at;
+
+ g_mutex_lock (&self->mutex);
+ if ((self->must_send_end_message & END_MESSAGE_VIDEO_PUSHED) ||
+ self->video_eos_flag) {
+ self->must_send_end_message = END_MESSAGE_NORMAL;
+ send_message = TRUE;
+ audio_running_time_to_end_at = self->audio_running_time_to_end_at;
+ } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
+ self->must_send_end_message |= END_MESSAGE_AUDIO_PUSHED;
+ } else {
+ g_assert_not_reached ();
++ g_mutex_unlock (&self->mutex);
+ }
+ g_mutex_unlock (&self->mutex);
+
+ if (send_message)
+ gst_avwait_send_element_message (self, TRUE,
+ audio_running_time_to_end_at);
+ }
+ send_element_message = FALSE;
+ return ret;
+ }
+
+ static GstIterator *
+ gst_avwait_iterate_internal_links (GstPad * pad, GstObject * parent)
+ {
+ GstIterator *it = NULL;
+ GstPad *opad;
+ GValue val = G_VALUE_INIT;
+ GstAvWait *self = GST_AVWAIT (parent);
+
+ if (self->asinkpad == pad)
+ opad = gst_object_ref (self->asrcpad);
+ else if (self->asrcpad == pad)
+ opad = gst_object_ref (self->asinkpad);
+ else if (self->vsinkpad == pad)
+ opad = gst_object_ref (self->vsrcpad);
+ else if (self->vsrcpad == pad)
+ opad = gst_object_ref (self->vsinkpad);
+ else
+ goto out;
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, opad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+
+ gst_object_unref (opad);
+
+ out:
+ return it;
+ }
--- /dev/null
- * positives or negatives, on the assumption that the badness of a
+ /* GStreamer
+ * Copyright (C) 2011 Entropy Wave Inc <ds@entropywave.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+ /**
+ * SECTION:element-scenechange
+ * @title: gstscenechange
+ *
+ * The scenechange element detects scene changes (also known as shot
+ * changes) in a video stream, and sends a signal when this occurs.
+ * Applications can listen to this signal and make changes to the
+ * pipeline such as cutting the stream. In addition, whenever a
+ * scene change is detected, a custom downstream "GstForceKeyUnit"
+ * event is sent to downstream elements. Most video encoder elements
+ * will insert synchronization points into the stream when this event
+ * is received. When used with a tee element, the scenechange element
+ * can be used to align the synchronization points among multiple
+ * video encoders, which is useful for segmented streaming.
+ *
+ * The scenechange element does not work with compressed video.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=some_file.ogv ! decodebin !
+ * scenechange ! theoraenc ! fakesink
+ * ]|
+ *
+ */
+ /*
+ * The algorithm used for scene change detection is a modification
+ * of Jim Easterbrook's shot change detector. I'm not aware of a
+ * research paper, but the code I got the idea from is here:
+ * http://sourceforge.net/projects/shot-change/
+ *
+ * The method is relatively simple. Calculate the sum of absolute
+ * differences of a picture and the previous picture, and compare this
+ * picture difference value with neighboring pictures. In the original
+ * algorithm, the value is compared to a configurable number of past
+ * and future pictures. However, comparing to future frames requires
+ * introducing latency into the stream, which I did not want. So this
+ * implementation only compared to previous frames.
+ *
+ * This code is more directly derived from the scene change detection
+ * implementation in Schroedinger. Schro's implementation is closer
+ * to the Easterbrook algorithm, comparing to future pictures. In
+ * terms of accuracy, schro's implementation has about 2-3 false positives
+ * or false negatives per 100 scene changes. This implementation has
+ * about 5 per 100. The threshold is tuned for minimum total false
+ * positives or negatives, on the assumption that the badness of a
+ * false negative is the same as a false positive.
+ *
+ * This algorithm is pretty much at its limit for error rate. I
+ * recommend any future work in this area to increase the complexity
+ * of detection, and then write an automatic tuning system as opposed
+ * to the manual tuning I did here.
+ *
+ * Inside the TESTING define are some hard-coded (mostly hand-written)
+ * scene change frame numbers for some easily available sequences.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideofilter.h>
+ #include <string.h>
+ #include "gstscenechange.h"
+ #include "gstscenechangeorc.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_scene_change_debug_category);
+ #define GST_CAT_DEFAULT gst_scene_change_debug_category
+
+ /* prototypes */
+
+
+ static GstFlowReturn gst_scene_change_transform_frame_ip (GstVideoFilter *
+ filter, GstVideoFrame * frame);
+
+ #undef TESTING
+ #ifdef TESTING
+ static gboolean is_shot_change (int frame_number);
+ #endif
+
+ enum
+ {
+ PROP_0
+ };
+
+ #define VIDEO_CAPS \
+ GST_VIDEO_CAPS_MAKE("{ I420, Y42B, Y41B, Y444 }")
+
+ /* class initialization */
+
+ G_DEFINE_TYPE_WITH_CODE (GstSceneChange, gst_scene_change,
+ GST_TYPE_VIDEO_FILTER,
+ GST_DEBUG_CATEGORY_INIT (gst_scene_change_debug_category, "scenechange", 0,
+ "debug category for scenechange element"));
+ GST_ELEMENT_REGISTER_DEFINE (scenechange, "scenechange",
+ GST_RANK_NONE, gst_scene_change_get_type ());
+
+ static void
+ gst_scene_change_class_init (GstSceneChangeClass * klass)
+ {
+ GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
++ GstCaps *tmp = NULL;
+
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_caps_from_string (VIDEO_CAPS)));
++ tmp = gst_caps_from_string (VIDEO_CAPS)));
++ gst_caps_unref (tmp);
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
++ tmp = gst_caps_from_string (VIDEO_CAPS)));
++ gst_caps_unref (tmp);
+
+ gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
+ "Scene change detector",
+ "Video/Filter", "Detects scene changes in video",
+ "David Schleef <ds@entropywave.com>");
+
+ video_filter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_scene_change_transform_frame_ip);
+
+ }
+
static void
gst_scene_change_init (GstSceneChange * scenechange)
{
  /* Nothing to do: all instance state (oldbuf, diffs, counters) starts at
   * zero from the GObject instance allocation */
}
+
+
+ static double
+ get_frame_score (GstVideoFrame * f1, GstVideoFrame * f2)
+ {
+ guint32 score = 0;
+ int width, height;
+
+ width = f1->info.width;
+ height = f1->info.height;
+
+ orc_sad_nxm_u8 (&score, f1->data[0], f1->info.stride[0],
+ f2->data[0], f2->info.stride[0], width, height);
+
+ return ((double) score) / (width * height);
+ }
+
/* In-place "transform": compares each frame against the previous one and,
 * when a scene change is detected, pushes a downstream force-key-unit event
 * on the src pad.  The frame data itself is never modified. */
static GstFlowReturn
gst_scene_change_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstSceneChange *scenechange = GST_SCENE_CHANGE (filter);
  GstVideoFrame oldframe;
  double score_min;
  double score_max;
  double threshold;
  double score;
  gboolean change;
  gboolean ret;
  int i;

  GST_DEBUG_OBJECT (scenechange, "transform_frame_ip");

  /* Very first frame: nothing to compare against yet, just remember it */
  if (!scenechange->oldbuf) {
    scenechange->n_diffs = 0;
    memset (scenechange->diffs, 0, sizeof (double) * SC_N_DIFFS);
    scenechange->oldbuf = gst_buffer_ref (frame->buffer);
    memcpy (&scenechange->oldinfo, &frame->info, sizeof (GstVideoInfo));
    return GST_FLOW_OK;
  }

  ret =
      gst_video_frame_map (&oldframe, &scenechange->oldinfo,
      scenechange->oldbuf, GST_MAP_READ);
  if (!ret) {
    GST_ERROR_OBJECT (scenechange, "failed to map old video frame");
    return GST_FLOW_ERROR;
  }

  /* Mean per-pixel difference between previous and current frame */
  score = get_frame_score (&oldframe, frame);

  gst_video_frame_unmap (&oldframe);

  /* Current buffer becomes the reference for the next call */
  gst_buffer_unref (scenechange->oldbuf);
  scenechange->oldbuf = gst_buffer_ref (frame->buffer);
  memcpy (&scenechange->oldinfo, &frame->info, sizeof (GstVideoInfo));

  /* Slide the window of recent scores and append the new one at the end */
  memmove (scenechange->diffs, scenechange->diffs + 1,
      sizeof (double) * (SC_N_DIFFS - 1));
  scenechange->diffs[SC_N_DIFFS - 1] = score;
  scenechange->n_diffs++;

  /* Min/max over the window, excluding the score just appended so the
   * current frame is not compared against itself */
  score_min = scenechange->diffs[0];
  score_max = scenechange->diffs[0];
  for (i = 1; i < SC_N_DIFFS - 1; i++) {
    score_min = MIN (score_min, scenechange->diffs[i]);
    score_max = MAX (score_max, scenechange->diffs[i]);
  }

  threshold = 1.8 * score_max - 0.8 * score_min;

  /* Hand-tuned heuristic (see file-header comment); only decide once the
   * window has seen at least SC_N_DIFFS frames */
  if (scenechange->n_diffs > (SC_N_DIFFS - 1)) {
    if (score < 5) {
      change = FALSE;
    } else if (score / threshold < 1.0) {
      change = FALSE;
    } else if ((score > 30)
        && (score / scenechange->diffs[SC_N_DIFFS - 2] > 1.4)) {
      change = TRUE;
    } else if (score / threshold > 2.3) {
      change = TRUE;
    } else if (score > 50) {
      change = TRUE;
    } else {
      change = FALSE;
    }
  } else {
    change = FALSE;
  }

  /* Reset the window after a hit so one cut is not reported repeatedly */
  if (change == TRUE) {
    memset (scenechange->diffs, 0, sizeof (double) * SC_N_DIFFS);
    scenechange->n_diffs = 0;
  }
#ifdef TESTING
  if (change != is_shot_change (scenechange->n_diffs)) {
    g_print ("%d %g %g %g %d\n", scenechange->n_diffs, score / threshold,
        score, threshold, change);
  }
#endif

  if (change) {
    GstEvent *event;

    GST_INFO_OBJECT (scenechange, "%d %g %g %g %d",
        scenechange->n_diffs, score / threshold, score, threshold, change);

    /* Ask downstream encoders for a synchronisation point at this frame */
    event =
        gst_video_event_new_downstream_force_key_unit (GST_BUFFER_PTS
        (frame->buffer), GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE,
        scenechange->count++);

    gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (scenechange), event);
  }

  return GST_FLOW_OK;
}
+
+
+
+
+
+
+ #ifdef TESTING
+ /* This is from ds's personal collection. No, you can't have it. */
+ int showreel_changes[] = {
+ 242, 483, 510, 550, 579, 603, 609, 1056, 1067, 1074, 1079, 1096,
+ 1106, 1113, 1127, 1145, 1156, 1170, 1212, 1228, 1243, 1269, 1274,
+ 1322, 1349, 1370, 1378, 1423, 1456, 1458, 1508, 1519, 1542, 1679,
+ 1767, 1837, 1895, 1962, 2006, 2035, 2102, 2139, 2196, 2561, 2664,
+ 2837, 2895, 2985, 3035, 3077, 3128, 3176, 3218, 3306, 3351, 3388,
+ 3421, 3470, 3711, 3832, 4029, 4184, 4444, 4686, 4719, 4825, 4941,
+ 5009, 5091, 5194, 5254, 5286, 5287, 5343, 5431, 5501, 5634, 5695, 5788,
+ 5839, 5861, 5930, 6030, 6168, 6193, 6237, 6336, 6376, 6421, 6495,
+ 6550, 6611, 6669, 6733, 6819, 6852, 6944, 7087, 7148, 7189, 7431,
+ 7540, 7599, 7632, 7661, 7693, 7930, 7963, 8003, 8076, 8109, 8147,
+ 8177, 8192, 8219, 8278, 8322, 8370, 8409, 8566, 8603, 8747, 8775,
+ 8873, 8907, 8955, 8969, 8983, 8997, 9026, 9079, 9140, 9165, 9206,
+ 9276, 9378, 9449, 9523, 9647, 9703, 9749, 9790, 9929, 10056, 10216,
+ 10307, 10411, 10487, 10557, 10695, 10770, 10854, 11095, 11265, 11517, 11589,
+ 11686, 11825, 11940, 12004, 12047, 12113, 12179, 12233, 12532, 12586, 12708,
+ 12793, 12877, 12954, 13030, 13105, 13177, 13279, 13396, 13486, 13538, 13561,
+ 13591, 13627, 13656, 13709, 13763, 13815, 13842, 13876, 13906, 13929, 13955,
+ 14003, 14070, 14097, 14127, 14153, 14198, 14269, 14348, 14367, 14440, 14488,
+ 14548, 14573, 14599, 14630, 14665, 14907, 14962, 15013, 15089, 15148, 15227,
+ 15314, 15355, 15369, 15451, 15470, 15542, 15570, 15640, 15684, 15781, 15869,
+ 15938, 16172, 16266, 16429, 16479, 16521, 16563, 16612, 16671, 16692, 16704,
+ 16720, 16756, 16789, 16802, 16815, 16867, 16908, 16939, 16953, 16977, 17006,
+ 17014, 17026, 17040, 17062, 17121, 17176, 17226, 17322, 17444, 17496, 17641,
+ 17698, 17744, 17826, 17913, 17993, 18073, 18219, 18279, 18359, 18475, 18544,
+ 18587, 18649, 18698, 18756, 18826, 18853, 18866, 19108, 19336, 19481, 19544,
+ 19720, 19816, 19908, 19982, 20069, 20310, 20355, 20374, 20409, 20469, 20599,
+ 20607, 20652, 20805, 20822, 20882, 20982, 21029, 21433, 21468, 21561, 21602,
+ 21661, 21720, 21909, 22045, 22166, 22225, 22323, 22362, 22433, 22477, 22529,
+ 22571, 22617, 22642, 22676, 22918, 22978, 23084, 23161, 23288, 23409, 23490,
+ 23613, 23721, 23815, 24131, 24372, 24468, 24507, 24555, 24568, 24616, 24634,
+ 24829, 24843, 24919, 24992, 25040, 25160, 25288, 25607, 25684, 25717, 25764,
+ 25821, 25866, 25901, 25925, 25941, 25978, 25998, 26011, 26030, 26055, 26118,
+ 26133, 26145, 26159, 26175, 26182, 26195, 26205, 26238, 26258, 26316, 26340,
+ 26581, 26725, 26834, 26874, 26995, 27065, 27178, 27238, 27365, 27607, 27669,
+ 27694,
+ 27774, 27800, 27841, 27930, 27985, 28057, 28091, 28132, 28189, 28270, 28545,
+ 28653, 28711, 28770, 28886, 28966, 29139, 29241, 29356, 29415, 29490, 29576,
+ 29659, 29776, 29842, 29910, 30029, 30056, 30100, 30129, 30175, 30316, 30376,
+ 30441, 30551, 30666, 30784, 30843, 30948, 31045, 31286, 31315, 31534, 31607,
+ 31742,
+ 31817, 31853, 31984, 32009, 32112, 32162, 32210, 32264
+ };
+
+ /* Sintel */
+ int sintel_changes[] = {
+ 752, 1018, 1036, 1056, 1078, 1100, 1169, 1319, 1339, 1370,
+ 1425, 1455, 1494, 1552, 1572, 1637, 1663, 1777, 1955, 2060,
+ 2125, 2429, 2624, 2780, 2835, 2881, 2955, 3032, 3144, 3217,
+ 3315, 3384, 3740, 3890, 4234, 4261, 4322, 4368, 4425, 4481,
+ 4555, 4605, 4671, 4714, 4743, 4875, 4920, 5082, 5158, 5267,
+ 5379, 5956, 6021, 6071, 6112, 6139, 6221, 6318, 6374, 6519,
+ 6558, 6615, 6691, 6803, 6900, 6944, 7134, 7266, 7351, 7414,
+ 7467, 7503, 7559, 7573, 7656, 7733, 7876, 7929, 7971, 7985,
+ 8047, 8099, 8144, 8215, 8394, 8435, 8480, 9133, 9190, 9525,
+ 9962,
+ };
+
+ /* Breathe Out video, http://media.xiph.org/video/misc/ */
+ int breatheout_changes[] = {
+ 143, 263, 334, 426, 462, 563, 583, 618, 655, 707,
+ 818, 823, 858, 913, 956, 977, 999, 1073, 1124, 1144,
+ 1166, 1187, 1206, 1227, 1240, 1264, 1289, 1312, 1477, 1535,
+ 1646, 1692, 1739, 1757, 1798, 1855, 1974, 2048, 2129, 2212,
+ 2369, 2412, 2463, 2578, 2649, 2699, 2778, 2857, 2923, 3014,
+ 3107, 3246, 3321, 3350, 3459, 3498, 3541, 3567, 3613, 3636,
+ 3673, 3709, 3747, 3834, 3862, 3902, 3922, 4022, 4117, 4262,
+ 4303, 4357, 4556, 4578, 4617, 4716, 4792, 4873, 4895, 4917,
+ 4932, 4972, 5015, 5034, 5058, 5090, 5162, 5180, 5202, 5222,
+ 5239, 5258, 5281, 5298, 5397, 5430,
+ 485, 507, 534, 665, 685, 755, 1023, 1379, 1441, 1503,
+ 1584, 1621, 1903, 2081, 2281, 2511, 2958, 3071, 3185, 3214,
+ 3271, 3424, 3479, 3588, 3879, 3979, 4043, 4062, 4143, 4207,
+ 4237, 4336, 4461, 4476, 4533, 4647, 4815, 4853, 4949, 5075,
+ 5142, 5316, 5376,
+ 3514, 3952, 4384, 5337
+ };
+
/* Select which hand-written reference list to test against */
#define changes showreel_changes

/* TRUE if frame_number appears in the selected reference list of known shot
 * changes.  Linear scan; test-only code under #ifdef TESTING. */
static gboolean
is_shot_change (int frame_number)
{
  int i;
  for (i = 0; i < sizeof (changes) / sizeof (changes[0]); i++) {
    if (changes[i] == frame_number)
      return TRUE;
  }
  return FALSE;
}
#endif
--- /dev/null
- gst_caps_from_string (VIDEO_SRC_CAPS)));
+ /* GStreamer
+ * Copyright (C) 2013 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+ /**
+ * SECTION:element-videodiff
+ * @title: gstvideodiff
+ *
+ * The videodiff element highlights the difference between a frame and its
+ * previous on the luma plane.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc pattern=ball ! videodiff ! videoconvert ! autovideosink
+ * ]|
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideofilter.h>
+ #include "gstvideodiff.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_video_diff_debug_category);
+ #define GST_CAT_DEFAULT gst_video_diff_debug_category
+
+ /* prototypes */
+
+ static GstFlowReturn gst_video_diff_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * inframe, GstVideoFrame * outframe);
+
+ #define VIDEO_SRC_CAPS \
+ GST_VIDEO_CAPS_MAKE("{ I420, Y444, Y42B, Y41B }")
+
+ #define VIDEO_SINK_CAPS \
+ GST_VIDEO_CAPS_MAKE("{ I420, Y444, Y42B, Y41B }")
+
+
+ G_DEFINE_TYPE_WITH_CODE (GstVideoDiff, gst_video_diff, GST_TYPE_VIDEO_FILTER,
+ GST_DEBUG_CATEGORY_INIT (gst_video_diff_debug_category, "videodiff", 0,
+ "debug category for videodiff element"));
+ GST_ELEMENT_REGISTER_DEFINE (videodiff, "videodiff",
+ GST_RANK_NONE, GST_TYPE_VIDEO_DIFF);
+
+ /* Class init: registers the src/sink pad templates, element metadata and
+  * the transform_frame vfunc. The tmp/unref pattern around the caps fixes
+  * a caps leak: the pad template keeps its own reference, so the reference
+  * returned by gst_caps_from_string() must be dropped here. */
+ static void
+ gst_video_diff_class_init (GstVideoDiffClass * klass)
+ {
+ GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
++ GstCaps *tmp = NULL;
+
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_caps_from_string (VIDEO_SINK_CAPS)));
++ tmp = gst_caps_from_string (VIDEO_SRC_CAPS)));
++ gst_caps_unref (tmp);
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
++ tmp = gst_caps_from_string (VIDEO_SINK_CAPS)));
++ gst_caps_unref (tmp);
+
+ gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
+ "Video Diff", "Video/Filter",
+ "Visualize differences between adjacent video frames",
+ "David Schleef <ds@schleef.org>");
+
+ video_filter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_video_diff_transform_frame);
+ }
+
+ /* Instance init: only sets the default per-pixel luma difference threshold;
+  * previous_buffer starts NULL via GObject zero-initialization. */
+ static void
+ gst_video_diff_init (GstVideoDiff * videodiff)
+ {
+ videodiff->threshold = 10;
+ }
+
+ /* Compare the luma plane of @inframe against @oldframe and write the result
+  * into @outframe: pixels whose luma changed by more than the threshold are
+  * drawn as an alternating dark (16) / bright (240) stripe pattern, unchanged
+  * pixels are copied through. The two chroma planes are copied verbatim from
+  * the input. Works for any planar-Y format (I420/Y444/Y42B/Y41B).
+  * Always returns GST_FLOW_OK. */
+ static GstFlowReturn
+ gst_video_diff_transform_frame_ip_planarY (GstVideoDiff * videodiff,
+ GstVideoFrame * outframe, GstVideoFrame * inframe, GstVideoFrame * oldframe)
+ {
+ int width = inframe->info.width;
+ int height = inframe->info.height;
+ int i, j;
+ int threshold = videodiff->threshold;
+ /* t offsets the stripe pattern; presumably advanced per-frame elsewhere
+  * (not updated in this chunk) — TODO confirm */
+ int t = videodiff->t;
+
+ for (j = 0; j < height; j++) {
+ guint8 *d = (guint8 *) outframe->data[0] + outframe->info.stride[0] * j;
+ guint8 *s1 = (guint8 *) oldframe->data[0] + oldframe->info.stride[0] * j;
+ guint8 *s2 = (guint8 *) inframe->data[0] + inframe->info.stride[0] * j;
+ for (i = 0; i < width; i++) {
+ /* guint8 operands promote to int, so threshold arithmetic can't wrap */
+ if ((s2[i] < s1[i] - threshold) || (s2[i] > s1[i] + threshold)) {
+ /* 4-pixel diagonal stripes selected by bit 2 of i+j+t */
+ if ((i + j + t) & 0x4) {
+ d[i] = 16;
+ } else {
+ d[i] = 240;
+ }
+ } else {
+ d[i] = s2[i];
+ }
+ }
+ }
+ /* pass chroma (components 1 and 2) through unmodified */
+ for (j = 0; j < GST_VIDEO_FRAME_COMP_HEIGHT (inframe, 1); j++) {
+ guint8 *d = (guint8 *) outframe->data[1] + outframe->info.stride[1] * j;
+ guint8 *s = (guint8 *) inframe->data[1] + inframe->info.stride[1] * j;
+ memcpy (d, s, GST_VIDEO_FRAME_COMP_WIDTH (inframe, 1));
+ }
+ for (j = 0; j < GST_VIDEO_FRAME_COMP_HEIGHT (inframe, 2); j++) {
+ guint8 *d = (guint8 *) outframe->data[2] + outframe->info.stride[2] * j;
+ guint8 *s = (guint8 *) inframe->data[2] + inframe->info.stride[2] * j;
+ memcpy (d, s, GST_VIDEO_FRAME_COMP_WIDTH (inframe, 2));
+ }
+ return GST_FLOW_OK;
+ }
+
+ /* GstVideoFilter::transform_frame implementation.
+  * If a previous frame is cached, maps it and runs the planar-Y diff against
+  * the incoming frame; otherwise (first frame) the input is copied straight
+  * to the output. In both cases the current input buffer is ref'ed and kept
+  * as previous_buffer for the next call, together with its GstVideoInfo.
+  * NOTE(review): the gst_video_frame_map() return value is not checked —
+  * a failed map would feed an invalid frame to the diff; confirm upstream
+  * guarantees or add a check. */
+ static GstFlowReturn
+ gst_video_diff_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * inframe, GstVideoFrame * outframe)
+ {
+ GstVideoDiff *videodiff = GST_VIDEO_DIFF (filter);
+
+ GST_DEBUG_OBJECT (videodiff, "transform_frame_ip");
+
+ if (videodiff->previous_buffer) {
+ GstVideoFrame oldframe;
+
+ gst_video_frame_map (&oldframe, &videodiff->oldinfo,
+ videodiff->previous_buffer, GST_MAP_READ);
+
+ switch (inframe->info.finfo->format) {
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_Y41B:
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_Y42B:
+ gst_video_diff_transform_frame_ip_planarY (videodiff, outframe,
+ inframe, &oldframe);
+ break;
+ default:
+ /* caps negotiation restricts formats to the four above */
+ g_assert_not_reached ();
+ }
+
+ gst_video_frame_unmap (&oldframe);
+ /* drop our ref on the old frame before caching the new one below */
+ gst_buffer_unref (videodiff->previous_buffer);
+ } else {
+ /* no reference frame yet: copy all three planes input -> output */
+ int k;
+ int j;
+ for (k = 0; k < 3; k++) {
+ for (j = 0; j < GST_VIDEO_FRAME_COMP_HEIGHT (inframe, k); j++) {
+ guint8 *d = (guint8 *) outframe->data[k] + outframe->info.stride[k] * j;
+ guint8 *s = (guint8 *) inframe->data[k] + inframe->info.stride[k] * j;
+ memcpy (d, s, GST_VIDEO_FRAME_COMP_WIDTH (inframe, k));
+ }
+ }
+ }
+
+ /* keep a ref to the current input as the reference for the next frame */
+ videodiff->previous_buffer = gst_buffer_ref (inframe->buffer);
+ memcpy (&videodiff->oldinfo, &inframe->info, sizeof (GstVideoInfo));
+
+ return GST_FLOW_OK;
+ }
--- /dev/null
+ /* GStreamer H.264 Parser
+ * Copyright (C) <2010> Collabora ltd
+ * Copyright (C) <2010> Nokia Corporation
+ * Copyright (C) <2011> Intel Corporation
+ *
+ * Copyright (C) <2010> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <gst/base/base.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/video/video.h>
+ #include "gstvideoparserselements.h"
+ #include "gsth264parse.h"
+
+ #include <string.h>
+
+ GST_DEBUG_CATEGORY (h264_parse_debug);
+ #define GST_CAT_DEFAULT h264_parse_debug
+
+ #define DEFAULT_CONFIG_INTERVAL (0)
+ #define DEFAULT_UPDATE_TIMECODE FALSE
+
+ /* GObject property IDs */
+ enum
+ {
+ PROP_0,
+ PROP_CONFIG_INTERVAL,
+ PROP_UPDATE_TIMECODE,
+ };
+
+ /* output stream-format selected during negotiation */
+ enum
+ {
+ GST_H264_PARSE_FORMAT_NONE,
+ GST_H264_PARSE_FORMAT_AVC,
+ GST_H264_PARSE_FORMAT_BYTE,
+ GST_H264_PARSE_FORMAT_AVC3
+ };
+
+ /* output alignment: per-NAL or per-access-unit */
+ enum
+ {
+ GST_H264_PARSE_ALIGN_NONE = 0,
+ GST_H264_PARSE_ALIGN_NAL,
+ GST_H264_PARSE_ALIGN_AU
+ };
+
+ /* bitmask of what the parser has seen so far in the stream */
+ enum
+ {
+ GST_H264_PARSE_STATE_GOT_SPS = 1 << 0,
+ GST_H264_PARSE_STATE_GOT_PPS = 1 << 1,
+ GST_H264_PARSE_STATE_GOT_SLICE = 1 << 2,
+
+ GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS = (GST_H264_PARSE_STATE_GOT_SPS |
+ GST_H264_PARSE_STATE_GOT_PPS),
+ GST_H264_PARSE_STATE_VALID_PICTURE =
+ (GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS |
+ GST_H264_PARSE_STATE_GOT_SLICE)
+ };
+
+ /* lifecycle of cached SEI-derived info (e.g. mastering display, CLL) */
+ enum
+ {
+ GST_H264_PARSE_SEI_EXPIRED = 0,
+ GST_H264_PARSE_SEI_ACTIVE = 1,
+ GST_H264_PARSE_SEI_PARSED = 2,
+ };
+
+ /* TRUE iff all bits of expected_state are set in parse->state */
+ #define GST_H264_PARSE_STATE_VALID(parse, expected_state) \
+ (((parse)->state & (expected_state)) == (expected_state))
+
+ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h264"));
+
+ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) true, "
+ "stream-format=(string) { avc, avc3, byte-stream }, "
+ "alignment=(string) { au, nal }"));
+
+ #define parent_class gst_h264_parse_parent_class
+ G_DEFINE_TYPE (GstH264Parse, gst_h264_parse, GST_TYPE_BASE_PARSE);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (h264parse, "h264parse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_H264_PARSE,
+ videoparsers_element_init (plugin));
+
+ static void gst_h264_parse_finalize (GObject * object);
+
+ static gboolean gst_h264_parse_start (GstBaseParse * parse);
+ static gboolean gst_h264_parse_stop (GstBaseParse * parse);
+ static GstFlowReturn gst_h264_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+ static GstFlowReturn gst_h264_parse_parse_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+ static GstFlowReturn gst_h264_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+
+ static void gst_h264_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_h264_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static gboolean gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps);
+ static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse,
+ GstCaps * filter);
+ static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
+ static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
+ GstEvent * event);
+ static void gst_h264_parse_update_src_caps (GstH264Parse * h264parse,
+ GstCaps * caps);
+
+ /* Class init: installs the config-interval and update-timecode properties,
+  * overrides the GstBaseParse vfuncs and registers pad templates/metadata. */
+ static void
+ gst_h264_parse_class_init (GstH264ParseClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (h264_parse_debug, "h264parse", 0, "h264 parser");
+
+ gobject_class->finalize = gst_h264_parse_finalize;
+ gobject_class->set_property = gst_h264_parse_set_property;
+ gobject_class->get_property = gst_h264_parse_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_CONFIG_INTERVAL,
+ g_param_spec_int ("config-interval",
+ "SPS PPS Send Interval",
+ "Send SPS and PPS Insertion Interval in seconds (sprop parameter sets "
+ "will be multiplexed in the data stream when detected.) "
+ "(0 = disabled, -1 = send with every IDR frame)",
+ -1, 3600, DEFAULT_CONFIG_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstH264Parse:update-timecode:
+ *
+ * If the stream contains Picture Timing SEI, update their timecode values
+ * using upstream GstVideoTimeCodeMeta. However, if there are no Picture
+ * Timing SEI in bitstream, this property will not insert the SEI into the
+ * bitstream - it only modifies existing ones.
+ * Moreover, even if both GstVideoTimeCodeMeta and Picture Timing SEI
+ * are present, if pic_struct_present_flag of VUI is equal to zero,
+ * timecode values will not be updated as there is not enough information
+ * in the stream to do so.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_UPDATE_TIMECODE,
+ g_param_spec_boolean ("update-timecode",
+ "Update Timecode",
+ "Update time code values in Picture Timing SEI if GstVideoTimeCodeMeta "
+ "is attached to incoming buffer and also Picture Timing SEI exists "
+ "in the bitstream. To make this property work, SPS must contain "
+ "VUI and pic_struct_present_flag of VUI must be non-zero",
+ DEFAULT_UPDATE_TIMECODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /* Override BaseParse vfuncs */
+ parse_class->start = GST_DEBUG_FUNCPTR (gst_h264_parse_start);
+ parse_class->stop = GST_DEBUG_FUNCPTR (gst_h264_parse_stop);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_h264_parse_handle_frame);
+ parse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
+ parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
+ parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
+ parse_class->sink_event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
+ parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
+
+ gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
+ gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class, "H.264 parser",
+ "Codec/Parser/Converter/Video",
+ "Parses H.264 streams",
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
+ }
+
+ /* Instance init: creates the output adapter, disables baseparse timestamp
+  * interpolation/inference (the parser computes its own timestamps), and
+  * relaxes sink-pad caps acceptance to intersection/template checks. */
+ static void
+ gst_h264_parse_init (GstH264Parse * h264parse)
+ {
+ h264parse->frame_out = gst_adapter_new ();
+ gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (h264parse), FALSE);
+ gst_base_parse_set_infer_ts (GST_BASE_PARSE (h264parse), FALSE);
+ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (h264parse));
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (h264parse));
+
+ h264parse->aud_needed = TRUE;
+ h264parse->aud_insert = TRUE;
+ h264parse->update_timecode = DEFAULT_UPDATE_TIMECODE;
+ }
+
+ /* Finalize: releases the output adapter and chains up to the parent. */
+ static void
+ gst_h264_parse_finalize (GObject * object)
+ {
+ GstH264Parse *h264parse = GST_H264_PARSE (object);
+
+ g_object_unref (h264parse->frame_out);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* Reset all per-frame parsing state (positions of IDR/SEI NALs, frame
+  * classification flags) and clear the output adapter. Called after each
+  * frame is finished and from the stream-level reset. */
+ static void
+ gst_h264_parse_reset_frame (GstH264Parse * h264parse)
+ {
+ GST_DEBUG_OBJECT (h264parse, "reset frame");
+
+ /* done parsing; reset state */
+ h264parse->current_off = -1;
+
+ h264parse->update_caps = FALSE;
+ h264parse->idr_pos = -1;
+ h264parse->sei_pos = -1;
+ h264parse->pic_timing_sei_pos = -1;
+ h264parse->pic_timing_sei_size = -1;
+ h264parse->keyframe = FALSE;
+ h264parse->predicted = FALSE;
+ h264parse->bidirectional = FALSE;
+ h264parse->header = FALSE;
+ h264parse->frame_start = FALSE;
+ h264parse->have_sps_in_frame = FALSE;
+ h264parse->have_pps_in_frame = FALSE;
+ gst_adapter_clear (h264parse->frame_out);
+ }
+
+ /* Reset all per-stream state: video geometry/framerate/PAR/colorimetry,
+  * negotiated format and alignment, cached codec data, stored SPS/PPS NALs
+  * and SEI-derived HDR metadata. Also resets per-frame state. */
+ static void
+ gst_h264_parse_reset_stream_info (GstH264Parse * h264parse)
+ {
+ gint i;
+
+ h264parse->width = 0;
+ h264parse->height = 0;
+ h264parse->fps_num = 0;
+ h264parse->fps_den = 0;
+ h264parse->upstream_par_n = -1;
+ h264parse->upstream_par_d = -1;
+ h264parse->parsed_par_n = 0;
+ h264parse->parsed_par_d = 0;
+ h264parse->parsed_colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ h264parse->parsed_colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+ h264parse->parsed_colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ h264parse->parsed_colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+
+ h264parse->have_pps = FALSE;
+ h264parse->have_sps = FALSE;
+
+ h264parse->multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ h264parse->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ h264parse->first_in_bundle = TRUE;
+
+ h264parse->align = GST_H264_PARSE_ALIGN_NONE;
+ h264parse->format = GST_H264_PARSE_FORMAT_NONE;
+
+ h264parse->transform = FALSE;
+ /* default AVC NAL length-field size; overridden by codec_data when set */
+ h264parse->nal_length_size = 4;
+ h264parse->packetized = FALSE;
+ h264parse->push_codec = FALSE;
+ h264parse->first_frame = TRUE;
+
+ gst_buffer_replace (&h264parse->codec_data, NULL);
+ gst_buffer_replace (&h264parse->codec_data_in, NULL);
+
+ gst_h264_parse_reset_frame (h264parse);
+
+ /* drop all cached parameter-set NAL buffers */
+ for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++)
+ gst_buffer_replace (&h264parse->sps_nals[i], NULL);
+ for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++)
+ gst_buffer_replace (&h264parse->pps_nals[i], NULL);
+
+ gst_video_mastering_display_info_init (&h264parse->mastering_display_info);
+ h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_EXPIRED;
+
+ gst_video_content_light_level_init (&h264parse->content_light_level);
+ h264parse->content_light_level_state = GST_H264_PARSE_SEI_EXPIRED;
+ }
+
+ /* Full reset: timestamp tracking, pending force-key-unit events and
+  * discontinuity flags, then the complete per-stream state. Used on
+  * start/stop and stream changes. */
+ static void
+ gst_h264_parse_reset (GstH264Parse * h264parse)
+ {
+ h264parse->last_report = GST_CLOCK_TIME_NONE;
+
+ h264parse->dts = GST_CLOCK_TIME_NONE;
+ h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
+ h264parse->do_ts = TRUE;
+
+ h264parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ gst_event_replace (&h264parse->force_key_unit_event, NULL);
+
+ h264parse->discont = FALSE;
+ h264parse->discard_bidirectional = FALSE;
+ h264parse->marker = FALSE;
+
+ gst_h264_parse_reset_stream_info (h264parse);
+ }
+
+ /* GstBaseParse::start — allocate the NAL parser and reset all state.
+  * The minimum frame size of 4 covers a start code / NAL length field.
+  * Always returns TRUE. */
+ static gboolean
+ gst_h264_parse_start (GstBaseParse * parse)
+ {
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "start");
+ gst_h264_parse_reset (h264parse);
+
+ h264parse->nalparser = gst_h264_nal_parser_new ();
+
+ h264parse->state = 0;
+ h264parse->dts = GST_CLOCK_TIME_NONE;
+ h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
+ h264parse->sei_pic_struct_pres_flag = FALSE;
+ h264parse->sei_pic_struct = 0;
+ h264parse->field_pic_flag = 0;
+ h264parse->aud_needed = TRUE;
+ h264parse->aud_insert = FALSE;
+
+ gst_base_parse_set_min_frame_size (parse, 4);
+
+ return TRUE;
+ }
+
+ /* GstBaseParse::stop — reset state and free the NAL parser allocated in
+  * start(). Always returns TRUE. */
+ static gboolean
+ gst_h264_parse_stop (GstBaseParse * parse)
+ {
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "stop");
+ gst_h264_parse_reset (h264parse);
+
+ gst_h264_nal_parser_free (h264parse->nalparser);
+
+ return TRUE;
+ }
+
+ /* Map an internal format (@format TRUE) or alignment (@format FALSE) code
+  * to the caps string used in "stream-format"/"alignment" fields.
+  * Returns "none" for unknown codes. */
+ static const gchar *
+ gst_h264_parse_get_string (GstH264Parse * parse, gboolean format, gint code)
+ {
+ if (format) {
+ switch (code) {
+ case GST_H264_PARSE_FORMAT_AVC:
+ return "avc";
+ case GST_H264_PARSE_FORMAT_BYTE:
+ return "byte-stream";
+ case GST_H264_PARSE_FORMAT_AVC3:
+ return "avc3";
+ default:
+ return "none";
+ }
+ } else {
+ switch (code) {
+ case GST_H264_PARSE_ALIGN_NAL:
+ return "nal";
+ case GST_H264_PARSE_ALIGN_AU:
+ return "au";
+ default:
+ return "none";
+ }
+ }
+ }
+
+ /* Extract the stream-format and alignment codes from fixed @caps into
+  * *@format / *@align (either may be NULL). Outputs default to the *_NONE
+  * values when the field is absent or unrecognized. */
+ static void
+ gst_h264_parse_format_from_caps (GstCaps * caps, guint * format, guint * align)
+ {
+ if (format)
+ *format = GST_H264_PARSE_FORMAT_NONE;
+
+ if (align)
+ *align = GST_H264_PARSE_ALIGN_NONE;
+
+ /* only fixed caps have a single unambiguous structure to read */
+ g_return_if_fail (gst_caps_is_fixed (caps));
+
+ GST_DEBUG ("parsing caps: %" GST_PTR_FORMAT, caps);
+
+ if (caps && gst_caps_get_size (caps) > 0) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ const gchar *str = NULL;
+
+ if (format) {
+ if ((str = gst_structure_get_string (s, "stream-format"))) {
+ if (strcmp (str, "avc") == 0)
+ *format = GST_H264_PARSE_FORMAT_AVC;
+ else if (strcmp (str, "byte-stream") == 0)
+ *format = GST_H264_PARSE_FORMAT_BYTE;
+ else if (strcmp (str, "avc3") == 0)
+ *format = GST_H264_PARSE_FORMAT_AVC3;
+ }
+ }
+
+ if (align) {
+ if ((str = gst_structure_get_string (s, "alignment"))) {
+ if (strcmp (str, "au") == 0)
+ *align = GST_H264_PARSE_ALIGN_AU;
+ else if (strcmp (str, "nal") == 0)
+ *align = GST_H264_PARSE_ALIGN_NAL;
+ }
+ }
+ }
+ }
+
+ /* Check downstream caps to configure output format and alignment.
+  * Priority: if downstream directly accepts the (fixed) upstream caps,
+  * take format/alignment from those and allow passthrough; otherwise
+  * fixate the downstream-allowed caps and take them from there; otherwise
+  * fall back to byte-stream / AU. Sets h264parse->format, ->align,
+  * ->can_passthrough and ->transform (whether conversion is needed). */
+ static void
+ gst_h264_parse_negotiate (GstH264Parse * h264parse, gint in_format,
+ GstCaps * in_caps)
+ {
+ GstCaps *caps;
+ guint format = h264parse->format;
+ guint align = h264parse->align;
+
+ g_return_if_fail ((in_caps == NULL) || gst_caps_is_fixed (in_caps));
+
+ caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
+ GST_DEBUG_OBJECT (h264parse, "allowed caps: %" GST_PTR_FORMAT, caps);
+
+ /* concentrate on leading structure, since decodebin parser
+ * capsfilter always includes parser template caps */
+ if (caps) {
+ caps = gst_caps_truncate (caps);
+ GST_DEBUG_OBJECT (h264parse, "negotiating with caps: %" GST_PTR_FORMAT,
+ caps);
+ }
+
+ h264parse->can_passthrough = FALSE;
+
+ if (in_caps && caps) {
+ if (gst_caps_can_intersect (in_caps, caps)) {
+ GST_DEBUG_OBJECT (h264parse, "downstream accepts upstream caps");
+ gst_h264_parse_format_from_caps (in_caps, &format, &align);
+ gst_caps_unref (caps);
+ caps = NULL;
+ h264parse->can_passthrough = TRUE;
+ }
+ }
+
+ /* FIXME We could fail the negotiation immediately if caps are empty */
+ if (caps && !gst_caps_is_empty (caps)) {
+ /* fixate to avoid ambiguity with lists when parsing */
+ caps = gst_caps_fixate (caps);
+ gst_h264_parse_format_from_caps (caps, &format, &align);
+ }
+
+ /* default */
+ if (!format)
+ format = GST_H264_PARSE_FORMAT_BYTE;
+ if (!align)
+ align = GST_H264_PARSE_ALIGN_AU;
+
+ GST_DEBUG_OBJECT (h264parse, "selected format %s, alignment %s",
+ gst_h264_parse_get_string (h264parse, TRUE, format),
+ gst_h264_parse_get_string (h264parse, FALSE, align));
+
+ h264parse->format = format;
+ h264parse->align = align;
+
+ /* conversion needed whenever format changes or AUs must be assembled */
+ h264parse->transform = in_format != h264parse->format ||
+ align == GST_H264_PARSE_ALIGN_AU;
+
+ if (caps)
+ gst_caps_unref (caps);
+ }
+
+ /* Wrap a raw NAL payload (@data/@size) into a new GstBuffer with the
+  * framing required by @format: a big-endian length field of
+  * nal_length_size bytes for avc/avc3, or a 4-byte 0x00000001 start code
+  * for byte-stream. The buffer is over-allocated at 4+size and trimmed to
+  * nl+size. Returns a new buffer owned by the caller. */
+ static GstBuffer *
+ gst_h264_parse_wrap_nal (GstH264Parse * h264parse, guint format, guint8 * data,
+ guint size)
+ {
+ GstBuffer *buf;
+ guint nl = h264parse->nal_length_size;
+ guint32 tmp;
+
+ GST_DEBUG_OBJECT (h264parse, "nal length %d", size);
+
+ buf = gst_buffer_new_allocate (NULL, 4 + size, NULL);
+ if (format == GST_H264_PARSE_FORMAT_AVC
+ || format == GST_H264_PARSE_FORMAT_AVC3) {
+ /* shift so the nl meaningful bytes end up first in big-endian order */
+ tmp = GUINT32_TO_BE (size << (32 - 8 * nl));
+ } else {
+ /* HACK: nl should always be 4 here, otherwise this won't work.
+ * There are legit cases where nl in avc stream is 2, but byte-stream
+ * SC is still always 4 bytes. */
+ nl = 4;
+ tmp = GUINT32_TO_BE (1);
+ }
+
+ gst_buffer_fill (buf, 0, &tmp, sizeof (guint32));
+ gst_buffer_fill (buf, nl, data, size);
+ gst_buffer_set_size (buf, size + nl);
+
+ return buf;
+ }
+
+ /* Cache a parameter-set NAL (SPS/subset-SPS/PPS) by its @id in the
+  * corresponding store so it can be re-sent later (e.g. config-interval,
+  * codec_data). Other NAL types and out-of-range ids are ignored.
+  * Replaces any previously stored NAL with the same id. */
+ static void
+ gst_h264_parser_store_nal (GstH264Parse * h264parse, guint id,
+ GstH264NalUnitType naltype, GstH264NalUnit * nalu)
+ {
+ GstBuffer *buf, **store;
+ guint size = nalu->size, store_size;
+
+ if (naltype == GST_H264_NAL_SPS || naltype == GST_H264_NAL_SUBSET_SPS) {
+ store_size = GST_H264_MAX_SPS_COUNT;
+ store = h264parse->sps_nals;
+ GST_DEBUG_OBJECT (h264parse, "storing sps %u", id);
+ } else if (naltype == GST_H264_NAL_PPS) {
+ store_size = GST_H264_MAX_PPS_COUNT;
+ store = h264parse->pps_nals;
+ GST_DEBUG_OBJECT (h264parse, "storing pps %u", id);
+ } else
+ return;
+
+ if (id >= store_size) {
+ GST_DEBUG_OBJECT (h264parse, "unable to store nal, id out-of-range %d", id);
+ return;
+ }
+
+ buf = gst_buffer_new_allocate (NULL, size, NULL);
+ gst_buffer_fill (buf, 0, nalu->data + nalu->offset, size);
+
+ /* Indicate that buffer contain a header needed for decoding */
+ if (naltype == GST_H264_NAL_SPS || naltype == GST_H264_NAL_PPS)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+
+ if (store[id])
+ gst_buffer_unref (store[id]);
+
+ store[id] = buf;
+ }
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* Human-readable names for NAL unit types, indexed by GstH264NalUnitType;
+  * only compiled in when debug logging is enabled. */
+ static const gchar *nal_names[] = {
+ "Unknown",
+ "Slice",
+ "Slice DPA",
+ "Slice DPB",
+ "Slice DPC",
+ "Slice IDR",
+ "SEI",
+ "SPS",
+ "PPS",
+ "AU delimiter",
+ "Sequence End",
+ "Stream End",
+ "Filler Data",
+ "SPS extension",
+ "Prefix",
+ "SPS Subset",
+ "Depth Parameter Set",
+ "Reserved", "Reserved",
+ "Slice Aux Unpartitioned",
+ "Slice Extension",
+ "Slice Depth/3D-AVC Extension"
+ };
+
+ /* Return the debug name for @nal_type, or "Invalid" when out of range. */
+ static const gchar *
+ _nal_name (GstH264NalUnitType nal_type)
+ {
+ if (nal_type <= GST_H264_NAL_SLICE_DEPTH)
+ return nal_names[nal_type];
+ return "Invalid";
+ }
+ #endif
+
+ /* Handle an ITU-T T.35 registered user data SEI payload: reads the
+  * 16-bit provider code and forwards the rest to the shared video user-data
+  * parser (e.g. closed captions). Only the US country code is supported;
+  * payloads shorter than the 2-byte provider code are ignored. */
+ static void
+ gst_h264_parse_process_sei_user_data (GstH264Parse * h264parse,
+ GstH264RegisteredUserData * rud)
+ {
+ guint16 provider_code;
+ GstByteReader br;
+ GstVideoParseUtilsField field = GST_VIDEO_PARSE_UTILS_FIELD_1;
+
+ /* only US country code is currently supported */
+ switch (rud->country_code) {
+ case ITU_T_T35_COUNTRY_CODE_US:
+ break;
+ default:
+ GST_LOG_OBJECT (h264parse, "Unsupported country code %d",
+ rud->country_code);
+ return;
+ }
+
+ if (rud->data == NULL || rud->size < 2)
+ return;
+
+ gst_byte_reader_init (&br, rud->data, rud->size);
+
+ /* size >= 2 was checked above, so the unchecked read is safe */
+ provider_code = gst_byte_reader_get_uint16_be_unchecked (&br);
+
+ /* attribute the data to the second field for bottom-field pictures */
+ if (h264parse->sei_pic_struct ==
+ (guint8) GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD)
+ field = GST_VIDEO_PARSE_UTILS_FIELD_2;
+ gst_video_parse_user_data ((GstElement *) h264parse, &h264parse->user_data,
+ &br, field, provider_code);
+
+ }
+
+ /* Parse all SEI messages in @nalu and update parser state accordingly:
+  * picture timing (pic_struct, timecode update bookkeeping), registered
+  * user data, buffering period, recovery point (treated as keyframe),
+  * stereo/frame-packing info (multiview caps), mastering display colour
+  * volume and content light level (HDR caps). Unsupported payloads are
+  * only logged. */
+ static void
+ gst_h264_parse_process_sei (GstH264Parse * h264parse, GstH264NalUnit * nalu)
+ {
+ GstH264SEIMessage sei;
+ GstH264NalParser *nalparser = h264parse->nalparser;
+ GstH264ParserResult pres;
+ GArray *messages;
+ guint i;
+
+ pres = gst_h264_parser_parse_sei (nalparser, nalu, &messages);
+ if (pres != GST_H264_PARSER_OK)
+ GST_WARNING_OBJECT (h264parse, "failed to parse one or more SEI message");
+
+ /* Even if pres != GST_H264_PARSER_OK, some message could have been parsed and
+ * stored in messages.
+ */
+ for (i = 0; i < messages->len; i++) {
+ sei = g_array_index (messages, GstH264SEIMessage, i);
+ switch (sei.payloadType) {
+ case GST_H264_SEI_PIC_TIMING:
+ {
+ guint j;
+ h264parse->sei_pic_struct_pres_flag =
+ sei.payload.pic_timing.pic_struct_present_flag;
+ h264parse->sei_cpb_removal_delay =
+ sei.payload.pic_timing.cpb_removal_delay;
+ if (h264parse->sei_pic_struct_pres_flag) {
+ h264parse->sei_pic_struct = sei.payload.pic_timing.pic_struct;
+ }
+
+ h264parse->num_clock_timestamp = 0;
+ memcpy (&h264parse->pic_timing_sei, &sei.payload.pic_timing,
+ sizeof (GstH264PicTiming));
+
+ for (j = 0; j < 3; j++) {
+ if (sei.payload.pic_timing.clock_timestamp_flag[j]) {
+ h264parse->num_clock_timestamp++;
+ }
+ }
+
+ if (h264parse->sei_pic_struct_pres_flag && h264parse->update_timecode) {
+ /* FIXME: add support multiple messages in a SEI nalu.
+ * Updating only this SEI message and preserving the others
+ * is a bit complicated */
+ if (messages->len == 1) {
+ /* remember where this SEI NAL sits so its timecode can be
+ * rewritten in-place later */
+ h264parse->pic_timing_sei_pos = nalu->sc_offset;
+ h264parse->pic_timing_sei_size =
+ nalu->size + (nalu->offset - nalu->sc_offset);
+ }
+ }
+
+ GST_LOG_OBJECT (h264parse, "pic timing updated");
+ break;
+ }
+ case GST_H264_SEI_REGISTERED_USER_DATA:
+ gst_h264_parse_process_sei_user_data (h264parse,
+ &sei.payload.registered_user_data);
+ break;
+ case GST_H264_SEI_BUF_PERIOD:
+ if (h264parse->ts_trn_nb == GST_CLOCK_TIME_NONE ||
+ h264parse->dts == GST_CLOCK_TIME_NONE)
+ h264parse->ts_trn_nb = 0;
+ else
+ h264parse->ts_trn_nb = h264parse->dts;
+
+ GST_LOG_OBJECT (h264parse,
+ "new buffering period; ts_trn_nb updated: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (h264parse->ts_trn_nb));
+ break;
+
+ /* A recovery point marks a position from which decoding can
+ * resume cleanly; treat it as a keyframe indicator */
+ case GST_H264_SEI_RECOVERY_POINT:
+ GST_LOG_OBJECT (h264parse, "recovery point found: %u %u %u %u",
+ sei.payload.recovery_point.recovery_frame_cnt,
+ sei.payload.recovery_point.exact_match_flag,
+ sei.payload.recovery_point.broken_link_flag,
+ sei.payload.recovery_point.changing_slice_group_idc);
+ h264parse->keyframe = TRUE;
+ break;
+
+ /* Stereo video info feeds the multiview mode/flags exposed in
+ * the output caps */
+ case GST_H264_SEI_STEREO_VIDEO_INFO:{
+ GstVideoMultiviewMode mview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ GstVideoMultiviewFlags mview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+
+ GST_LOG_OBJECT (h264parse, "Stereo video information %u %u %u %u %u %u",
+ sei.payload.stereo_video_info.field_views_flag,
+ sei.payload.stereo_video_info.top_field_is_left_view_flag,
+ sei.payload.stereo_video_info.current_frame_is_left_view_flag,
+ sei.payload.stereo_video_info.next_frame_is_second_view_flag,
+ sei.payload.stereo_video_info.left_view_self_contained_flag,
+ sei.payload.stereo_video_info.right_view_self_contained_flag);
+
+ if (sei.payload.stereo_video_info.field_views_flag) {
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
+ if (!sei.payload.stereo_video_info.top_field_is_left_view_flag)
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ } else {
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
+ if (sei.payload.stereo_video_info.next_frame_is_second_view_flag) {
+ /* Mark current frame as first in bundle */
+ h264parse->first_in_bundle = TRUE;
+ if (!sei.payload.stereo_video_info.current_frame_is_left_view_flag)
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ }
+ }
+ if (mview_mode != h264parse->multiview_mode ||
+ mview_flags != h264parse->multiview_flags) {
+ h264parse->multiview_mode = mview_mode;
+ h264parse->multiview_flags = mview_flags;
+ /* output caps need to be changed */
+ gst_h264_parse_update_src_caps (h264parse, NULL);
+ }
+ break;
+ }
+ case GST_H264_SEI_FRAME_PACKING:{
+ GstVideoMultiviewMode mview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ GstVideoMultiviewFlags mview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+
+ GST_LOG_OBJECT (h264parse,
+ "frame packing arrangement message: id %u cancelled %u "
+ "type %u quincunx %u content_interpretation %d flip %u "
+ "right_first %u field_views %u is_frame0 %u "
+ "frame0_self_contained %u frame1_self_contained %u "
+ "frame0_grid (%u, %u) frame1_grid (%u, %u) "
+ "repetition_period %" G_GUINT16_FORMAT,
+ sei.payload.frame_packing.frame_packing_id,
+ sei.payload.frame_packing.frame_packing_cancel_flag,
+ sei.payload.frame_packing.frame_packing_type,
+ sei.payload.frame_packing.quincunx_sampling_flag,
+ sei.payload.frame_packing.content_interpretation_type,
+ sei.payload.frame_packing.spatial_flipping_flag,
+ sei.payload.frame_packing.frame0_flipped_flag,
+ sei.payload.frame_packing.field_views_flag,
+ sei.payload.frame_packing.current_frame_is_frame0_flag,
+ sei.payload.frame_packing.frame0_self_contained_flag,
+ sei.payload.frame_packing.frame1_self_contained_flag,
+ sei.payload.frame_packing.frame0_grid_position_x,
+ sei.payload.frame_packing.frame0_grid_position_y,
+ sei.payload.frame_packing.frame1_grid_position_x,
+ sei.payload.frame_packing.frame1_grid_position_y,
+ sei.payload.frame_packing.frame_packing_repetition_period);
+
+ /* Only IDs from 0->255 and 512->2^31-1 are valid. Ignore others */
+ if ((sei.payload.frame_packing.frame_packing_id >= 256 &&
+ sei.payload.frame_packing.frame_packing_id < 512) ||
+ (sei.payload.frame_packing.frame_packing_id >= (1U << 31)))
+ break; /* ignore */
+
+ if (!sei.payload.frame_packing.frame_packing_cancel_flag) {
+ /* Cancel flag sets things back to no-info */
+
+ if (sei.payload.frame_packing.content_interpretation_type == 2)
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+
+ switch (sei.payload.frame_packing.frame_packing_type) {
+ case 0:
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD;
+ break;
+ case 1:
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED;
+ break;
+ case 2:
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
+ break;
+ case 3:
+ if (sei.payload.frame_packing.quincunx_sampling_flag)
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX;
+ else
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
+ if (sei.payload.frame_packing.spatial_flipping_flag) {
+ /* One of the views is flopped. */
+ if (sei.payload.frame_packing.frame0_flipped_flag !=
+ ! !(mview_flags &
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST))
+ /* the left view is flopped */
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED;
+ else
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED;
+ }
+ break;
+ case 4:
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM;
+ if (sei.payload.frame_packing.spatial_flipping_flag) {
+ /* One of the views is flipped, */
+ if (sei.payload.frame_packing.frame0_flipped_flag !=
+ ! !(mview_flags &
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST))
+ /* the left view is flipped */
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED;
+ else
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED;
+ }
+ break;
+ case 5:
+ if (sei.payload.frame_packing.content_interpretation_type == 0)
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME;
+ else
+ mview_mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
+ break;
+ default:
+ GST_DEBUG_OBJECT (h264parse, "Invalid frame packing type %u",
+ sei.payload.frame_packing.frame_packing_type);
+ break;
+ }
+ }
+
+ if (mview_mode != h264parse->multiview_mode ||
+ mview_flags != h264parse->multiview_flags) {
+ h264parse->multiview_mode = mview_mode;
+ h264parse->multiview_flags = mview_flags;
+ /* output caps need to be changed */
+ gst_h264_parse_update_src_caps (h264parse, NULL);
+ }
+ break;
+ }
+ case GST_H264_SEI_MASTERING_DISPLAY_COLOUR_VOLUME:
+ {
+ /* Precision defined by spec.
+ * See D.2.29 Mastering display colour volume SEI message semantics */
+ GstVideoMasteringDisplayInfo minfo;
+ gint j, k;
+
+ /* GstVideoMasteringDisplayInfo::display_primaries is rgb order but
+ * AVC uses gbr order.
+ * See spec D.2.29 display_primaries_x and display_primaries_y
+ */
+ for (j = 0, k = 2; j < G_N_ELEMENTS (minfo.display_primaries); j++, k++) {
+ minfo.display_primaries[j].x =
+ sei.payload.
+ mastering_display_colour_volume.display_primaries_x[k % 3];
+ minfo.display_primaries[j].y =
+ sei.payload.
+ mastering_display_colour_volume.display_primaries_y[k % 3];
+ }
+
+ minfo.white_point.x =
+ sei.payload.mastering_display_colour_volume.white_point_x;
+ minfo.white_point.y =
+ sei.payload.mastering_display_colour_volume.white_point_y;
+ minfo.max_display_mastering_luminance =
+ sei.payload.mastering_display_colour_volume.
+ max_display_mastering_luminance;
+ minfo.min_display_mastering_luminance =
+ sei.payload.mastering_display_colour_volume.
+ min_display_mastering_luminance;
+
+ GST_LOG_OBJECT (h264parse, "mastering display info found: "
+ "Red(%u, %u) "
+ "Green(%u, %u) "
+ "Blue(%u, %u) "
+ "White(%u, %u) "
+ "max_luminance(%u) "
+ "min_luminance(%u) ",
+ minfo.display_primaries[0].x, minfo.display_primaries[0].y,
+ minfo.display_primaries[1].x, minfo.display_primaries[1].y,
+ minfo.display_primaries[2].x, minfo.display_primaries[2].y,
+ minfo.white_point.x, minfo.white_point.y,
+ minfo.max_display_mastering_luminance,
+ minfo.min_display_mastering_luminance);
+
+ /* flag a caps update when the info appears or changes */
+ if (h264parse->mastering_display_info_state ==
+ GST_H264_PARSE_SEI_EXPIRED) {
+ h264parse->update_caps = TRUE;
+ } else if (!gst_video_mastering_display_info_is_equal
+ (&h264parse->mastering_display_info, &minfo)) {
+ h264parse->update_caps = TRUE;
+ }
+
+ h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_PARSED;
+ h264parse->mastering_display_info = minfo;
+
+ break;
+ }
+ case GST_H264_SEI_CONTENT_LIGHT_LEVEL:
+ {
+ GstVideoContentLightLevel cll;
+
+ cll.max_content_light_level =
+ sei.payload.content_light_level.max_content_light_level;
+ cll.max_frame_average_light_level =
+ sei.payload.content_light_level.max_pic_average_light_level;
+
+ GST_LOG_OBJECT (h264parse, "content light level found: "
+ "maxCLL:(%u), maxFALL:(%u)", cll.max_content_light_level,
+ cll.max_frame_average_light_level);
+
+ /* flag a caps update when the info appears or changes */
+ if (h264parse->content_light_level_state == GST_H264_PARSE_SEI_EXPIRED) {
+ h264parse->update_caps = TRUE;
+ } else if (cll.max_content_light_level !=
+ h264parse->content_light_level.max_content_light_level ||
+ cll.max_frame_average_light_level !=
+ h264parse->content_light_level.max_frame_average_light_level) {
+ h264parse->update_caps = TRUE;
+ }
+
+ h264parse->content_light_level_state = GST_H264_PARSE_SEI_PARSED;
+ h264parse->content_light_level = cll;
+
+ break;
+ }
+ default:{
+ gint payload_type = sei.payloadType;
+
+ /* unwrap the real payload type of unhandled SEI for logging */
+ if (payload_type == GST_H264_SEI_UNHANDLED_PAYLOAD) {
+ GstH264SEIUnhandledPayload *unhandled =
+ &sei.payload.unhandled_payload;
+ payload_type = unhandled->payloadType;
+ }
+
+ GST_LOG_OBJECT (h264parse, "Unsupported payload type %d", payload_type);
+ break;
+ }
+ }
+ }
+ g_array_free (messages, TRUE);
+ }
+
/* Inspect and act on a single NAL unit of the current frame: update parser
 * state (SPS/PPS/SEI/slice bookkeeping), remember header positions for
 * later codec-data insertion, and — when output format conversion is
 * enabled — collect the re-prefixed NAL into the frame_out adapter.
 *
 * Returns FALSE when the NAL is invalid in the current parse state (or
 * failed to parse) and should be dropped by the caller; TRUE otherwise.
 *
 * caller guarantees 2 bytes of nal payload */
static gboolean
gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
{
  guint nal_type;
  GstH264PPS pps = { 0, };
  GstH264SPS sps = { 0, };
  GstH264NalParser *nalparser = h264parse->nalparser;
  GstH264ParserResult pres;
  GstH264SliceHdr slice;

  /* nothing to do for broken input */
  if (G_UNLIKELY (nalu->size < 2)) {
    GST_DEBUG_OBJECT (h264parse, "not processing nal size %u", nalu->size);
    return TRUE;
  }

  /* we have a peek as well */
  nal_type = nalu->type;

  GST_DEBUG_OBJECT (h264parse, "processing nal of type %u %s, size %u",
      nal_type, _nal_name (nal_type), nalu->size);

  switch (nal_type) {
    case GST_H264_NAL_SUBSET_SPS:
      /* a subset SPS (MVC/SVC) only makes sense once a base SPS was seen */
      if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
        return FALSE;
      pres = gst_h264_parser_parse_subset_sps (nalparser, nalu, &sps);
      /* shared post-parse handling with the plain-SPS case below */
      goto process_sps;

    case GST_H264_NAL_SPS:
      /* reset state, everything else is obsolete */
      h264parse->state &= GST_H264_PARSE_STATE_GOT_PPS;
      pres = gst_h264_parser_parse_sps (nalparser, nalu, &sps);

    process_sps:
      /* arranged for a fallback sps.id, so use that one and only warn */
      if (pres != GST_H264_PARSER_OK) {
        GST_WARNING_OBJECT (h264parse, "failed to parse SPS:");
        /* still flag got-sps so downstream NALs are not rejected outright */
        h264parse->state |= GST_H264_PARSE_STATE_GOT_SPS;
        h264parse->header = TRUE;
        return FALSE;
      }

      GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
      h264parse->update_caps = TRUE;
      h264parse->have_sps = TRUE;
      h264parse->have_sps_in_frame = TRUE;
      if (h264parse->push_codec && h264parse->have_pps) {
        /* SPS and PPS found in stream before the first pre_push_frame, no need
         * to forcibly push at start */
        GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
        h264parse->push_codec = FALSE;
        h264parse->have_sps = FALSE;
        h264parse->have_pps = FALSE;
      }

      /* keep a copy of the SPS NAL keyed by its id for codec_data building */
      gst_h264_parser_store_nal (h264parse, sps.id, nal_type, nalu);
      gst_h264_sps_clear (&sps);
      h264parse->state |= GST_H264_PARSE_STATE_GOT_SPS;
      h264parse->header = TRUE;
      break;
    case GST_H264_NAL_PPS:
      /* expected state: got-sps */
      h264parse->state &= GST_H264_PARSE_STATE_GOT_SPS;
      if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
        return FALSE;

      pres = gst_h264_parser_parse_pps (nalparser, nalu, &pps);
      /* arranged for a fallback pps.id, so use that one and only warn */
      if (pres != GST_H264_PARSER_OK) {
        GST_WARNING_OBJECT (h264parse, "failed to parse PPS:");
        if (pres != GST_H264_PARSER_BROKEN_LINK)
          return FALSE;
      }

      /* parameters might have changed, force caps check */
      if (!h264parse->have_pps) {
        GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
        h264parse->update_caps = TRUE;
      }
      h264parse->have_pps = TRUE;
      h264parse->have_pps_in_frame = TRUE;
      if (h264parse->push_codec && h264parse->have_sps) {
        /* SPS and PPS found in stream before the first pre_push_frame, no need
         * to forcibly push at start */
        GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
        h264parse->push_codec = FALSE;
        h264parse->have_sps = FALSE;
        h264parse->have_pps = FALSE;
      }

      /* keep a copy of the PPS NAL keyed by its id for codec_data building */
      gst_h264_parser_store_nal (h264parse, pps.id, nal_type, nalu);
      gst_h264_pps_clear (&pps);
      h264parse->state |= GST_H264_PARSE_STATE_GOT_PPS;
      h264parse->header = TRUE;
      break;
    case GST_H264_NAL_SEI:
      /* expected state: got-sps */
      if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
        return FALSE;

      h264parse->header = TRUE;
      gst_h264_parse_process_sei (h264parse, nalu);
      /* mark SEI pos so codec NALs can be inserted before it if needed */
      if (h264parse->sei_pos == -1) {
        if (h264parse->transform)
          h264parse->sei_pos = gst_adapter_available (h264parse->frame_out);
        else
          h264parse->sei_pos = nalu->sc_offset;
        GST_DEBUG_OBJECT (h264parse, "marking SEI in frame at offset %d",
            h264parse->sei_pos);
      }
      break;

    case GST_H264_NAL_SLICE:
    case GST_H264_NAL_SLICE_DPA:
    case GST_H264_NAL_SLICE_DPB:
    case GST_H264_NAL_SLICE_DPC:
    case GST_H264_NAL_SLICE_IDR:
    case GST_H264_NAL_SLICE_EXT:
      /* expected state: got-sps|got-pps (valid picture headers) */
      h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
      if (!GST_H264_PARSE_STATE_VALID (h264parse,
              GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS))
        return FALSE;

      /* This is similar to the GOT_SLICE state, but is only reset when the
       * AU is complete. This is used to keep track of AU */
      h264parse->picture_start = TRUE;

      /* don't need to parse the whole slice (header) here; the MSB of the
       * first payload byte is the ue(v)-coded first_mb_in_slice == 0 */
      if (*(nalu->data + nalu->offset + nalu->header_bytes) & 0x80) {
        /* means first_mb_in_slice == 0 */
        /* real frame data */
        GST_DEBUG_OBJECT (h264parse, "first_mb_in_slice = 0");
        h264parse->frame_start = TRUE;
      }
      GST_DEBUG_OBJECT (h264parse, "frame start: %i", h264parse->frame_start);
      /* non-MVC slice extensions are not parsed further */
      if (nal_type == GST_H264_NAL_SLICE_EXT && !GST_H264_IS_MVC_NALU (nalu))
        break;

      pres = gst_h264_parser_parse_slice_hdr (nalparser, nalu, &slice,
          FALSE, FALSE);
      GST_DEBUG_OBJECT (h264parse,
          "parse result %d, first MB: %u, slice type: %u",
          pres, slice.first_mb_in_slice, slice.type);
      if (pres == GST_H264_PARSER_OK) {
        /* classify the picture from the slice type for buffer flags */
        if (GST_H264_IS_I_SLICE (&slice) || GST_H264_IS_SI_SLICE (&slice))
          h264parse->keyframe = TRUE;
        else if (GST_H264_IS_P_SLICE (&slice)
            || GST_H264_IS_SP_SLICE (&slice))
          h264parse->predicted = TRUE;
        else if (GST_H264_IS_B_SLICE (&slice))
          h264parse->bidirectional = TRUE;

        h264parse->state |= GST_H264_PARSE_STATE_GOT_SLICE;
        h264parse->field_pic_flag = slice.field_pic_flag;
      }

      /* non-IDR slices without pending codec push need no position marking */
      if (G_LIKELY (nal_type != GST_H264_NAL_SLICE_IDR &&
              !h264parse->push_codec))
        break;

      /* if we need to sneak codec NALs into the stream,
       * this is a good place, so fake it as IDR
       * (which should be at start anyway) */
      /* mark where config needs to go if interval expired */
      /* mind replacement buffer if applicable */
      if (h264parse->idr_pos == -1) {
        if (h264parse->transform)
          h264parse->idr_pos = gst_adapter_available (h264parse->frame_out);
        else
          h264parse->idr_pos = nalu->sc_offset;
        GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
            h264parse->idr_pos);
      }
      /* if SEI precedes (faked) IDR, then we have to insert config there */
      if (h264parse->sei_pos >= 0 && h264parse->idr_pos > h264parse->sei_pos) {
        h264parse->idr_pos = h264parse->sei_pos;
        GST_DEBUG_OBJECT (h264parse, "moved IDR mark to SEI position %d",
            h264parse->idr_pos);
      }
      /* Reset state only on first IDR slice of CVS D.2.29
       * (HDR SEI persistence: PARSED -> ACTIVE -> EXPIRED) */
      if (slice.first_mb_in_slice == 0) {
        if (h264parse->mastering_display_info_state ==
            GST_H264_PARSE_SEI_PARSED)
          h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_ACTIVE;
        else if (h264parse->mastering_display_info_state ==
            GST_H264_PARSE_SEI_ACTIVE)
          h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_EXPIRED;

        if (h264parse->content_light_level_state == GST_H264_PARSE_SEI_PARSED)
          h264parse->content_light_level_state = GST_H264_PARSE_SEI_ACTIVE;
        else if (h264parse->content_light_level_state ==
            GST_H264_PARSE_SEI_ACTIVE)
          h264parse->content_light_level_state = GST_H264_PARSE_SEI_EXPIRED;
      }
      break;
    case GST_H264_NAL_AU_DELIMITER:
      /* Just accumulate AU Delimiter, whether it's before SPS or not */
      pres = gst_h264_parser_parse_nal (nalparser, nalu);
      if (pres != GST_H264_PARSER_OK)
        return FALSE;
      h264parse->aud_needed = FALSE;
      break;
    default:
      /* drop anything before the initial SPS */
      if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
        return FALSE;

      pres = gst_h264_parser_parse_nal (nalparser, nalu);
      if (pres != GST_H264_PARSER_OK)
        return FALSE;
      break;
  }

  /* if AVC output needed, collect properly prefixed nal in adapter,
   * and use that to replace outgoing buffer data later on */
  if (h264parse->transform) {
    GstBuffer *buf;

    GST_LOG_OBJECT (h264parse, "collecting NAL in AVC frame");
    buf = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
        nalu->data + nalu->offset, nalu->size);
    gst_adapter_push (h264parse->frame_out, buf);
  }
  return TRUE;
}
+
+ /* caller guarantees at least 2 bytes of nal payload for each nal
+ * returns TRUE if next_nal indicates that nal terminates an AU */
+ static inline gboolean
+ gst_h264_parse_collect_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
+ {
+ GstH264NalUnitType nal_type = nalu->type;
+ gboolean complete;
+
+ /* determine if AU complete */
+ GST_LOG_OBJECT (h264parse, "next nal type: %d %s (picture started %i)",
+ nal_type, _nal_name (nal_type), h264parse->picture_start);
+
+ /* consider a coded slices (IDR or not) to start a picture,
+ * (so ending the previous one) if first_mb_in_slice == 0
+ * (non-0 is part of previous one) */
+ /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4,
+ * but in practice it works in sane cases, needs not much parsing,
+ * and also works with broken frame_num in NAL
+ * (where spec-wise would fail) */
+ complete = h264parse->picture_start && ((nal_type >= GST_H264_NAL_SEI &&
+ nal_type <= GST_H264_NAL_AU_DELIMITER) ||
+ (nal_type >= 14 && nal_type <= 18));
+
+ /* first_mb_in_slice == 0 considered start of frame */
+ if (nalu->size > nalu->header_bytes)
+ complete |= h264parse->picture_start && (nal_type == GST_H264_NAL_SLICE
+ || nal_type == GST_H264_NAL_SLICE_DPA
+ || nal_type == GST_H264_NAL_SLICE_IDR) &&
+ (nalu->data[nalu->offset + nalu->header_bytes] & 0x80);
+
+ GST_LOG_OBJECT (h264parse, "au complete: %d", complete);
+
+ if (complete)
+ h264parse->picture_start = FALSE;
+
+ return complete;
+ }
+
/* Ready-made byte-stream access unit delimiter NAL (start code + AUD).
 * NOTE(review): presumably prepended to an AU when aud_insert is set,
 * for byte-stream output only — confirm against the pre_push code. */
static guint8 au_delim[6] = {
  0x00, 0x00, 0x00, 0x01,       /* nal prefix (4-byte start code) */
  0x09,                         /* nal unit type = access unit delimiter */
  0xf0                          /* allow any slice type */
};
+
+ static GstFlowReturn
+ gst_h264_parse_handle_frame_packetized (GstBaseParse * parse,
+ GstBaseParseFrame * frame)
+ {
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+ GstBuffer *buffer = frame->buffer;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstH264ParserResult parse_res;
+ GstH264NalUnit nalu;
+ const guint nl = h264parse->nal_length_size;
+ GstMapInfo map;
+ gint left;
+
+ if (nl < 1 || nl > 4) {
+ GST_DEBUG_OBJECT (h264parse, "insufficient data to split input");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+
+ /* need to save buffer from invalidation upon _finish_frame */
+ if (h264parse->split_packetized)
+ buffer = gst_buffer_copy (frame->buffer);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ left = map.size;
+
+ GST_LOG_OBJECT (h264parse,
+ "processing packet buffer of size %" G_GSIZE_FORMAT, map.size);
+
+ parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
+ map.data, 0, map.size, nl, &nalu);
+
+ /* there is no AUD in AVC, always enable insertion, the pre_push function
+ * will only add it once, and will only add it for byte-stream output. */
+ h264parse->aud_insert = TRUE;
+
+ while (parse_res == GST_H264_PARSER_OK) {
+ GST_DEBUG_OBJECT (h264parse, "AVC nal offset %d", nalu.offset + nalu.size);
+
+ /* either way, have a look at it */
+ gst_h264_parse_process_nal (h264parse, &nalu);
+
+ /* dispatch per NALU if needed */
+ if (h264parse->split_packetized) {
+ GstBaseParseFrame tmp_frame;
+
+ gst_base_parse_frame_init (&tmp_frame);
+ tmp_frame.flags |= frame->flags;
+ tmp_frame.offset = frame->offset;
+ tmp_frame.overhead = frame->overhead;
+ tmp_frame.buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL,
+ nalu.offset, nalu.size);
+
+ /* Set marker on last packet */
+ if (nl + nalu.size == left) {
+ if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_MARKER))
+ h264parse->marker = TRUE;
+ }
+
+ /* note we don't need to come up with a sub-buffer, since
+ * subsequent code only considers input buffer's metadata.
+ * Real data is either taken from input by baseclass or
+ * a replacement output buffer is provided anyway. */
+ gst_h264_parse_parse_frame (parse, &tmp_frame);
+ ret = gst_base_parse_finish_frame (parse, &tmp_frame, nl + nalu.size);
+ left -= nl + nalu.size;
+ }
+
+ parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
+ map.data, nalu.offset + nalu.size, map.size, nl, &nalu);
+ }
+
+ gst_buffer_unmap (buffer, &map);
+
+ if (!h264parse->split_packetized) {
+ h264parse->marker = TRUE;
+ gst_h264_parse_parse_frame (parse, frame);
+ ret = gst_base_parse_finish_frame (parse, frame, map.size);
+ } else {
+ gst_buffer_unref (buffer);
+ if (G_UNLIKELY (left)) {
+ /* should not be happening for nice AVC */
+ GST_WARNING_OBJECT (parse, "skipping leftover AVC data %d", left);
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
+ ret = gst_base_parse_finish_frame (parse, frame, map.size);
+ }
+ }
+
+ if (parse_res == GST_H264_PARSER_NO_NAL_END ||
+ parse_res == GST_H264_PARSER_BROKEN_DATA) {
+
+ if (h264parse->split_packetized) {
+ GST_ELEMENT_ERROR (h264parse, STREAM, FAILED, (NULL),
+ ("invalid AVC input data"));
+
+ return GST_FLOW_ERROR;
+ } else {
+ /* do not meddle to much in this case */
+ GST_DEBUG_OBJECT (h264parse, "parsing packet failed");
+ }
+ }
+
+ return ret;
+ }
+
/* GstBaseParse::handle_frame for byte-stream input: scan for complete NAL
 * units, process them, and decide where the current frame (NAL or AU,
 * depending on alignment) ends. Delegates to the packetized handler for
 * AVC input. On exit, either finishes a frame, requests more data
 * (*skipsize = 0), or requests an input skip (*skipsize > 0). */
static GstFlowReturn
gst_h264_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstH264Parse *h264parse = GST_H264_PARSE (parse);
  GstBuffer *buffer = frame->buffer;
  GstMapInfo map;
  guint8 *data;
  gsize size;
  gint current_off = 0;
  gboolean drain, nonext;
  GstH264NalParser *nalparser = h264parse->nalparser;
  GstH264NalUnit nalu;
  GstH264ParserResult pres;
  gint framesize;
#ifdef TIZEN_FEATURE_H264PARSE_MODIFICATION
  /* remembers the previous complete NAL type, see NO_NAL handling below */
  GstH264NalUnitType last_nal_type = GST_H264_NAL_UNKNOWN;
#endif

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (frame->buffer,
              GST_BUFFER_FLAG_DISCONT))) {
    h264parse->discont = TRUE;
  }

  /* delegate in packetized case, no skipping should be needed */
  if (h264parse->packetized)
    return gst_h264_parse_handle_frame_packetized (parse, frame);

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  /* expect at least 3 bytes start_code, and 1 bytes NALU header.
   * the length of the NALU payload can be zero.
   * (e.g. EOS/EOB placed at the end of an AU.) */
  if (G_UNLIKELY (size < 4)) {
    gst_buffer_unmap (buffer, &map);
    *skipsize = 1;
    return GST_FLOW_OK;
  }

  /* need to configure aggregation */
  if (G_UNLIKELY (h264parse->format == GST_H264_PARSE_FORMAT_NONE))
    gst_h264_parse_negotiate (h264parse, GST_H264_PARSE_FORMAT_BYTE, NULL);

  /* avoid stale cached parsing state */
  if (frame->flags & GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME) {
    GST_LOG_OBJECT (h264parse, "parsing new frame");
    gst_h264_parse_reset_frame (h264parse);
  } else {
    GST_LOG_OBJECT (h264parse, "resuming frame parsing");
  }

  /* Always consume the entire input buffer when in_align == ALIGN_AU */
  drain = GST_BASE_PARSE_DRAINING (parse)
      || h264parse->in_align == GST_H264_PARSE_ALIGN_AU;
  nonext = FALSE;

  /* current_off == -1 means a fresh frame; resume position otherwise */
  current_off = h264parse->current_off;
  if (current_off < 0)
    current_off = 0;

  /* The parser is being drained, but no new data was added, just pretend
   * this AU is complete */
  if (drain && current_off == size) {
    GST_DEBUG_OBJECT (h264parse, "draining with no new data");
    nalu.size = 0;
    nalu.offset = current_off;
    goto end;
  }

  g_assert (current_off < size);
  GST_DEBUG_OBJECT (h264parse, "last parse position %d", current_off);

  /* check for initial skip */
  if (h264parse->current_off == -1) {
    pres =
        gst_h264_parser_identify_nalu_unchecked (nalparser, data, current_off,
        size, &nalu);
    switch (pres) {
      case GST_H264_PARSER_OK:
        if (nalu.sc_offset > 0) {
          /* garbage before the first start code: skip it */
          *skipsize = nalu.sc_offset;
          goto skip;
        }
        break;
      case GST_H264_PARSER_NO_NAL:
        /* Start code may have up to 4 bytes */
        *skipsize = size - 4;
        goto skip;
        break;
      default:
        /* should not really occur either */
        GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
            ("Error parsing H.264 stream"), ("Invalid H.264 stream"));
        goto invalid_stream;
    }

    /* Ensure we use the TS of the first NAL. This avoids broken timestamp in
     * the case of a miss-placed filler byte. */
    gst_base_parse_set_ts_at_offset (parse, nalu.offset);
  }

  /* main scan loop: one complete NAL per iteration */
  while (TRUE) {
    pres =
        gst_h264_parser_identify_nalu (nalparser, data, current_off, size,
        &nalu);

    switch (pres) {
      case GST_H264_PARSER_OK:
        GST_DEBUG_OBJECT (h264parse, "complete nal (offset, size): (%u, %u) ",
            nalu.offset, nalu.size);
#ifdef TIZEN_FEATURE_H264PARSE_MODIFICATION
        last_nal_type = nalu.type;
#endif
        break;
      case GST_H264_PARSER_NO_NAL_END:
        /* In NAL alignment, assume the NAL is complete */
        if (h264parse->in_align == GST_H264_PARSE_ALIGN_NAL ||
            h264parse->in_align == GST_H264_PARSE_ALIGN_AU) {
          nonext = TRUE;
          nalu.size = size - nalu.offset;
          break;
        }
        GST_DEBUG_OBJECT (h264parse, "not a complete nal found at offset %u",
            nalu.offset);
        /* if draining, accept it as complete nal */
        if (drain) {
          nonext = TRUE;
          nalu.size = size - nalu.offset;
          GST_DEBUG_OBJECT (h264parse, "draining, accepting with size %u",
              nalu.size);
          /* if it's not too short at least */
          if (nalu.size < 2)
            goto broken;
          break;
        }
        /* otherwise need more */
        goto more;
      case GST_H264_PARSER_BROKEN_LINK:
        GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
            ("Error parsing H.264 stream"),
            ("The link to structure needed for the parsing couldn't be found"));
        goto invalid_stream;
      case GST_H264_PARSER_ERROR:
        /* should not really occur either */
        GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
            ("Error parsing H.264 stream"), ("Invalid H.264 stream"));
        goto invalid_stream;
      case GST_H264_PARSER_NO_NAL:
#ifdef TIZEN_FEATURE_H264PARSE_MODIFICATION
        /* Tizen: a SEQ_END NAL may legitimately be the last thing in the
         * stream; push the remaining data instead of erroring out */
        if (last_nal_type == GST_H264_NAL_SEQ_END) {
          GST_WARNING_OBJECT (h264parse, "No H.264 NAL unit found, but last "
              "nal type is SEQ_END, So push remain buffer");
          gst_buffer_unmap (buffer, &map);
          gst_h264_parse_parse_frame (parse, frame);
          return gst_base_parse_finish_frame (parse, frame, size);
        }
#endif
        GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
            ("Error parsing H.264 stream"), ("No H.264 NAL unit found"));
        goto invalid_stream;
      case GST_H264_PARSER_BROKEN_DATA:
        GST_WARNING_OBJECT (h264parse, "input stream is corrupt; "
            "it contains a NAL unit of length %u", nalu.size);
      broken:
        /* broken nal at start -> arrange to skip it,
         * otherwise have it terminate current au
         * (and so it will be skipped on next frame round) */
        GST_ELEMENT_WARNING (h264parse, STREAM, DECODE,
            (NULL), ("Broken bit stream"));
        if (current_off == 0) {
          GST_DEBUG_OBJECT (h264parse, "skipping broken nal");
          *skipsize = nalu.offset;
          goto skip;
        } else {
          GST_DEBUG_OBJECT (h264parse, "terminating au");
          nalu.size = 0;
          nalu.offset = nalu.sc_offset;
          goto end;
        }
        break;
      default:
        g_assert_not_reached ();
        break;
    }

    GST_DEBUG_OBJECT (h264parse, "%p complete nal found. Off: %u, Size: %u",
        data, nalu.offset, nalu.size);

    if (gst_h264_parse_collect_nal (h264parse, &nalu)) {
      /* this NAL starts a new AU */
      h264parse->aud_needed = TRUE;
      /* complete current frame, if it exist */
      if (current_off > 0) {
        nalu.size = 0;
        nalu.offset = nalu.sc_offset;
        h264parse->marker = TRUE;
        break;
      }
    }

    if (!gst_h264_parse_process_nal (h264parse, &nalu)) {
      GST_WARNING_OBJECT (h264parse,
          "broken/invalid nal Type: %d %s, Size: %u will be dropped",
          nalu.type, _nal_name (nalu.type), nalu.size);
      *skipsize = nalu.size;
      goto skip;
    }

    /* Make sure the next buffer will contain an AUD */
    if (h264parse->aud_needed) {
      h264parse->aud_insert = TRUE;
      h264parse->aud_needed = FALSE;
    }

    /* Do not push immediately if we don't have all headers. This ensure that
     * our caps are complete, avoiding a renegotiation */
    if (h264parse->align == GST_H264_PARSE_ALIGN_NAL &&
        !GST_H264_PARSE_STATE_VALID (h264parse,
            GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS))
      frame->flags |= GST_BASE_PARSE_FRAME_FLAG_QUEUE;

    /* if no next nal, we reached the end of this buffer */
    if (nonext) {
      /* If there is a marker flag, or input is AU, we know this is complete */
      if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_MARKER) ||
          h264parse->in_align == GST_H264_PARSE_ALIGN_AU) {
        h264parse->marker = TRUE;
        break;
      }

      /* or if we are draining */
      if (drain || h264parse->align == GST_H264_PARSE_ALIGN_NAL)
        break;

      current_off = nalu.offset + nalu.size;
      goto more;
    }

    /* If the output is NAL, we are done */
    if (h264parse->align == GST_H264_PARSE_ALIGN_NAL)
      break;

    GST_DEBUG_OBJECT (h264parse, "Looking for more");
    current_off = nalu.offset + nalu.size;

    /* expect at least 3 bytes start_code, and 1 bytes NALU header.
     * the length of the NALU payload can be zero.
     * (e.g. EOS/EOB placed at the end of an AU.) */
    if (size - current_off < 4) {
      /* Finish the frame if there is no more data in the stream */
      if (drain)
        break;

      goto more;
    }
  }

end:
  /* frame spans from buffer start up to the end of the last accepted NAL */
  framesize = nalu.offset + nalu.size;

  gst_buffer_unmap (buffer, &map);

  gst_h264_parse_parse_frame (parse, frame);

  return gst_base_parse_finish_frame (parse, frame, framesize);

more:
  *skipsize = 0;

  /* Restart parsing from here next time */
  if (current_off > 0)
    h264parse->current_off = current_off;

  /* Fall-through. */
out:
  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_OK;

skip:
  GST_DEBUG_OBJECT (h264parse, "skipping %d", *skipsize);
  /* If we are collecting access units, we need to preserve the initial
   * config headers (SPS, PPS et al.) and only reset the frame if another
   * slice NAL was received. This means that broken pictures are discarded */
  if (h264parse->align != GST_H264_PARSE_ALIGN_AU ||
      !(h264parse->state & GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS) ||
      (h264parse->state & GST_H264_PARSE_STATE_GOT_SLICE))
    gst_h264_parse_reset_frame (h264parse);
  goto out;

invalid_stream:
  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_ERROR;
}
+
/* byte together avc codec data based on collected pps and sps so far
 *
 * Serializes an AVCDecoderConfigurationRecord from the stored SPS/PPS NALs:
 * 5-byte header, SPS count+payloads, PPS count+payloads. For AVC3 the
 * parameter sets stay in-band, so counts/payloads are emitted as zero/empty.
 * Returns a new GstBuffer owned by the caller, or NULL when no usable SPS
 * (or, for non-AVC3, no PPS) has been collected yet. */
static GstBuffer *
gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
{
  GstBuffer *buf, *nal;
  gint i, sps_size = 0, pps_size = 0, num_sps = 0, num_pps = 0;
  guint8 profile_idc = 0, profile_comp = 0, level_idc = 0;
  gboolean found = FALSE;
  GstMapInfo map;
  guint8 *data;
  gint nl;

  /* only nal payload in stored nals */

  for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
    if ((nal = h264parse->sps_nals[i])) {
      gsize size = gst_buffer_get_size (nal);
      num_sps++;
      /* size bytes also count */
      sps_size += size + 2;
      if (size >= 4) {
        /* profile/compat/level live in SPS bytes 1..3 (after NAL header) */
        guint8 tmp[3];
        found = TRUE;
        gst_buffer_extract (nal, 1, tmp, 3);
        profile_idc = tmp[0];
        profile_comp = tmp[1];
        level_idc = tmp[2];
      }
    }
  }
  for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
    if ((nal = h264parse->pps_nals[i])) {
      num_pps++;
      /* size bytes also count */
      pps_size += gst_buffer_get_size (nal) + 2;
    }
  }

  /* AVC3 has SPS/PPS inside the stream, not in the codec_data */
  if (h264parse->format == GST_H264_PARSE_FORMAT_AVC3) {
    num_sps = sps_size = 0;
    num_pps = pps_size = 0;
  }

  GST_DEBUG_OBJECT (h264parse,
      "constructing codec_data: num_sps=%d, num_pps=%d", num_sps, num_pps);

  if (!found || (0 == num_pps
          && GST_H264_PARSE_FORMAT_AVC3 != h264parse->format))
    return NULL;

  /* 5 header bytes + 1 SPS-count byte + SPSs + 1 PPS-count byte + PPSs */
  buf = gst_buffer_new_allocate (NULL, 5 + 1 + sps_size + 1 + pps_size, NULL);
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;
  nl = h264parse->nal_length_size;

  data[0] = 1;                  /* AVC Decoder Configuration Record ver. 1 */
  data[1] = profile_idc;        /* profile_idc                             */
  data[2] = profile_comp;       /* profile_compability                     */
  data[3] = level_idc;          /* level_idc                               */
  data[4] = 0xfc | (nl - 1);    /* nal_length_size_minus1                  */
  data[5] = 0xe0 | num_sps;     /* number of SPSs                          */

  data += 6;
  if (h264parse->format != GST_H264_PARSE_FORMAT_AVC3) {
    for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
      if ((nal = h264parse->sps_nals[i])) {
        /* 16-bit big-endian length, then the raw NAL payload */
        gsize nal_size = gst_buffer_get_size (nal);
        GST_WRITE_UINT16_BE (data, nal_size);
        gst_buffer_extract (nal, 0, data + 2, nal_size);
        data += 2 + nal_size;
      }
    }
  }

  data[0] = num_pps;
  data++;
  if (h264parse->format != GST_H264_PARSE_FORMAT_AVC3) {
    for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
      if ((nal = h264parse->pps_nals[i])) {
        gsize nal_size = gst_buffer_get_size (nal);
        GST_WRITE_UINT16_BE (data, nal_size);
        gst_buffer_extract (nal, 0, data + 2, nal_size);
        data += 2 + nal_size;
      }
    }
  }

  gst_buffer_unmap (buf, &map);

  return buf;
}
+
+ static void
+ gst_h264_parse_get_par (GstH264Parse * h264parse, gint * num, gint * den)
+ {
+ if (h264parse->upstream_par_n != -1 && h264parse->upstream_par_d != -1) {
+ *num = h264parse->upstream_par_n;
+ *den = h264parse->upstream_par_d;
+ } else {
+ *num = h264parse->parsed_par_n;
+ *den = h264parse->parsed_par_d;
+ }
+ }
+
/* Build caps listing every profile a decoder could use to decode a stream
 * with this SPS's profile_idc/constraint flags, per the compatibility
 * rules in H.264 Annex A (clauses referenced inline). Returns newly
 * allocated caps, or NULL if no compatible-profile list applies. */
static GstCaps *
get_compatible_profile_caps (GstH264SPS * sps)
{
  GstCaps *caps = NULL;
  const gchar **profiles = NULL;
  gint i;
  GValue compat_profiles = G_VALUE_INIT;
  g_value_init (&compat_profiles, GST_TYPE_LIST);

  /* each arm points 'profiles' at a static NULL-terminated string array */
  switch (sps->profile_idc) {
    case GST_H264_PROFILE_EXTENDED:
      if (sps->constraint_set0_flag) {  /* A.2.1 */
        if (sps->constraint_set1_flag) {
          static const gchar *profile_array[] =
              { "constrained-baseline", "baseline", "main", "high",
            "high-10", "high-4:2:2", "high-4:4:4", NULL
          };
          profiles = profile_array;
        } else {
          static const gchar *profile_array[] = { "baseline", NULL };
          profiles = profile_array;
        }
      } else if (sps->constraint_set1_flag) {   /* A.2.2 */
        static const gchar *profile_array[] =
            { "main", "high", "high-10", "high-4:2:2", "high-4:4:4", NULL };
        profiles = profile_array;
      }
      break;
    case GST_H264_PROFILE_BASELINE:
      if (sps->constraint_set1_flag) {  /* A.2.1 */
        static const gchar *profile_array[] =
            { "baseline", "main", "high", "high-10", "high-4:2:2",
          "high-4:4:4", NULL
        };
        profiles = profile_array;
      } else {
        static const gchar *profile_array[] = { "extended", NULL };
        profiles = profile_array;
      }
      break;
    case GST_H264_PROFILE_MAIN:
    {
      static const gchar *profile_array[] =
          { "high", "high-10", "high-4:2:2", "high-4:4:4", NULL };
      profiles = profile_array;
    }
      break;
    case GST_H264_PROFILE_HIGH:
      if (sps->constraint_set1_flag) {
        static const gchar *profile_array[] =
            { "main", "high-10", "high-4:2:2", "high-4:4:4", NULL };
        profiles = profile_array;
      } else {
        static const gchar *profile_array[] =
            { "high-10", "high-4:2:2", "high-4:4:4", NULL };
        profiles = profile_array;
      }
      break;
    case GST_H264_PROFILE_HIGH10:
      if (sps->constraint_set1_flag) {
        static const gchar *profile_array[] =
            { "main", "high", "high-4:2:2", "high-4:4:4", NULL };
        profiles = profile_array;
      } else {
        if (sps->constraint_set3_flag) {        /* A.2.8 */
          static const gchar *profile_array[] =
              { "high-10", "high-4:2:2", "high-4:4:4", "high-4:2:2-intra",
            "high-4:4:4-intra", NULL
          };
          profiles = profile_array;
        } else {
          static const gchar *profile_array[] =
              { "high-4:2:2", "high-4:4:4", NULL };
          profiles = profile_array;
        }
      }
      break;
    case GST_H264_PROFILE_HIGH_422:
      if (sps->constraint_set1_flag) {
        static const gchar *profile_array[] =
            { "main", "high", "high-10", "high-4:4:4", NULL };
        profiles = profile_array;
      } else {
        if (sps->constraint_set3_flag) {        /* A.2.9 */
          static const gchar *profile_array[] =
              { "high-4:2:2", "high-4:4:4", "high-4:2:2-intra",
            "high-4:4:4-intra", NULL
          };
          profiles = profile_array;
        } else {
          static const gchar *profile_array[] =
              { "high-4:2:2", "high-4:4:4", NULL };
          profiles = profile_array;
        }
      }
      break;
    case GST_H264_PROFILE_HIGH_444:
      if (sps->constraint_set1_flag) {
        static const gchar *profile_array[] =
            { "main", "high", "high-10", "high-4:2:2", NULL };
        profiles = profile_array;
      } else if (sps->constraint_set3_flag) {   /* A.2.10 */
        static const gchar *profile_array[] = { "high-4:4:4", NULL };
        profiles = profile_array;
      }
      break;
    case GST_H264_PROFILE_MULTIVIEW_HIGH:
      /* two views: stream also decodable as stereo-high */
      if (sps->extension_type == GST_H264_NAL_EXTENSION_MVC
          && sps->extension.mvc.num_views_minus1 == 1) {
        static const gchar *profile_array[] =
            { "stereo-high", "multiview-high", NULL };
        profiles = profile_array;
      } else {
        static const gchar *profile_array[] = { "multiview-high", NULL };
        profiles = profile_array;
      }
      break;
    default:
      break;
  }

  if (profiles) {
    /* wrap the string array into a GstCaps "profile" list value */
    GValue value = G_VALUE_INIT;
    caps = gst_caps_new_empty_simple ("video/x-h264");
    for (i = 0; profiles[i]; i++) {
      g_value_init (&value, G_TYPE_STRING);
      g_value_set_string (&value, profiles[i]);
      gst_value_list_append_value (&compat_profiles, &value);
      g_value_unset (&value);
    }
    gst_caps_set_value (caps, "profile", &compat_profiles);
    g_value_unset (&compat_profiles);
  }

  return caps;
}
+
/* if downstream didn't support the exact profile indicated in sps header,
 * check for the compatible profiles also
 *
 * May rewrite the "profile" field of 'caps' in place with a compatible
 * profile that intersects what the downstream peer accepts. 'caps' is
 * modified, not replaced; all queried/derived caps are unreffed here. */
static void
ensure_caps_profile (GstH264Parse * h264parse, GstCaps * caps, GstH264SPS * sps)
{
  GstCaps *peer_caps, *compat_caps;

  peer_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
  if (!peer_caps || !gst_caps_can_intersect (caps, peer_caps)) {
    /* no negotiated caps (or no match): ask the peer what it would accept */
    GstCaps *filter_caps = gst_caps_new_empty_simple ("video/x-h264");

    if (peer_caps)
      gst_caps_unref (peer_caps);
    peer_caps =
        gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (h264parse),
        filter_caps);

    gst_caps_unref (filter_caps);
  }

  if (peer_caps && !gst_caps_can_intersect (caps, peer_caps)) {
    GstStructure *structure;

    /* exact profile not accepted: try the Annex-A-compatible profiles */
    compat_caps = get_compatible_profile_caps (sps);
    if (compat_caps != NULL) {
      GstCaps *res_caps = NULL;

      res_caps = gst_caps_intersect (peer_caps, compat_caps);

      if (res_caps && !gst_caps_is_empty (res_caps)) {
        const gchar *profile_str = NULL;

        res_caps = gst_caps_fixate (res_caps);
        structure = gst_caps_get_structure (res_caps, 0);
        profile_str = gst_structure_get_string (structure, "profile");
        if (profile_str) {
          gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile_str,
              NULL);
          GST_DEBUG_OBJECT (h264parse,
              "Setting compatible profile %s to the caps", profile_str);
        }
      }
      if (res_caps)
        gst_caps_unref (res_caps);
      gst_caps_unref (compat_caps);
    }
  }
  if (peer_caps)
    gst_caps_unref (peer_caps);
}
+
+ static const gchar *
+ digit_to_string (guint digit)
+ {
+ static const char itoa[][2] = {
+ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"
+ };
+
+ if (G_LIKELY (digit < 10))
+ return itoa[digit];
+ else
+ return NULL;
+ }
+
+ /* Translate the SPS profile_idc (refined by the constraint_setN
+ * flags) into the caps "profile" string.
+ * Returns a static string, or NULL for an unrecognized profile_idc. */
+ static const gchar *
+ get_profile_string (GstH264SPS * sps)
+ {
+ switch (sps->profile_idc) {
+ case 66:
+ return sps->constraint_set1_flag ? "constrained-baseline" : "baseline";
+ case 77:
+ return "main";
+ case 88:
+ return "extended";
+ case 100:
+ if (!sps->constraint_set4_flag)
+ return "high";
+ return sps->constraint_set5_flag ? "constrained-high" : "progressive-high";
+ case 110:
+ if (sps->constraint_set3_flag)
+ return "high-10-intra";
+ return sps->constraint_set4_flag ? "progressive-high-10" : "high-10";
+ case 122:
+ return sps->constraint_set3_flag ? "high-4:2:2-intra" : "high-4:2:2";
+ case 244:
+ return sps->constraint_set3_flag ? "high-4:4:4-intra" : "high-4:4:4";
+ case 44:
+ return "cavlc-4:4:4-intra";
+ case 118:
+ return "multiview-high";
+ case 128:
+ return "stereo-high";
+ case 83:
+ return sps->constraint_set5_flag ?
+ "scalable-constrained-baseline" : "scalable-baseline";
+ case 86:
+ if (sps->constraint_set3_flag)
+ return "scalable-high-intra";
+ return sps->constraint_set5_flag ?
+ "scalable-constrained-high" : "scalable-high";
+ default:
+ return NULL;
+ }
+ }
+
+ /* Translate the SPS level_idc into the caps "level" string
+ * ("1", "1b", "3.1", ...).
+ * Returns a static string, or NULL when the level is unset (0)
+ * or not a recognized value. */
+ static const gchar *
+ get_level_string (GstH264SPS * sps)
+ {
+ static const struct
+ {
+ gint idc;
+ const gchar *name;
+ } fractional_levels[] = {
+ {11, "1.1"}, {12, "1.2"}, {13, "1.3"}, {21, "2.1"}, {22, "2.2"},
+ {31, "3.1"}, {32, "3.2"}, {41, "4.1"}, {42, "4.2"}, {51, "5.1"},
+ {52, "5.2"}, {61, "6.1"}, {62, "6.2"}
+ };
+ guint i;
+
+ if (sps->level_idc == 0)
+ return NULL;
+
+ /* level_idc 9, or 11 combined with constraint_set3_flag, means 1b */
+ if (sps->level_idc == 9
+ || (sps->level_idc == 11 && sps->constraint_set3_flag))
+ return "1b";
+
+ /* integer levels: 10 -> "1", 20 -> "2", ... */
+ if (sps->level_idc % 10 == 0)
+ return digit_to_string (sps->level_idc / 10);
+
+ for (i = 0; i < G_N_ELEMENTS (fractional_levels); i++) {
+ if (fractional_levels[i].idc == sps->level_idc)
+ return fractional_levels[i].name;
+ }
+
+ return NULL;
+ }
+
+ /* Update (and, if needed, push) caps on the source pad based on the
+ * current SPS and the upstream (sink) caps.
+ * @caps: sink caps to use, or NULL to read the sink pad's current caps
+ * (non-NULL only from the first _setcaps call, before they are stored).
+ * Carries over upstream caps fields where present, overrides
+ * width/height, framerate, PAR, colorimetry, profile and level from the
+ * parsed SPS, and attaches codec_data for AU-aligned AVC output.
+ * Caps are only re-pushed when they differ from the current src caps. */
+ static void
+ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
+ {
+ GstH264SPS *sps;
+ GstCaps *sink_caps, *src_caps;
+ gboolean modified = FALSE;
+ GstBuffer *buf = NULL;
+ GstStructure *s = NULL;
+
+ /* always build caps if the src pad has none yet; otherwise only
+ * when an update was explicitly requested */
+ if (G_UNLIKELY (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD
+ (h264parse))))
+ modified = TRUE;
+ else if (G_UNLIKELY (!h264parse->update_caps))
+ return;
+
+ /* if this is being called from the first _setcaps call, caps on the sinkpad
+ * aren't set yet and so they need to be passed as an argument */
+ if (caps)
+ sink_caps = gst_caps_ref (caps);
+ else
+ sink_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (h264parse));
+
+ /* carry over input caps as much as possible; override with our own stuff */
+ if (!sink_caps)
+ sink_caps = gst_caps_new_empty_simple ("video/x-h264");
+ else
+ s = gst_caps_get_structure (sink_caps, 0);
+
+ sps = h264parse->nalparser->last_sps;
+ GST_DEBUG_OBJECT (h264parse, "sps: %p", sps);
+
+ /* only codec-data for nice-and-clean au aligned packetized avc format */
+ if ((h264parse->format == GST_H264_PARSE_FORMAT_AVC
+ || h264parse->format == GST_H264_PARSE_FORMAT_AVC3)
+ && h264parse->align == GST_H264_PARSE_ALIGN_AU) {
+ buf = gst_h264_parse_make_codec_data (h264parse);
+ if (buf && h264parse->codec_data) {
+ GstMapInfo map;
+
+ /* compare newly generated codec_data against the stored one to
+ * detect a change */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size != gst_buffer_get_size (h264parse->codec_data) ||
+ gst_buffer_memcmp (h264parse->codec_data, 0, map.data, map.size))
+ modified = TRUE;
+
+ gst_buffer_unmap (buf, &map);
+ } else {
+ /* fall back to upstream-provided codec_data if we couldn't build one */
+ if (!buf && h264parse->codec_data_in)
+ buf = gst_buffer_ref (h264parse->codec_data_in);
+ modified = TRUE;
+ }
+ }
+
+ /* build the output caps */
+ caps = NULL;
+ if (G_UNLIKELY (!sps)) {
+ /* no SPS seen yet: pass sink caps through unchanged */
+ caps = gst_caps_copy (sink_caps);
+ } else {
+ gint crop_width, crop_height;
+ gint fps_num, fps_den;
+ gint par_n, par_d;
+ GstH264VUIParams *vui = &sps->vui_parameters;
+ gchar *colorimetry = NULL;
+
+ /* display size is the cropped size when the SPS signals cropping */
+ if (sps->frame_cropping_flag) {
+ crop_width = sps->crop_rect_width;
+ crop_height = sps->crop_rect_height;
+ } else {
+ crop_width = sps->width;
+ crop_height = sps->height;
+ }
+
+ if (G_UNLIKELY (h264parse->width != crop_width ||
+ h264parse->height != crop_height)) {
+ GST_INFO_OBJECT (h264parse, "resolution changed %dx%d",
+ crop_width, crop_height);
+ h264parse->width = crop_width;
+ h264parse->height = crop_height;
+ modified = TRUE;
+ }
+
+ /* 0/1 is set as the default in the codec parser, we will set
+ * it in case we have no info */
+ gst_h264_video_calculate_framerate (sps, h264parse->field_pic_flag,
+ h264parse->sei_pic_struct, &fps_num, &fps_den);
+ if (G_UNLIKELY (h264parse->fps_num != fps_num
+ || h264parse->fps_den != fps_den)) {
+ GST_DEBUG_OBJECT (h264parse, "framerate changed %d/%d", fps_num, fps_den);
+ h264parse->fps_num = fps_num;
+ h264parse->fps_den = fps_den;
+ modified = TRUE;
+ }
+
+ if (vui->aspect_ratio_info_present_flag) {
+ if (G_UNLIKELY ((h264parse->parsed_par_n != vui->par_n)
+ || (h264parse->parsed_par_d != vui->par_d))) {
+ h264parse->parsed_par_n = vui->par_n;
+ h264parse->parsed_par_d = vui->par_d;
+ GST_INFO_OBJECT (h264parse, "pixel aspect ratio has been changed %d/%d",
+ h264parse->parsed_par_n, h264parse->parsed_par_d);
+ modified = TRUE;
+ }
+ }
+
+ /* colorimetry can only be derived when the VUI carries both the
+ * video signal type and colour description */
+ if (vui->video_signal_type_present_flag &&
+ vui->colour_description_present_flag) {
+ GstVideoColorimetry ci = { 0, };
+ gchar *old_colorimetry = NULL;
+
+ if (vui->video_full_range_flag)
+ ci.range = GST_VIDEO_COLOR_RANGE_0_255;
+ else
+ ci.range = GST_VIDEO_COLOR_RANGE_16_235;
+
+ ci.matrix = gst_video_color_matrix_from_iso (vui->matrix_coefficients);
+ ci.transfer =
+ gst_video_transfer_function_from_iso (vui->transfer_characteristics);
+ ci.primaries = gst_video_color_primaries_from_iso (vui->colour_primaries);
+
+ old_colorimetry =
+ gst_video_colorimetry_to_string (&h264parse->parsed_colorimetry);
+ colorimetry = gst_video_colorimetry_to_string (&ci);
+
+ if (colorimetry && g_strcmp0 (old_colorimetry, colorimetry)) {
+ GST_INFO_OBJECT (h264parse,
+ "colorimetry has been changed from %s to %s",
+ GST_STR_NULL (old_colorimetry), colorimetry);
+ h264parse->parsed_colorimetry = ci;
+ modified = TRUE;
+ }
+
+ g_free (old_colorimetry);
+ }
+
+ /* (re)build full caps only when something actually changed or an
+ * update was requested */
+ if (G_UNLIKELY (modified || h264parse->update_caps)) {
+ gint width, height;
+ GstClockTime latency = 0;
+
+ const gchar *caps_mview_mode = NULL;
+ GstVideoMultiviewMode mview_mode = h264parse->multiview_mode;
+ GstVideoMultiviewFlags mview_flags = h264parse->multiview_flags;
+ const gchar *chroma_format = NULL;
+ guint bit_depth_chroma;
+
+ fps_num = h264parse->fps_num;
+ fps_den = h264parse->fps_den;
+
+ caps = gst_caps_copy (sink_caps);
+
+ /* sps should give this but upstream overrides */
+ if (s && gst_structure_has_field (s, "width"))
+ gst_structure_get_int (s, "width", &width);
+ else
+ width = h264parse->width;
+
+ if (s && gst_structure_has_field (s, "height"))
+ gst_structure_get_int (s, "height", &height);
+ else
+ height = h264parse->height;
+
+ /* prefer upstream PAR; otherwise use the SPS-derived one */
+ if (s == NULL ||
+ !gst_structure_get_fraction (s, "pixel-aspect-ratio", &par_n,
+ &par_d)) {
+ gst_h264_parse_get_par (h264parse, &par_n, &par_d);
+ if (par_n != 0 && par_d != 0) {
+ GST_INFO_OBJECT (h264parse, "PAR %d/%d", par_n, par_d);
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ par_n, par_d, NULL);
+ } else {
+ /* Assume par_n/par_d of 1/1 for calcs below, but don't set into caps */
+ par_n = par_d = 1;
+ }
+ }
+
+ /* Pass through or set output stereo/multiview config */
+ if (s && gst_structure_has_field (s, "multiview-mode")) {
+ caps_mview_mode = gst_structure_get_string (s, "multiview-mode");
+ gst_structure_get_flagset (s, "multiview-flags",
+ (guint *) & mview_flags, NULL);
+ } else if (mview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
+ if (gst_video_multiview_guess_half_aspect (mview_mode,
+ width, height, par_n, par_d)) {
+ mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
+ }
+
+ caps_mview_mode = gst_video_multiview_mode_to_caps_string (mview_mode);
+ gst_caps_set_simple (caps, "multiview-mode", G_TYPE_STRING,
+ caps_mview_mode, "multiview-flags",
+ GST_TYPE_VIDEO_MULTIVIEW_FLAGSET, mview_flags,
+ GST_FLAG_SET_MASK_EXACT, NULL);
+ }
+
+ gst_caps_set_simple (caps, "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height, NULL);
+
+ /* upstream overrides */
+ if (s && gst_structure_has_field (s, "framerate")) {
+ gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
+ }
+
+ /* but not necessarily or reliably this */
+ if (fps_den > 0) {
+ GstStructure *s2;
+ gst_caps_set_simple (caps, "framerate",
+ GST_TYPE_FRACTION, fps_num, fps_den, NULL);
+ s2 = gst_caps_get_structure (caps, 0);
+ gst_structure_get_fraction (s2, "framerate", &h264parse->parsed_fps_n,
+ &h264parse->parsed_fps_d);
+
+ /* If we know the frame duration, and if we are not in one of the zero
+ * latency pattern, add one frame of latency */
+ if (fps_num > 0 && h264parse->in_align != GST_H264_PARSE_ALIGN_AU &&
+ !(h264parse->in_align == GST_H264_PARSE_ALIGN_NAL &&
+ h264parse->align == GST_H264_PARSE_ALIGN_NAL)) {
+ latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);
+ }
+
+ gst_base_parse_set_latency (GST_BASE_PARSE (h264parse), latency,
+ latency);
+ }
+
+ bit_depth_chroma = sps->bit_depth_chroma_minus8 + 8;
+
+ switch (sps->chroma_format_idc) {
+ case 0:
+ chroma_format = "4:0:0";
+ bit_depth_chroma = 0;
+ break;
+ case 1:
+ chroma_format = "4:2:0";
+ break;
+ case 2:
+ chroma_format = "4:2:2";
+ break;
+ case 3:
+ chroma_format = "4:4:4";
+ break;
+ default:
+ break;
+ }
+
+ if (chroma_format)
+ gst_caps_set_simple (caps,
+ "chroma-format", G_TYPE_STRING, chroma_format,
+ "bit-depth-luma", G_TYPE_UINT, sps->bit_depth_luma_minus8 + 8,
+ "bit-depth-chroma", G_TYPE_UINT, bit_depth_chroma, NULL);
+
+ if (colorimetry && (!s || !gst_structure_has_field (s, "colorimetry"))) {
+ gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, colorimetry,
+ NULL);
+ }
+ }
+
+ g_free (colorimetry);
+ }
+
+ /* decorate the new caps and push them if they differ from current */
+ if (caps) {
+ const gchar *mdi_str = NULL;
+ const gchar *cll_str = NULL;
+ gboolean codec_data_modified = FALSE;
+
+ gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING,
+ gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
+ "alignment", G_TYPE_STRING,
+ gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
+
+ /* set profile and level in caps */
+ if (sps) {
+ const gchar *profile, *level;
+
+ profile = get_profile_string (sps);
+ if (profile != NULL)
+ gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL);
+
+ level = get_level_string (sps);
+ if (level != NULL)
+ gst_caps_set_simple (caps, "level", G_TYPE_STRING, level, NULL);
+
+ /* relax the profile constraint to find a suitable decoder */
+ ensure_caps_profile (h264parse, caps, sps);
+ }
+
+ /* upstream-provided HDR metadata wins over SEI-derived values */
+ if (s)
+ mdi_str = gst_structure_get_string (s, "mastering-display-info");
+ if (mdi_str) {
+ gst_caps_set_simple (caps, "mastering-display-info", G_TYPE_STRING,
+ mdi_str, NULL);
+ } else if (h264parse->mastering_display_info_state !=
+ GST_H264_PARSE_SEI_EXPIRED &&
+ !gst_video_mastering_display_info_add_to_caps
+ (&h264parse->mastering_display_info, caps)) {
+ GST_WARNING_OBJECT (h264parse,
+ "Couldn't set mastering display info to caps");
+ }
+
+ if (s)
+ cll_str = gst_structure_get_string (s, "content-light-level");
+ if (cll_str) {
+ gst_caps_set_simple (caps, "content-light-level", G_TYPE_STRING, cll_str,
+ NULL);
+ } else if (h264parse->content_light_level_state !=
+ GST_H264_PARSE_SEI_EXPIRED &&
+ !gst_video_content_light_level_add_to_caps
+ (&h264parse->content_light_level, caps)) {
+ GST_WARNING_OBJECT (h264parse,
+ "Couldn't set content light level to caps");
+ }
+
+ src_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
+
+ if (src_caps) {
+ GstStructure *src_caps_str = gst_caps_get_structure (src_caps, 0);
+
+ /* use codec data from old caps for comparison if we have pushed frame for now.
+ * we don't want to resend caps if everything is same except codec data.
+ * However, if the updated sps/pps is not in bitstream, we should put
+ * it on bitstream */
+ if (gst_structure_has_field (src_caps_str, "codec_data")) {
+ const GValue *codec_data_value =
+ gst_structure_get_value (src_caps_str, "codec_data");
+
+ if (!GST_VALUE_HOLDS_BUFFER (codec_data_value)) {
+ GST_WARNING_OBJECT (h264parse, "codec_data does not hold buffer");
+ } else if (!h264parse->first_frame) {
+ /* If there is no pushed frame before, we can update caps without worry.
+ * But updating codec_data in the middle of frames
+ * (especially on non-keyframe) might make downstream be confused.
+ * Therefore we are setting old codec data
+ * (i.e., was pushed to downstream previously) to new caps candidate
+ * here for gst_caps_is_strictly_equal() to be returned TRUE if only
+ * the codec_data is different, and to avoid re-sending caps it
+ * that case.
+ */
+ gst_caps_set_value (caps, "codec_data", codec_data_value);
+
+ /* check for codec_data update to re-send sps/pps inband data if
+ * current frame has no sps/pps but upstream codec_data was updated */
+ if ((!h264parse->have_sps_in_frame || !h264parse->have_pps_in_frame)
+ && buf) {
+ GstBuffer *codec_data_buf = gst_value_get_buffer (codec_data_value);
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size != gst_buffer_get_size (codec_data_buf) ||
+ gst_buffer_memcmp (codec_data_buf, 0, map.data, map.size)) {
+ codec_data_modified = TRUE;
+ }
+
+ gst_buffer_unmap (buf, &map);
+ }
+ }
+ } else if (!buf) {
+ GstStructure *s;
+ /* remove any left-over codec-data hanging around */
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_remove_field (s, "codec_data");
+ }
+ }
+
+ if (!(src_caps && gst_caps_is_strictly_equal (src_caps, caps))) {
+ /* update codec data to new value */
+ if (buf) {
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_replace (&h264parse->codec_data, buf);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ } else {
+ GstStructure *s;
+ /* remove any left-over codec-data hanging around */
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_remove_field (s, "codec_data");
+ gst_buffer_replace (&h264parse->codec_data, NULL);
+ }
+
+ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (h264parse), caps);
+ } else if (codec_data_modified) {
+ GST_DEBUG_OBJECT (h264parse,
+ "Only codec_data is different, need inband sps/pps update");
+
+ /* this will insert updated codec_data with next idr */
+ h264parse->push_codec = TRUE;
+ }
+
+ if (src_caps)
+ gst_caps_unref (src_caps);
+ gst_caps_unref (caps);
+ }
+
+ gst_caps_unref (sink_caps);
+ if (buf)
+ gst_buffer_unref (buf);
+ }
+
+ /* Estimate the duration of the current output frame.
+ * @frame: whether the buffer actually contains frame data.
+ * Returns 0 for non-frame data, a duration derived from the SPS VUI
+ * timing info (scaled by a pic_struct-dependent tick count) when it is
+ * usable, otherwise a framerate-based fallback, and finally
+ * GST_CLOCK_TIME_NONE when no framerate has been parsed either. */
+ static GstClockTime
+ gst_h264_parse_get_duration (GstH264Parse * h264parse, gboolean frame)
+ {
+ GstClockTime ret = GST_CLOCK_TIME_NONE;
+ GstH264SPS *sps = h264parse->nalparser->last_sps;
+ gint duration = 1;
+
+ if (!frame) {
+ GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration");
+ ret = 0;
+ goto done;
+ }
+
+ /* VUI timing is only usable when the whole chain
+ * SPS -> VUI -> timing_info is present and time_scale is sane */
+ if (!sps) {
+ GST_DEBUG_OBJECT (h264parse, "referred SPS invalid");
+ goto fps_duration;
+ } else if (!sps->vui_parameters_present_flag) {
+ GST_DEBUG_OBJECT (h264parse, "unable to compute duration: VUI not present");
+ goto fps_duration;
+ } else if (!sps->vui_parameters.timing_info_present_flag) {
+ GST_DEBUG_OBJECT (h264parse,
+ "unable to compute duration: timing info not present");
+ goto fps_duration;
+ } else if (sps->vui_parameters.time_scale == 0) {
+ GST_DEBUG_OBJECT (h264parse,
+ "unable to compute duration: time_scale = 0 "
+ "(this is forbidden in spec; bitstream probably contains error)");
+ goto fps_duration;
+ }
+
+ if (h264parse->sei_pic_struct_pres_flag &&
+ h264parse->sei_pic_struct != (guint8) - 1) {
+ /* Note that when h264parse->sei_pic_struct == -1 (unspecified), there
+ * are ways to infer its value. This is related to computing the
+ * TopFieldOrderCnt and BottomFieldOrderCnt, which looks
+ * complicated and thus not implemented for the time being. Yet
+ * the value we have here is correct for many applications
+ */
+ switch (h264parse->sei_pic_struct) {
+ case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
+ duration = 1;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME:
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
+ duration = 2;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+ duration = 3;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
+ duration = 4;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
+ duration = 6;
+ break;
+ default:
+ GST_DEBUG_OBJECT (h264parse,
+ "h264parse->sei_pic_struct of unknown value %d. Not parsed",
+ h264parse->sei_pic_struct);
+ break;
+ }
+ } else {
+ /* no pic_struct SEI: a frame spans two ticks, a field one */
+ duration = h264parse->field_pic_flag ? 1 : 2;
+ }
+
+ GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration);
+
+ /* duration is in ticks of num_units_in_tick / time_scale seconds */
+ ret = gst_util_uint64_scale (duration * GST_SECOND,
+ sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale);
+ /* sanity check */
+ if (ret < GST_MSECOND) {
+ GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (ret));
+ goto fps_duration;
+ }
+
+ done:
+ return ret;
+
+ fps_duration:
+ if (h264parse->parsed_fps_d > 0 && h264parse->parsed_fps_n > 0)
+ ret =
+ gst_util_uint64_scale (GST_SECOND, h264parse->parsed_fps_d,
+ h264parse->parsed_fps_n);
+ goto done;
+ }
+
+ /* Derive DTS and duration for the current frame.
+ * @out_ts: in/out - carries the upstream DTS in (may be NONE); on exit
+ * either the (validated) upstream value or a DTS interpolated from the
+ * SEI buffering-period / cpb_removal_delay reference (ts_trn_nb).
+ * @out_dur: out - estimated duration, when it can be computed.
+ * @frame: whether the buffer contains frame data.
+ * Also advances the running h264parse->dts reference by the duration. */
+ static void
+ gst_h264_parse_get_timestamp (GstH264Parse * h264parse,
+ GstClockTime * out_ts, GstClockTime * out_dur, gboolean frame)
+ {
+ GstH264SPS *sps = h264parse->nalparser->last_sps;
+ GstClockTime upstream;
+ gint duration = 1;
+
+ g_return_if_fail (out_dur != NULL);
+ g_return_if_fail (out_ts != NULL);
+
+ upstream = *out_ts;
+ GST_LOG_OBJECT (h264parse, "Upstream ts %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (upstream));
+
+ if (!frame) {
+ GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration");
+ *out_dur = 0;
+ goto exit;
+ }
+
+ /* VUI timing is required for any interpolation below */
+ if (!sps) {
+ GST_DEBUG_OBJECT (h264parse, "referred SPS invalid");
+ goto exit;
+ } else if (!sps->vui_parameters_present_flag) {
+ GST_DEBUG_OBJECT (h264parse,
+ "unable to compute timestamp: VUI not present");
+ goto exit;
+ } else if (!sps->vui_parameters.timing_info_present_flag) {
+ GST_DEBUG_OBJECT (h264parse,
+ "unable to compute timestamp: timing info not present");
+ goto exit;
+ } else if (sps->vui_parameters.time_scale == 0) {
+ GST_DEBUG_OBJECT (h264parse,
+ "unable to compute timestamp: time_scale = 0 "
+ "(this is forbidden in spec; bitstream probably contains error)");
+ goto exit;
+ }
+
+ if (h264parse->sei_pic_struct_pres_flag &&
+ h264parse->sei_pic_struct != (guint8) - 1) {
+ /* Note that when h264parse->sei_pic_struct == -1 (unspecified), there
+ * are ways to infer its value. This is related to computing the
+ * TopFieldOrderCnt and BottomFieldOrderCnt, which looks
+ * complicated and thus not implemented for the time being. Yet
+ * the value we have here is correct for many applications
+ */
+ switch (h264parse->sei_pic_struct) {
+ case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
+ duration = 1;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME:
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
+ duration = 2;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+ duration = 3;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
+ duration = 4;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
+ duration = 6;
+ break;
+ default:
+ GST_DEBUG_OBJECT (h264parse,
+ "h264parse->sei_pic_struct of unknown value %d. Not parsed",
+ h264parse->sei_pic_struct);
+ break;
+ }
+ } else {
+ /* no pic_struct SEI: a frame spans two ticks, a field one */
+ duration = h264parse->field_pic_flag ? 1 : 2;
+ }
+
+ GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration);
+
+ /*
+ * h264parse.264 C.1.2 Timing of coded picture removal (equivalent to DTS):
+ * Tr,n(0) = initial_cpb_removal_delay[ SchedSelIdx ] / 90000
+ * Tr,n(n) = Tr,n(nb) + Tc * cpb_removal_delay(n)
+ * where
+ * Tc = num_units_in_tick / time_scale
+ */
+
+ if (h264parse->ts_trn_nb != GST_CLOCK_TIME_NONE) {
+ GST_LOG_OBJECT (h264parse, "buffering based ts");
+ /* buffering period is present */
+ if (upstream != GST_CLOCK_TIME_NONE) {
+ /* If upstream timestamp is valid, we respect it and adjust current
+ * reference point */
+ h264parse->ts_trn_nb = upstream -
+ (GstClockTime) gst_util_uint64_scale
+ (h264parse->sei_cpb_removal_delay * GST_SECOND,
+ sps->vui_parameters.num_units_in_tick,
+ sps->vui_parameters.time_scale);
+ } else {
+ /* If no upstream timestamp is given, we write in new timestamp */
+ upstream = h264parse->dts = h264parse->ts_trn_nb +
+ (GstClockTime) gst_util_uint64_scale
+ (h264parse->sei_cpb_removal_delay * GST_SECOND,
+ sps->vui_parameters.num_units_in_tick,
+ sps->vui_parameters.time_scale);
+ }
+ } else {
+ GstClockTime dur;
+
+ GST_LOG_OBJECT (h264parse, "duration based ts");
+ /* naive method: no removal delay specified
+ * track upstream timestamp and provide best guess frame duration */
+ dur = gst_util_uint64_scale (duration * GST_SECOND,
+ sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale);
+ /* sanity check */
+ if (dur < GST_MSECOND) {
+ GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (dur));
+ } else {
+ *out_dur = dur;
+ }
+ }
+
+ exit:
+ /* remember the last valid DTS and step it forward by the duration so
+ * subsequent frames without upstream timestamps stay monotonic */
+ if (GST_CLOCK_TIME_IS_VALID (upstream))
+ *out_ts = h264parse->dts = upstream;
+
+ if (GST_CLOCK_TIME_IS_VALID (*out_dur) &&
+ GST_CLOCK_TIME_IS_VALID (h264parse->dts))
+ h264parse->dts += *out_dur;
+ }
+
+ /* GstBaseParse::parse_frame vfunc: finalize an output frame.
+ * Updates src caps, fills in DTS/duration when do_ts is enabled,
+ * sets the DELTA_UNIT/HEADER/DISCONT/MARKER buffer flags from parser
+ * state, and substitutes the transformed payload accumulated in
+ * frame_out (e.g. re-wrapped AVC) when present. Bidirectional frames
+ * are dropped when discard_bidirectional is set. Always returns
+ * GST_FLOW_OK. */
+ static GstFlowReturn
+ gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+ {
+ GstH264Parse *h264parse;
+ GstBuffer *buffer;
+ guint av;
+
+ h264parse = GST_H264_PARSE (parse);
+ buffer = frame->buffer;
+
+ gst_h264_parse_update_src_caps (h264parse, NULL);
+
+ /* don't mess with timestamps if provided by upstream,
+ * particularly since our ts not that good they handle seeking etc */
+ if (h264parse->do_ts) {
+ gst_h264_parse_get_timestamp (h264parse,
+ &GST_BUFFER_DTS (buffer), &GST_BUFFER_DURATION (buffer),
+ h264parse->frame_start);
+ }
+
+ /* We don't want to let baseparse select a duration itself based
+ * solely on the framerate, as we have more per-frame information
+ * available */
+ if (!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
+ GST_BUFFER_DURATION (buffer) =
+ gst_h264_parse_get_duration (h264parse, h264parse->frame_start);
+ }
+
+ /* keyframes are not delta units; everything else is */
+ if (h264parse->keyframe)
+ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (h264parse->discard_bidirectional && h264parse->bidirectional)
+ goto discard;
+
+ if (h264parse->header)
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_HEADER);
+ else
+ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_HEADER);
+
+ if (h264parse->discont) {
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+ h264parse->discont = FALSE;
+ }
+
+ if (h264parse->marker) {
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_MARKER);
+ h264parse->marker = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_MARKER);
+ }
+
+ /* replace with transformed AVC output if applicable */
+ av = gst_adapter_available (h264parse->frame_out);
+ if (av) {
+ GstBuffer *buf;
+
+ buf = gst_adapter_take_buffer (h264parse->frame_out, av);
+ gst_buffer_copy_into (buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ gst_buffer_replace (&frame->out_buffer, buf);
+ gst_buffer_unref (buf);
+ }
+
+ done:
+ return GST_FLOW_OK;
+
+ discard:
+ GST_DEBUG_OBJECT (h264parse, "Discarding bidirectional frame");
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
+ gst_h264_parse_reset_frame (h264parse);
+ goto done;
+ }
+
+ /* Push a single codec NAL (e.g. SPS/PPS) downstream, wrapped into the
+ * configured stream-format and stamped with @buffer's timestamps and a
+ * zero duration. @nal is only read; no ownership is taken of it. */
+ static GstFlowReturn
+ gst_h264_parse_push_codec_buffer (GstH264Parse * h264parse,
+ GstBuffer * nal, GstBuffer * buffer)
+ {
+ GstMapInfo info;
+ GstBuffer *out;
+
+ gst_buffer_map (nal, &info, GST_MAP_READ);
+ out = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
+ info.data, info.size);
+ gst_buffer_unmap (nal, &info);
+
+ /* codec NALs carry the timing of the frame they accompany */
+ GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
+ GST_BUFFER_DTS (out) = GST_BUFFER_DTS (buffer);
+ GST_BUFFER_DURATION (out) = 0;
+
+ return gst_pad_push (GST_BASE_PARSE_SRC_PAD (h264parse), out);
+ }
+
+ /* Decide whether a pending force-key-unit event should fire for the
+ * buffer at @timestamp with buffer @flags, relative to @segment.
+ * Returns a newly created downstream force-key-unit event (carrying
+ * the pending event's seqnum) once the requested running time has been
+ * reached on a non-delta unit, otherwise NULL. */
+ static GstEvent *
+ check_pending_key_unit_event (GstEvent * pending_event,
+ GstSegment * segment, GstClockTime timestamp, guint flags,
+ GstClockTime pending_key_unit_ts)
+ {
+ GstClockTime running_time, stream_time;
+ gboolean all_headers;
+ guint count;
+ GstEvent *event = NULL;
+
+ g_return_val_if_fail (segment != NULL, NULL);
+
+ if (pending_event == NULL)
+ goto out;
+
+ /* can't compare against the requested running time without a ts */
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ timestamp == GST_CLOCK_TIME_NONE)
+ goto out;
+
+ running_time = gst_segment_to_running_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
+ if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
+ running_time < pending_key_unit_ts)
+ goto out;
+
+ if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
+ GST_DEBUG ("pending force key unit, waiting for keyframe");
+ goto out;
+ }
+
+ stream_time = gst_segment_to_stream_time (segment,
+ GST_FORMAT_TIME, timestamp);
+
+ /* the pending event may be the upstream or the downstream variant */
+ if (!gst_video_event_parse_upstream_force_key_unit (pending_event,
+ NULL, &all_headers, &count)) {
+ gst_video_event_parse_downstream_force_key_unit (pending_event, NULL,
+ NULL, NULL, &all_headers, &count);
+ }
+
+ event =
+ gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
+ running_time, all_headers, count);
+ gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
+
+ out:
+ return event;
+ }
+
+ /* Emit the downstream force-key-unit @event now (consumes it via
+ * gst_pad_push_event), clear the pending key-unit state, and arm
+ * push_codec so SPS/PPS are re-sent with the upcoming keyframe. */
+ static void
+ gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
+ {
+ GstClockTime running_time;
+ guint count;
+ #ifndef GST_DISABLE_GST_DEBUG
+ gboolean have_sps, have_pps;
+ gint i;
+ #endif
+
+ parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
+ gst_event_replace (&parse->force_key_unit_event, NULL);
+
+ gst_video_event_parse_downstream_force_key_unit (event,
+ NULL, NULL, &running_time, NULL, &count);
+
+ GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
+ "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), count);
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
+
+ /* debug-only bookkeeping: report whether any stored SPS/PPS exist */
+ #ifndef GST_DISABLE_GST_DEBUG
+ have_sps = have_pps = FALSE;
+ for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
+ if (parse->sps_nals[i] != NULL) {
+ have_sps = TRUE;
+ break;
+ }
+ }
+ for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
+ if (parse->pps_nals[i] != NULL) {
+ have_pps = TRUE;
+ break;
+ }
+ }
+
+ GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
+ have_sps, have_pps);
+ #endif
+
+ /* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
+ parse->push_codec = TRUE;
+ }
+
+ /* Ensure SPS/PPS reach downstream with the current (IDR) frame:
+ * in NAL alignment they are pushed as separate codec buffers; in AU
+ * alignment they are spliced into @buffer at idr_pos, producing a
+ * replacement out_buffer on @frame.
+ * Returns TRUE if any config NAL was sent/inserted, or if SPS/PPS
+ * were already present in the frame (nothing to do). */
+ static gboolean
+ gst_h264_parse_handle_sps_pps_nals (GstH264Parse * h264parse,
+ GstBuffer * buffer, GstBaseParseFrame * frame)
+ {
+ GstBuffer *codec_nal;
+ gint i;
+ gboolean send_done = FALSE;
+
+ if (h264parse->have_sps_in_frame && h264parse->have_pps_in_frame) {
+ GST_DEBUG_OBJECT (h264parse, "SPS/PPS exist in frame, will not insert");
+ return TRUE;
+ }
+
+ if (h264parse->align == GST_H264_PARSE_ALIGN_NAL) {
+ /* send separate config NAL buffers */
+ GST_DEBUG_OBJECT (h264parse, "- sending SPS/PPS");
+ for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
+ if ((codec_nal = h264parse->sps_nals[i])) {
+ GST_DEBUG_OBJECT (h264parse, "sending SPS nal");
+ gst_h264_parse_push_codec_buffer (h264parse, codec_nal, buffer);
+ send_done = TRUE;
+ }
+ }
+ for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
+ if ((codec_nal = h264parse->pps_nals[i])) {
+ GST_DEBUG_OBJECT (h264parse, "sending PPS nal");
+ gst_h264_parse_push_codec_buffer (h264parse, codec_nal, buffer);
+ send_done = TRUE;
+ }
+ }
+ } else {
+ /* insert config NALs into AU */
+ GstByteWriter bw;
+ GstBuffer *new_buf;
+ const gboolean bs = h264parse->format == GST_H264_PARSE_FORMAT_BYTE;
+ const gint nls = 4 - h264parse->nal_length_size;
+ gboolean ok;
+
+ /* copy everything up to the IDR, then the config NALs, then the rest;
+ * byte-stream gets start codes, AVC gets nal_length_size-byte lengths
+ * (written as a 32-bit value with the writer pos stepped back) */
+ gst_byte_writer_init_with_size (&bw, gst_buffer_get_size (buffer), FALSE);
+ ok = gst_byte_writer_put_buffer (&bw, buffer, 0, h264parse->idr_pos);
+ GST_DEBUG_OBJECT (h264parse, "- inserting SPS/PPS");
+ for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
+ if ((codec_nal = h264parse->sps_nals[i])) {
+ gsize nal_size = gst_buffer_get_size (codec_nal);
+ GST_DEBUG_OBJECT (h264parse, "inserting SPS nal");
+ if (bs) {
+ ok &= gst_byte_writer_put_uint32_be (&bw, 1);
+ } else {
+ ok &= gst_byte_writer_put_uint32_be (&bw, (nal_size << (nls * 8)));
+ ok &= gst_byte_writer_set_pos (&bw,
+ gst_byte_writer_get_pos (&bw) - nls);
+ }
+
+ ok &= gst_byte_writer_put_buffer (&bw, codec_nal, 0, nal_size);
+ send_done = TRUE;
+ }
+ }
+ for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
+ if ((codec_nal = h264parse->pps_nals[i])) {
+ gsize nal_size = gst_buffer_get_size (codec_nal);
+ GST_DEBUG_OBJECT (h264parse, "inserting PPS nal");
+ if (bs) {
+ ok &= gst_byte_writer_put_uint32_be (&bw, 1);
+ } else {
+ ok &= gst_byte_writer_put_uint32_be (&bw, (nal_size << (nls * 8)));
+ ok &= gst_byte_writer_set_pos (&bw,
+ gst_byte_writer_get_pos (&bw) - nls);
+ }
+ ok &= gst_byte_writer_put_buffer (&bw, codec_nal, 0, nal_size);
+ send_done = TRUE;
+ }
+ }
+ ok &= gst_byte_writer_put_buffer (&bw, buffer, h264parse->idr_pos, -1);
+ /* collect result and push */
+ new_buf = gst_byte_writer_reset_and_get_buffer (&bw);
+ gst_buffer_copy_into (new_buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ /* should already be keyframe/IDR, but it may not have been,
+ * so mark it as such to avoid being discarded by picky decoder */
+ GST_BUFFER_FLAG_UNSET (new_buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ gst_buffer_replace (&frame->out_buffer, new_buf);
+ gst_buffer_unref (new_buf);
+ /* some result checking seems to make some compilers happy */
+ if (G_UNLIKELY (!ok)) {
+ GST_ERROR_OBJECT (h264parse, "failed to insert SPS/PPS");
+ }
+ }
+
+ return send_done;
+ }
+
+ static GstBuffer *
+ gst_h264_parse_create_pic_timing_sei (GstH264Parse * h264parse,
+ GstBuffer * buffer)
+ {
+ guint num_meta;
+ const guint8 num_clock_ts_table[9] = {
+ 1, 1, 1, 2, 2, 3, 3, 2, 3
+ };
+ guint num_clock_ts;
+ GstBuffer *out_buf = NULL;
+ GstMemory *sei_mem;
+ GArray *msg_array;
+ gint i, j;
+ GstH264SEIMessage sei;
+ GstH264PicTiming *pic_timing;
+ GstVideoTimeCodeMeta *tc_meta;
+ gpointer iter = NULL;
+ guint8 ct_type = GST_H264_CT_TYPE_PROGRESSIVE;
+
+ if (!h264parse->update_timecode)
+ return NULL;
+
+ num_meta = gst_buffer_get_n_meta (buffer, GST_VIDEO_TIME_CODE_META_API_TYPE);
+ if (num_meta == 0)
+ return NULL;
+
+ if (!h264parse->sei_pic_struct_pres_flag || h264parse->pic_timing_sei_pos < 0) {
+ GST_ELEMENT_WARNING (h264parse, STREAM, NOT_IMPLEMENTED, (NULL),
+ ("timecode update was requested but VUI doesn't support timecode"));
+ return NULL;
+ }
+
+ g_assert (h264parse->sei_pic_struct <=
+ GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING);
+
+ num_clock_ts = num_clock_ts_table[h264parse->sei_pic_struct];
+
+ if (num_meta > num_clock_ts) {
+ GST_LOG_OBJECT (h264parse,
+ "The number of timecode meta %d is superior to required %d",
+ num_meta, num_clock_ts);
+
+ return NULL;
+ }
+
+ GST_LOG_OBJECT (h264parse,
+ "The number of timecode meta %d is compatible", num_meta);
+
+ memset (&sei, 0, sizeof (GstH264SEIMessage));
+ sei.payloadType = GST_H264_SEI_PIC_TIMING;
+ memcpy (&sei.payload.pic_timing,
+ &h264parse->pic_timing_sei, sizeof (GstH264PicTiming));
+
+ pic_timing = &sei.payload.pic_timing;
+
+ switch (h264parse->sei_pic_struct) {
+ case GST_H264_SEI_PIC_STRUCT_FRAME:
+ case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
+ case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
+ ct_type = GST_H264_CT_TYPE_PROGRESSIVE;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+ ct_type = GST_H264_CT_TYPE_INTERLACED;
+ break;
+ default:
+ ct_type = GST_H264_CT_TYPE_UNKNOWN;
+ break;
+ }
+
+ i = 0;
+ while ((tc_meta =
+ (GstVideoTimeCodeMeta *) gst_buffer_iterate_meta_filtered (buffer,
+ &iter, GST_VIDEO_TIME_CODE_META_API_TYPE))) {
+ GstH264ClockTimestamp *tim = &pic_timing->clock_timestamp[i];
+ GstVideoTimeCode *tc = &tc_meta->tc;
+
+ pic_timing->clock_timestamp_flag[i] = 1;
+ tim->ct_type = ct_type;
+ tim->nuit_field_based_flag = 1;
+ tim->counting_type = 0;
+
+ if ((tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
+ == GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
+ tim->counting_type = 4;
+
+ tim->discontinuity_flag = 0;
+ tim->cnt_dropped_flag = 0;
+ tim->n_frames = tc->frames;
+
+ tim->hours_value = tc->hours;
+ tim->minutes_value = tc->minutes;
+ tim->seconds_value = tc->seconds;
+
+ tim->full_timestamp_flag =
+ tim->seconds_flag = tim->minutes_flag = tim->hours_flag = 0;
+
+ if (tc->hours > 0)
+ tim->full_timestamp_flag = 1;
+ else if (tc->minutes > 0)
+ tim->seconds_flag = tim->minutes_flag = 1;
+ else if (tc->seconds > 0)
+ tim->seconds_flag = 1;
+
+ GST_LOG_OBJECT (h264parse,
+ "New time code value %02u:%02u:%02u:%02u",
+ tim->hours_value, tim->minutes_value, tim->seconds_value,
+ tim->n_frames);
+
+ i++;
+ }
+
+ for (j = i; j < 3; j++)
+ pic_timing->clock_timestamp_flag[j] = 0;
+
+ msg_array = g_array_new (FALSE, FALSE, sizeof (GstH264SEIMessage));
+ g_array_set_clear_func (msg_array, (GDestroyNotify) gst_h264_sei_clear);
+
+ g_array_append_val (msg_array, sei);
+ if (h264parse->format == GST_H264_PARSE_FORMAT_BYTE) {
+ sei_mem = gst_h264_create_sei_memory (3, msg_array);
+ } else {
+ sei_mem = gst_h264_create_sei_memory_avc (h264parse->nal_length_size,
+ msg_array);
+ }
+ g_array_unref (msg_array);
+
+ if (!sei_mem) {
+ GST_WARNING_OBJECT (h264parse, "Cannot create Picture Timing SEI memory");
+ return NULL;
+ }
+
+ out_buf = gst_buffer_new ();
+ gst_buffer_copy_into (out_buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+
+ if (h264parse->align == GST_H264_PARSE_ALIGN_NAL) {
+ gst_buffer_append_memory (out_buf, sei_mem);
+ } else {
+ gsize mem_size;
+
+ mem_size = gst_memory_get_sizes (sei_mem, NULL, NULL);
+
+ /* copy every data except for the SEI */
+ if (h264parse->pic_timing_sei_pos > 0) {
+ gst_buffer_copy_into (out_buf, buffer, GST_BUFFER_COPY_MEMORY, 0,
+ h264parse->pic_timing_sei_pos);
+ }
+
+ /* insert new SEI */
+ gst_buffer_append_memory (out_buf, sei_mem);
+
+ if (gst_buffer_get_size (buffer) >
+ h264parse->pic_timing_sei_pos + h264parse->pic_timing_sei_size) {
+ gst_buffer_copy_into (out_buf, buffer, GST_BUFFER_COPY_MEMORY,
+ h264parse->pic_timing_sei_pos + h264parse->pic_timing_sei_size, -1);
+ }
+
+ if (h264parse->idr_pos >= 0) {
+ h264parse->idr_pos += mem_size;
+ h264parse->idr_pos -= h264parse->pic_timing_sei_size;
+ }
+ }
+
+ return out_buf;
+ }
+
+ static GstFlowReturn
+ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+ {
+ GstH264Parse *h264parse;
+ GstBuffer *buffer;
+ GstBuffer *new_buf;
+ GstEvent *event;
+ GstBuffer *parse_buffer = NULL;
+ gboolean is_interlaced = FALSE;
+
+ h264parse = GST_H264_PARSE (parse);
+
+ if (h264parse->first_frame) {
+ GstTagList *taglist;
+ GstCaps *caps;
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (caps == NULL) {
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (h264parse))) {
+ GST_INFO_OBJECT (h264parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (h264parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ taglist = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_VIDEO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+
+ /* also signals the end of first-frame processing */
+ h264parse->first_frame = FALSE;
+ }
+
+ /* In case of byte-stream, insert au delimiter by default
+ * if it doesn't exist */
+ if (h264parse->aud_insert && h264parse->format == GST_H264_PARSE_FORMAT_BYTE) {
+ GST_DEBUG_OBJECT (h264parse, "Inserting AUD into the stream.");
+ if (h264parse->align == GST_H264_PARSE_ALIGN_AU) {
+ GstMemory *mem =
+ gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, (guint8 *) au_delim,
+ sizeof (au_delim), 0, sizeof (au_delim), NULL, NULL);
+
+ frame->out_buffer = gst_buffer_copy (frame->buffer);
+ gst_buffer_prepend_memory (frame->out_buffer, mem);
+ if (h264parse->idr_pos >= 0)
+ h264parse->idr_pos += sizeof (au_delim);
+
+ buffer = frame->out_buffer;
+ } else {
+ GstBuffer *aud_buffer = gst_buffer_new_allocate (NULL, 2, NULL);
+ gst_buffer_fill (aud_buffer, 0, (guint8 *) (au_delim + 4), 2);
+
+ buffer = frame->buffer;
+ gst_h264_parse_push_codec_buffer (h264parse, aud_buffer, buffer);
+ gst_buffer_unref (aud_buffer);
+ }
+ } else {
+ buffer = frame->buffer;
+ }
+ h264parse->aud_insert = FALSE;
+
+ if ((event = check_pending_key_unit_event (h264parse->force_key_unit_event,
+ &parse->segment, GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_FLAGS (buffer), h264parse->pending_key_unit_ts))) {
+ gst_h264_parse_prepare_key_unit (h264parse, event);
+ }
+
+ /* handle timecode */
+ new_buf = gst_h264_parse_create_pic_timing_sei (h264parse, buffer);
+ if (new_buf) {
+ if (frame->out_buffer)
+ gst_buffer_unref (frame->out_buffer);
+
+ buffer = frame->out_buffer = new_buf;
+ }
+
+ /* periodic SPS/PPS sending */
+ if (h264parse->interval > 0 || h264parse->push_codec) {
+ GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
+ guint64 diff;
+ gboolean initial_frame = FALSE;
+
+ /* init */
+ if (!GST_CLOCK_TIME_IS_VALID (h264parse->last_report)) {
+ h264parse->last_report = timestamp;
+ initial_frame = TRUE;
+ }
+
+ if (h264parse->idr_pos >= 0) {
+ GST_LOG_OBJECT (h264parse, "IDR nal at offset %d", h264parse->idr_pos);
+
+ if (timestamp > h264parse->last_report)
+ diff = timestamp - h264parse->last_report;
+ else
+ diff = 0;
+
+ GST_LOG_OBJECT (h264parse,
+ "now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp), GST_TIME_ARGS (h264parse->last_report));
+
+ GST_DEBUG_OBJECT (h264parse,
+ "interval since last SPS/PPS %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (diff));
+
+ if (GST_TIME_AS_SECONDS (diff) >= h264parse->interval ||
+ initial_frame || h264parse->push_codec) {
+ GstClockTime new_ts;
+
+ /* avoid overwriting a perfectly fine timestamp */
+ new_ts = GST_CLOCK_TIME_IS_VALID (timestamp) ? timestamp :
+ h264parse->last_report;
+
+ if (gst_h264_parse_handle_sps_pps_nals (h264parse, buffer, frame)) {
+ h264parse->last_report = new_ts;
+ }
+ }
+ /* we pushed whatever we had */
+ h264parse->push_codec = FALSE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
+ h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
+ }
+ } else if (h264parse->interval == -1) {
+ if (h264parse->idr_pos >= 0) {
+ GST_LOG_OBJECT (h264parse, "IDR nal at offset %d", h264parse->idr_pos);
+
+ gst_h264_parse_handle_sps_pps_nals (h264parse, buffer, frame);
+
+ /* we pushed whatever we had */
+ h264parse->push_codec = FALSE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
+ h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
+ }
+ }
+
+ /* Fixme: setting passthrough mode causes multiple issues:
+ * For NAL-aligned multiresolution streams, passthrough mode makes h264parse
+ * unable to advertise the new resolutions. It also causes issues while
+ * parsing MVC streams when they have two layers.
+ * Disabling passthrough mode for now */
+ #if 0
+ /* If SPS/PPS and a keyframe have been parsed, and we're not converting,
+ * we might switch to passthrough mode now on the basis that we've seen
+ * the SEI packets and know optional caps params (such as multiview).
+ * This is an efficiency optimisation that relies on stream properties
+ * remaining uniform in practice. */
+ if (h264parse->can_passthrough) {
+ if (h264parse->keyframe && h264parse->have_sps && h264parse->have_pps) {
+ GST_LOG_OBJECT (parse, "Switching to passthrough mode");
+ gst_base_parse_set_passthrough (parse, TRUE);
+ }
+ }
+ #endif
+
+ if (frame->out_buffer) {
+ parse_buffer = frame->out_buffer =
+ gst_buffer_make_writable (frame->out_buffer);
+ } else {
+ parse_buffer = frame->buffer = gst_buffer_make_writable (frame->buffer);
+ }
+
+ if (!gst_buffer_get_video_time_code_meta (parse_buffer)) {
+ guint i = 0;
+
+ for (i = 0; i < 3 && h264parse->num_clock_timestamp; i++) {
+ GstH264ClockTimestamp *tim =
+ &h264parse->pic_timing_sei.clock_timestamp[i];
+ gint field_count = -1;
+ guint n_frames;
+ GstVideoTimeCodeFlags flags = 0;
+
+ if (!h264parse->pic_timing_sei.clock_timestamp_flag[i])
+ continue;
+
+ /* Table D-1 */
+ switch (h264parse->sei_pic_struct) {
+ case GST_H264_SEI_PIC_STRUCT_FRAME:
+ case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
+ field_count = h264parse->sei_pic_struct;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
+ field_count = i + 1;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
+ field_count = 2 - i;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+ field_count = i % 2 ? 2 : 1;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+ field_count = i % 2 ? 1 : 2;
+ break;
+ case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
+ case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
+ field_count = 0;
+ break;
+ }
+
+ if (field_count == -1) {
+ GST_WARNING_OBJECT (parse,
+ "failed to determine field count for timecode");
+ field_count = 0;
+ }
+
+ /* dropping of the two lowest (value 0 and 1) n_frames
+ * counts when seconds_value is equal to 0 and
+ * minutes_value is not an integer multiple of 10 */
+ if (tim->counting_type == 4)
+ flags |= GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME;
+
+ if (tim->ct_type == GST_H264_CT_TYPE_INTERLACED) {
+ flags |= GST_VIDEO_TIME_CODE_FLAGS_INTERLACED;
+ is_interlaced = TRUE;
+ }
+
+ n_frames =
+ gst_util_uint64_scale_int (tim->n_frames, 1,
+ 2 - tim->nuit_field_based_flag);
+
+ GST_LOG_OBJECT (h264parse,
+ "Add time code meta %02u:%02u:%02u:%02u",
+ tim->hours_value, tim->minutes_value, tim->seconds_value, n_frames);
+
+ gst_buffer_add_video_time_code_meta_full (parse_buffer,
+ h264parse->parsed_fps_n,
+ h264parse->parsed_fps_d,
+ NULL,
+ flags,
+ tim->hours_flag ? tim->hours_value : 0,
+ tim->minutes_flag ? tim->minutes_value : 0,
+ tim->seconds_flag ? tim->seconds_value : 0, n_frames, field_count);
+ }
+
+ h264parse->num_clock_timestamp = 0;
+ }
+
+ if (is_interlaced) {
+ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ if (h264parse->sei_pic_struct == GST_H264_SEI_PIC_STRUCT_TOP_FIELD)
+ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
+ }
+
+ gst_video_push_user_data ((GstElement *) h264parse, &h264parse->user_data,
+ parse_buffer);
+
+ gst_h264_parse_reset_frame (h264parse);
+
+ return GST_FLOW_OK;
+ }
+
+ static gboolean
+ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
+ {
+ GstH264Parse *h264parse;
+ GstStructure *str;
+ const GValue *codec_data_value;
+ GstBuffer *codec_data = NULL;
+ gsize size;
+ guint format, align, off;
+ GstH264NalUnit nalu;
+ GstH264ParserResult parseres;
+ GstCaps *old_caps;
+
+ h264parse = GST_H264_PARSE (parse);
+
+ /* reset */
+ h264parse->push_codec = FALSE;
+
+ old_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (old_caps) {
+ if (!gst_caps_is_equal (old_caps, caps))
+ gst_h264_parse_reset_stream_info (h264parse);
+ gst_caps_unref (old_caps);
+ }
+
+ str = gst_caps_get_structure (caps, 0);
+
+ /* accept upstream info if provided */
+ gst_structure_get_int (str, "width", &h264parse->width);
+ gst_structure_get_int (str, "height", &h264parse->height);
+ gst_structure_get_fraction (str, "framerate", &h264parse->fps_num,
+ &h264parse->fps_den);
+ gst_structure_get_fraction (str, "pixel-aspect-ratio",
+ &h264parse->upstream_par_n, &h264parse->upstream_par_d);
+
+ /* get upstream format and align from caps */
+ gst_h264_parse_format_from_caps (caps, &format, &align);
+
+ codec_data_value = gst_structure_get_value (str, "codec_data");
+
+ /* fix up caps without stream-format for max. backwards compatibility */
+ if (format == GST_H264_PARSE_FORMAT_NONE) {
+ /* codec_data implies avc */
+ if (codec_data_value != NULL) {
+ GST_ERROR ("video/x-h264 caps with codec_data but no stream-format=avc");
+ format = GST_H264_PARSE_FORMAT_AVC;
+ } else {
+ /* otherwise assume bytestream input */
+ GST_ERROR ("video/x-h264 caps without codec_data or stream-format");
+ format = GST_H264_PARSE_FORMAT_BYTE;
+ }
+ }
+
+ /* avc caps sanity checks */
+ if (format == GST_H264_PARSE_FORMAT_AVC) {
+ /* AVC requires codec_data, AVC3 might have one and/or SPS/PPS inline */
+ if (codec_data_value == NULL)
+ goto avc_caps_codec_data_missing;
+
+ /* AVC implies alignment=au, everything else is not allowed */
+ if (align == GST_H264_PARSE_ALIGN_NONE)
+ align = GST_H264_PARSE_ALIGN_AU;
+ else if (align != GST_H264_PARSE_ALIGN_AU)
+ goto avc_caps_wrong_alignment;
+ }
+
+ /* bytestream caps sanity checks */
+ if (format == GST_H264_PARSE_FORMAT_BYTE) {
+ /* should have SPS/PPS in-band (and/or oob in streamheader field) */
+ if (codec_data_value != NULL)
+ goto bytestream_caps_with_codec_data;
+ }
+
+ /* packetized video has codec_data (required for AVC, optional for AVC3) */
+ if (codec_data_value != NULL) {
+ GstMapInfo map;
+ guint8 *data;
+ guint num_sps, num_pps;
+ #ifndef GST_DISABLE_GST_DEBUG
+ guint profile;
+ #endif
+ gint i;
+
+ GST_DEBUG_OBJECT (h264parse, "have packetized h264");
+ /* make note for optional split processing */
+ h264parse->packetized = TRUE;
+
+ /* codec_data field should hold a buffer */
+ if (!GST_VALUE_HOLDS_BUFFER (codec_data_value))
+ goto avc_caps_codec_data_wrong_type;
+
+ codec_data = gst_value_get_buffer (codec_data_value);
+ if (!codec_data)
+ goto avc_caps_codec_data_missing;
+ gst_buffer_map (codec_data, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ /* parse the avcC data */
+ if (size < 7) { /* when numSPS==0 and numPPS==0, length is 7 bytes */
+ gst_buffer_unmap (codec_data, &map);
+ goto avcc_too_small;
+ }
+ /* parse the version, this must be 1 */
+ if (data[0] != 1) {
+ gst_buffer_unmap (codec_data, &map);
+ goto wrong_version;
+ }
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* AVCProfileIndication */
+ /* profile_compat */
+ /* AVCLevelIndication */
+ profile = (data[1] << 16) | (data[2] << 8) | data[3];
+ GST_DEBUG_OBJECT (h264parse, "profile %06x", profile);
+ #endif
+
+ /* 6 bits reserved | 2 bits lengthSizeMinusOne */
+ /* this is the number of bytes in front of the NAL units to mark their
+ * length */
+ h264parse->nal_length_size = (data[4] & 0x03) + 1;
+ GST_DEBUG_OBJECT (h264parse, "nal length size %u",
+ h264parse->nal_length_size);
+
+ num_sps = data[5] & 0x1f;
+ off = 6;
+ for (i = 0; i < num_sps; i++) {
+ parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
+ data, off, size, 2, &nalu);
+ if (parseres != GST_H264_PARSER_OK) {
+ gst_buffer_unmap (codec_data, &map);
+ goto avcc_too_small;
+ }
+
+ gst_h264_parse_process_nal (h264parse, &nalu);
+ off = nalu.offset + nalu.size;
+ }
+
+ if (off >= size) {
+ gst_buffer_unmap (codec_data, &map);
+ goto avcc_too_small;
+ }
+ num_pps = data[off];
+ off++;
+
+ for (i = 0; i < num_pps; i++) {
+ parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
+ data, off, size, 2, &nalu);
+ if (parseres != GST_H264_PARSER_OK) {
+ gst_buffer_unmap (codec_data, &map);
+ goto avcc_too_small;
+ }
+
+ gst_h264_parse_process_nal (h264parse, &nalu);
+ off = nalu.offset + nalu.size;
+ }
+
+ gst_buffer_unmap (codec_data, &map);
+
+ gst_buffer_replace (&h264parse->codec_data_in, codec_data);
+
+ /* don't confuse codec_data with inband sps/pps */
+ h264parse->have_sps_in_frame = FALSE;
+ h264parse->have_pps_in_frame = FALSE;
+ } else if (format == GST_H264_PARSE_FORMAT_BYTE) {
+ GST_DEBUG_OBJECT (h264parse, "have bytestream h264");
+ /* nothing to pre-process */
+ h264parse->packetized = FALSE;
+ /* we have 4 sync bytes */
+ h264parse->nal_length_size = 4;
+ } else {
+ /* probably AVC3 without codec_data field, anything to do here? */
+ }
+
+ {
+ GstCaps *in_caps;
+
+ /* prefer input type determined above */
+ in_caps = gst_caps_new_simple ("video/x-h264",
+ "parsed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING,
+ gst_h264_parse_get_string (h264parse, TRUE, format),
+ "alignment", G_TYPE_STRING,
+ gst_h264_parse_get_string (h264parse, FALSE, align), NULL);
+ /* negotiate with downstream, sets ->format and ->align */
+ gst_h264_parse_negotiate (h264parse, format, in_caps);
+ gst_caps_unref (in_caps);
+ }
+
+ if (format == h264parse->format && align == h264parse->align) {
+ /* we did parse codec-data and might supplement src caps */
+ gst_h264_parse_update_src_caps (h264parse, caps);
+ } else if (format == GST_H264_PARSE_FORMAT_AVC
+ || format == GST_H264_PARSE_FORMAT_AVC3) {
+ /* if input != output, and input is avc, must split before anything else */
+ /* arrange to insert codec-data in-stream if needed.
+ * src caps are only arranged for later on */
+ h264parse->push_codec = TRUE;
+ h264parse->have_sps = FALSE;
+ h264parse->have_pps = FALSE;
+ if (h264parse->align == GST_H264_PARSE_ALIGN_NAL)
+ h264parse->split_packetized = TRUE;
+ h264parse->packetized = TRUE;
+ }
+
+ h264parse->in_align = align;
+
+ return TRUE;
+
+ /* ERRORS */
+ avc_caps_codec_data_wrong_type:
+ {
+ GST_WARNING_OBJECT (parse, "H.264 AVC caps, codec_data field not a buffer");
+ goto refuse_caps;
+ }
+ avc_caps_codec_data_missing:
+ {
+ GST_WARNING_OBJECT (parse, "H.264 AVC caps, but no codec_data");
+ goto refuse_caps;
+ }
+ avc_caps_wrong_alignment:
+ {
+ GST_WARNING_OBJECT (parse, "H.264 AVC caps with NAL alignment, must be AU");
+ goto refuse_caps;
+ }
+ bytestream_caps_with_codec_data:
+ {
+ GST_WARNING_OBJECT (parse, "H.264 bytestream caps with codec_data is not "
+ "expected, send SPS/PPS in-band with data or in streamheader field");
+ goto refuse_caps;
+ }
+ avcc_too_small:
+ {
+ GST_DEBUG_OBJECT (h264parse, "avcC size %" G_GSIZE_FORMAT " < 8", size);
+ goto refuse_caps;
+ }
+ wrong_version:
+ {
+ GST_DEBUG_OBJECT (h264parse, "wrong avcC version");
+ goto refuse_caps;
+ }
+ refuse_caps:
+ {
+ GST_WARNING_OBJECT (h264parse, "refused caps %" GST_PTR_FORMAT, caps);
+ return FALSE;
+ }
+ }
+
+ static void
+ remove_fields (GstCaps * caps, gboolean all)
+ {
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ if (all) {
+ gst_structure_remove_field (s, "alignment");
+ gst_structure_remove_field (s, "stream-format");
+ }
+ gst_structure_remove_field (s, "parsed");
+ }
+ }
+
+ static GstCaps *
+ gst_h264_parse_get_caps (GstBaseParse * parse, GstCaps * filter)
+ {
+ GstCaps *peercaps, *templ;
+ GstCaps *res, *tmp, *pcopy;
+
+ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (filter) {
+ GstCaps *fcopy = gst_caps_copy (filter);
+ /* Remove the fields we convert */
+ remove_fields (fcopy, TRUE);
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
+ gst_caps_unref (fcopy);
+ } else
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+
+ pcopy = gst_caps_copy (peercaps);
+ remove_fields (pcopy, TRUE);
+
+ res = gst_caps_intersect_full (pcopy, templ, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (pcopy);
+ gst_caps_unref (templ);
+
+ if (filter) {
+ GstCaps *tmp = gst_caps_intersect_full (res, filter,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = tmp;
+ }
+
+ /* Try if we can put the downstream caps first */
+ pcopy = gst_caps_copy (peercaps);
+ remove_fields (pcopy, FALSE);
+ tmp = gst_caps_intersect_full (pcopy, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (pcopy);
+ if (!gst_caps_is_empty (tmp))
+ res = gst_caps_merge (tmp, res);
+ else
+ gst_caps_unref (tmp);
+
+ gst_caps_unref (peercaps);
+ return res;
+ }
+
+ static gboolean
+ gst_h264_parse_event (GstBaseParse * parse, GstEvent * event)
+ {
+ gboolean res;
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ GstClockTime timestamp, stream_time, running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (gst_video_event_is_force_key_unit (event)) {
+ gst_video_event_parse_downstream_force_key_unit (event,
+ ×tamp, &stream_time, &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (h264parse,
+ "received downstream force key unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT
+ " all_headers %d count %d", gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), all_headers, count);
+ if (h264parse->force_key_unit_event) {
+ GST_INFO_OBJECT (h264parse, "ignoring force key unit event "
+ "as one is already queued");
+ } else {
+ h264parse->pending_key_unit_ts = running_time;
+ gst_event_replace (&h264parse->force_key_unit_event, event);
+ }
+ gst_event_unref (event);
+ res = TRUE;
+ } else {
+ res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
+ break;
+ }
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ case GST_EVENT_SEGMENT_DONE:
+ h264parse->dts = GST_CLOCK_TIME_NONE;
+ h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
+ h264parse->push_codec = TRUE;
+
+ res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
+ break;
+ case GST_EVENT_SEGMENT:
+ {
+ const GstSegment *segment;
+
+ gst_event_parse_segment (event, &segment);
+ /* don't try to mess with more subtle cases (e.g. seek) */
+ if (segment->format == GST_FORMAT_TIME &&
+ (segment->start != 0 || segment->rate != 1.0
+ || segment->applied_rate != 1.0))
+ h264parse->do_ts = FALSE;
+
+ if (segment->flags & GST_SEEK_FLAG_TRICKMODE_FORWARD_PREDICTED) {
+ GST_DEBUG_OBJECT (h264parse, "Will discard bidirectional frames");
+ h264parse->discard_bidirectional = TRUE;
+ }
+
+
+ h264parse->last_report = GST_CLOCK_TIME_NONE;
+
+ res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
+ break;
+ }
+ default:
+ res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
+ break;
+ }
+ return res;
+ }
+
+ static gboolean
+ gst_h264_parse_src_event (GstBaseParse * parse, GstEvent * event)
+ {
+ gboolean res;
+ GstH264Parse *h264parse = GST_H264_PARSE (parse);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ {
+ GstClockTime running_time;
+ gboolean all_headers;
+ guint count;
+
+ if (gst_video_event_is_force_key_unit (event)) {
+ gst_video_event_parse_upstream_force_key_unit (event,
+ &running_time, &all_headers, &count);
+
+ GST_INFO_OBJECT (h264parse, "received upstream force-key-unit event, "
+ "seqnum %d running_time %" GST_TIME_FORMAT
+ " all_headers %d count %d", gst_event_get_seqnum (event),
+ GST_TIME_ARGS (running_time), all_headers, count);
+
+ if (all_headers) {
+ h264parse->pending_key_unit_ts = running_time;
+ gst_event_replace (&h264parse->force_key_unit_event, event);
+ }
+ }
+ res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+ break;
+ }
+ default:
+ res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+ break;
+ }
+
+ return res;
+ }
+
+ static void
+ gst_h264_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstH264Parse *parse;
+
+ parse = GST_H264_PARSE (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ parse->interval = g_value_get_int (value);
+ break;
+ case PROP_UPDATE_TIMECODE:
+ parse->update_timecode = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_h264_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstH264Parse *parse;
+
+ parse = GST_H264_PARSE (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ g_value_set_int (value, parse->interval);
+ break;
+ case PROP_UPDATE_TIMECODE:
+ g_value_set_boolean (value, parse->update_timecode);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
--- /dev/null
- gst_caps_from_string (VIDEO_CAPS)));
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+ /**
+ * SECTION:element-simplevideomark
+ * @title: simplevideomark
+ * @see_also: #GstSimpleVideoMarkDetect
+ *
+ * This plugin produces #GstSimpleVideoMark:pattern-count squares in the bottom left
+ * corner of the video frames. The squares have a width and height of
+ * respectively #GstSimpleVideoMark:pattern-width and #GstSimpleVideoMark:pattern-height.
+ * Even squares will be black and odd squares will be white.
+ *
+ * After writing the pattern, #GstSimpleVideoMark:pattern-data-count squares after the
+ * pattern squares are produced as the bitarray given in
+ * #GstSimpleVideoMark:pattern-data. 1 bits will produce white squares and 0 bits will
+ * produce black squares.
+ *
+ * The element can be enabled with the #GstSimpleVideoMark:enabled property. It is
+ * mostly used together with the #GstSimpleVideoMarkDetect plugin.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 videotestsrc ! simplevideomark ! videoconvert ! ximagesink
+ * ]| Add the default black/white squares at the bottom left of the video frames.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideofilter.h>
+ #include "gstsimplevideomark.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_video_mark_debug_category);
+ #define GST_CAT_DEFAULT gst_video_mark_debug_category
+
+ /* prototypes */
+
+
+ static void gst_video_mark_set_property (GObject * object,
+ guint property_id, const GValue * value, GParamSpec * pspec);
+ static void gst_video_mark_get_property (GObject * object,
+ guint property_id, GValue * value, GParamSpec * pspec);
+ static void gst_video_mark_dispose (GObject * object);
+ static void gst_video_mark_finalize (GObject * object);
+
+ static gboolean gst_video_mark_start (GstBaseTransform * trans);
+ static gboolean gst_video_mark_stop (GstBaseTransform * trans);
+ static gboolean gst_video_mark_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info);
+ static GstFlowReturn gst_video_mark_transform_frame_ip (GstVideoFilter * filter,
+ GstVideoFrame * frame);
+
+ enum
+ {
+ PROP_0,
+ PROP_PATTERN_WIDTH,
+ PROP_PATTERN_HEIGHT,
+ PROP_PATTERN_COUNT,
+ PROP_PATTERN_DATA_COUNT,
+ PROP_PATTERN_DATA,
+ PROP_ENABLED,
+ PROP_LEFT_OFFSET,
+ PROP_BOTTOM_OFFSET
+ };
+
+ #define DEFAULT_PATTERN_WIDTH 4
+ #define DEFAULT_PATTERN_HEIGHT 16
+ #define DEFAULT_PATTERN_COUNT 4
+ #define DEFAULT_PATTERN_DATA_COUNT 5
+ #define DEFAULT_PATTERN_DATA 10
+ #define DEFAULT_ENABLED TRUE
+ #define DEFAULT_LEFT_OFFSET 0
+ #define DEFAULT_BOTTOM_OFFSET 0
+
+ /* pad templates */
+
+ #define VIDEO_CAPS \
+ GST_VIDEO_CAPS_MAKE( \
+ "{ I420, YV12, Y41B, Y42B, Y444, YUY2, UYVY, AYUV, YVYU }")
+
+
+ /* class initialization */
+
+ G_DEFINE_TYPE_WITH_CODE (GstSimpleVideoMark, gst_video_mark,
+ GST_TYPE_VIDEO_FILTER,
+ GST_DEBUG_CATEGORY_INIT (gst_video_mark_debug_category, "simplevideomark",
+ 0, "debug category for simplevideomark element"));
+ GST_ELEMENT_REGISTER_DEFINE (simplevideomark, "simplevideomark",
+ GST_RANK_NONE, GST_TYPE_SIMPLE_VIDEO_MARK);
+
+ static void
+ gst_video_mark_class_init (GstSimpleVideoMarkClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstBaseTransformClass *base_transform_class =
+ GST_BASE_TRANSFORM_CLASS (klass);
+ GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
++ GstCaps *tmp = NULL;
+
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_caps_from_string (VIDEO_CAPS)));
++ tmp = gst_caps_from_string (VIDEO_CAPS)));
++ gst_caps_unref (tmp);
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
++ tmp = gst_caps_from_string (VIDEO_CAPS)));
++ gst_caps_unref (tmp);
+
+ gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
+ "Video marker", "Filter/Effect/Video",
+ "Marks a video signal with a pattern", "Wim Taymans <wim@fluendo.com>");
+
+ gobject_class->set_property = gst_video_mark_set_property;
+ gobject_class->get_property = gst_video_mark_get_property;
+ gobject_class->dispose = gst_video_mark_dispose;
+ gobject_class->finalize = gst_video_mark_finalize;
+ base_transform_class->start = GST_DEBUG_FUNCPTR (gst_video_mark_start);
+ base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_video_mark_stop);
+ video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_mark_set_info);
+ video_filter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_video_mark_transform_frame_ip);
+
+ g_object_class_install_property (gobject_class, PROP_PATTERN_WIDTH,
+ g_param_spec_int ("pattern-width", "Pattern width",
+ "The width of the pattern markers", 1, G_MAXINT,
+ DEFAULT_PATTERN_WIDTH,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PATTERN_HEIGHT,
+ g_param_spec_int ("pattern-height", "Pattern height",
+ "The height of the pattern markers", 1, G_MAXINT,
+ DEFAULT_PATTERN_HEIGHT,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PATTERN_COUNT,
+ g_param_spec_int ("pattern-count", "Pattern count",
+ "The number of pattern markers", 0, G_MAXINT,
+ DEFAULT_PATTERN_COUNT,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PATTERN_DATA_COUNT,
+ g_param_spec_int ("pattern-data-count", "Pattern data count",
+ "The number of extra data pattern markers", 0, 64,
+ DEFAULT_PATTERN_DATA_COUNT,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PATTERN_DATA,
+ g_param_spec_uint64 ("pattern-data", "Pattern data",
+ "The extra data pattern markers", 0, G_MAXUINT64,
+ DEFAULT_PATTERN_DATA,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_ENABLED,
+ g_param_spec_boolean ("enabled", "Enabled",
+ "Enable or disable the filter",
+ DEFAULT_ENABLED,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_LEFT_OFFSET,
+ g_param_spec_int ("left-offset", "Left Offset",
+ "The offset from the left border where the pattern starts", 0,
+ G_MAXINT, DEFAULT_LEFT_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BOTTOM_OFFSET,
+ g_param_spec_int ("bottom-offset", "Bottom Offset",
+ "The offset from the bottom border where the pattern starts", 0,
+ G_MAXINT, DEFAULT_BOTTOM_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ }
+
+ static void
+ gst_video_mark_init (GstSimpleVideoMark * simplevideomark)
+ {
+ }
+
+ void
+ gst_video_mark_set_property (GObject * object, guint property_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (object);
+
+ GST_DEBUG_OBJECT (simplevideomark, "set_property");
+
+ switch (property_id) {
+ case PROP_PATTERN_WIDTH:
+ simplevideomark->pattern_width = g_value_get_int (value);
+ break;
+ case PROP_PATTERN_HEIGHT:
+ simplevideomark->pattern_height = g_value_get_int (value);
+ break;
+ case PROP_PATTERN_COUNT:
+ simplevideomark->pattern_count = g_value_get_int (value);
+ break;
+ case PROP_PATTERN_DATA_COUNT:
+ simplevideomark->pattern_data_count = g_value_get_int (value);
+ break;
+ case PROP_PATTERN_DATA:
+ simplevideomark->pattern_data = g_value_get_uint64 (value);
+ break;
+ case PROP_ENABLED:
+ simplevideomark->enabled = g_value_get_boolean (value);
+ break;
+ case PROP_LEFT_OFFSET:
+ simplevideomark->left_offset = g_value_get_int (value);
+ break;
+ case PROP_BOTTOM_OFFSET:
+ simplevideomark->bottom_offset = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ break;
+ }
+ }
+
+ void
+ gst_video_mark_get_property (GObject * object, guint property_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (object);
+
+ GST_DEBUG_OBJECT (simplevideomark, "get_property");
+
+ switch (property_id) {
+ case PROP_PATTERN_WIDTH:
+ g_value_set_int (value, simplevideomark->pattern_width);
+ break;
+ case PROP_PATTERN_HEIGHT:
+ g_value_set_int (value, simplevideomark->pattern_height);
+ break;
+ case PROP_PATTERN_COUNT:
+ g_value_set_int (value, simplevideomark->pattern_count);
+ break;
+ case PROP_PATTERN_DATA_COUNT:
+ g_value_set_int (value, simplevideomark->pattern_data_count);
+ break;
+ case PROP_PATTERN_DATA:
+ g_value_set_uint64 (value, simplevideomark->pattern_data);
+ break;
+ case PROP_ENABLED:
+ g_value_set_boolean (value, simplevideomark->enabled);
+ break;
+ case PROP_LEFT_OFFSET:
+ g_value_set_int (value, simplevideomark->left_offset);
+ break;
+ case PROP_BOTTOM_OFFSET:
+ g_value_set_int (value, simplevideomark->bottom_offset);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ break;
+ }
+ }
+
+ void
+ gst_video_mark_dispose (GObject * object)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (object);
+
+ GST_DEBUG_OBJECT (simplevideomark, "dispose");
+
+ /* clean up as possible. may be called multiple times */
+
+ G_OBJECT_CLASS (gst_video_mark_parent_class)->dispose (object);
+ }
+
+ void
+ gst_video_mark_finalize (GObject * object)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (object);
+
+ GST_DEBUG_OBJECT (simplevideomark, "finalize");
+
+ /* clean up object here */
+
+ G_OBJECT_CLASS (gst_video_mark_parent_class)->finalize (object);
+ }
+
+ static gboolean
+ gst_video_mark_start (GstBaseTransform * trans)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (trans);
+
+ GST_DEBUG_OBJECT (simplevideomark, "start");
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_video_mark_stop (GstBaseTransform * trans)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (trans);
+
+ GST_DEBUG_OBJECT (simplevideomark, "stop");
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_video_mark_set_info (GstVideoFilter * filter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (filter);
+
+ GST_DEBUG_OBJECT (simplevideomark, "set_info");
+
+ return TRUE;
+ }
+
+ static void
+ gst_video_mark_draw_box (GstSimpleVideoMark * simplevideomark, guint8 * data,
+ gint width, gint height, gint row_stride, gint pixel_stride, guint8 color)
+ {
+ gint i, j;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ data[pixel_stride * j] = color;
+ }
+ data += row_stride;
+ }
+ }
+
+ static gint
+ calculate_pw (gint pw, gint x, gint width)
+ {
+ if (x < 0)
+ pw += x;
+ else if ((x + pw) > width)
+ pw = width - x;
+
+ return pw;
+ }
+
+ static GstFlowReturn
+ gst_video_mark_yuv (GstSimpleVideoMark * simplevideomark, GstVideoFrame * frame)
+ {
+ gint i, pw, ph, row_stride, pixel_stride;
+ gint width, height, offset_calc, x, y;
+ guint8 *d;
+ guint64 pattern_shift;
+ guint8 color;
+ gint total_pattern;
+
+ width = frame->info.width;
+ height = frame->info.height;
+
+ pw = simplevideomark->pattern_width;
+ ph = simplevideomark->pattern_height;
+ row_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+
+ d = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ offset_calc =
+ row_stride * (height - ph - simplevideomark->bottom_offset) +
+ pixel_stride * simplevideomark->left_offset;
+ x = simplevideomark->left_offset;
+ y = height - ph - simplevideomark->bottom_offset;
+
+ total_pattern =
+ simplevideomark->pattern_count + simplevideomark->pattern_data_count;
+ /* If x and y offset values are outside the video, no need to draw */
+ if ((x + (pw * total_pattern)) < 0 || x > width || (y + height) < 0
+ || y > height) {
+ GST_ERROR_OBJECT (simplevideomark,
+ "simplevideomark pattern is outside the video. Not drawing.");
+ return GST_FLOW_OK;
+ }
+
+ /* Offset calculation less than 0, then reset to 0 */
+ if (offset_calc < 0)
+ offset_calc = 0;
+ /* Y position of mark is negative or pattern exceeds the video height,
+ then recalculate pattern height for partial display */
+ if (y < 0)
+ ph += y;
+ else if ((y + ph) > height)
+ ph = height - y;
+ /* If pattern height is less than 0, need not draw anything */
+ if (ph < 0)
+ return GST_FLOW_OK;
+
+ /* move to start of bottom left */
+ d += offset_calc;
+
+ /* draw the bottom left pixels */
+ for (i = 0; i < simplevideomark->pattern_count; i++) {
+ gint draw_pw;
+
+ if (i & 1)
+ /* odd pixels must be white */
+ color = 255;
+ else
+ color = 0;
+
+ /* X position of mark is negative or pattern exceeds the video width,
+ then recalculate pattern width for partial display */
+ draw_pw = calculate_pw (pw, x, width);
+ /* If pattern width is less than 0, continue with the next pattern */
+ if (draw_pw < 0)
+ continue;
+
+ /* draw box of width * height */
+ gst_video_mark_draw_box (simplevideomark, d, draw_pw, ph, row_stride,
+ pixel_stride, color);
+
+ /* move to i-th pattern */
+ d += pixel_stride * draw_pw;
+ x += draw_pw;
+
+ if ((x + (pw * (total_pattern - i - 1))) < 0 || x >= width)
+ return GST_FLOW_OK;
+ }
+
+ pattern_shift =
+ G_GUINT64_CONSTANT (1) << (simplevideomark->pattern_data_count - 1);
+
+ /* get the data of the pattern */
+ for (i = 0; i < simplevideomark->pattern_data_count; i++) {
+ gint draw_pw;
+ if (simplevideomark->pattern_data & pattern_shift)
+ color = 255;
+ else
+ color = 0;
+
+ /* X position of mark is negative or pattern exceeds the video width,
+ then recalculate pattern width for partial display */
+ draw_pw = calculate_pw (pw, x, width);
+ /* If pattern width is less than 0, continue with the next pattern */
+ if (draw_pw < 0)
+ continue;
+
+ gst_video_mark_draw_box (simplevideomark, d, draw_pw, ph, row_stride,
+ pixel_stride, color);
+
+ pattern_shift >>= 1;
+
+ /* move to i-th pattern data */
+ d += pixel_stride * draw_pw;
+ x += draw_pw;
+
+ if ((x + (pw * (simplevideomark->pattern_data_count - i - 1))) < 0
+ || x >= width)
+ return GST_FLOW_OK;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+
+ static GstFlowReturn
+ gst_video_mark_transform_frame_ip (GstVideoFilter * filter,
+ GstVideoFrame * frame)
+ {
+ GstSimpleVideoMark *simplevideomark = GST_SIMPLE_VIDEO_MARK (filter);
+
+ GST_DEBUG_OBJECT (simplevideomark, "transform_frame_ip");
+
+ if (simplevideomark->enabled)
+ return gst_video_mark_yuv (simplevideomark, frame);
+
+ return GST_FLOW_OK;
+ }
--- /dev/null
- gst_caps_from_string (VIDEO_CAPS)));
+ /* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+ /**
+ * SECTION:element-videoanalyse
+ * @title: videoanalyse
+ *
+ * This plugin analyses every video frame and if the #GstVideoAnalyse:message
+ * property is %TRUE, posts an element message with video statistics called
+ * `GstVideoAnalyse`.
+ *
+ * The message's structure contains these fields:
+ *
+ * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
+ *
+ * * #GstClockTime `stream-time`: the stream time of the buffer.
+ *
+ * * #GstClockTime `running-time`: the running_time of the buffer.
+ *
 + * * #GstClockTime `duration`: the duration of the buffer.
+ *
 + * * #gdouble `luma-average`: the average brightness of the frame. Range: 0.0-1.0
+ *
 + * * #gdouble `luma-variance`: the brightness variance of the frame.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -m videotestsrc ! videoanalyse ! videoconvert ! ximagesink
+ * ]| This pipeline emits messages to the console for each frame that has been analysed.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideofilter.h>
+ #include "gstvideoanalyse.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_video_analyse_debug_category);
+ #define GST_CAT_DEFAULT gst_video_analyse_debug_category
+
+ /* prototypes */
+
+
+ static void gst_video_analyse_set_property (GObject * object,
+ guint property_id, const GValue * value, GParamSpec * pspec);
+ static void gst_video_analyse_get_property (GObject * object,
+ guint property_id, GValue * value, GParamSpec * pspec);
+ static void gst_video_analyse_finalize (GObject * object);
+
+ static GstFlowReturn gst_video_analyse_transform_frame_ip (GstVideoFilter *
+ filter, GstVideoFrame * frame);
+
+ enum
+ {
+ PROP_0,
+ PROP_MESSAGE
+ };
+
+ #define DEFAULT_MESSAGE TRUE
+
+ #define VIDEO_CAPS \
+ GST_VIDEO_CAPS_MAKE("{ I420, YV12, Y444, Y42B, Y41B }")
+
+
+ /* class initialization */
+
+ G_DEFINE_TYPE_WITH_CODE (GstVideoAnalyse, gst_video_analyse,
+ GST_TYPE_VIDEO_FILTER,
+ GST_DEBUG_CATEGORY_INIT (gst_video_analyse_debug_category, "videoanalyse",
+ 0, "debug category for videoanalyse element"));
+ GST_ELEMENT_REGISTER_DEFINE (videoanalyse, "videoanalyse",
+ GST_RANK_NONE, GST_TYPE_VIDEO_ANALYSE);
+
+ static void
+ gst_video_analyse_class_init (GstVideoAnalyseClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
++ GstCaps *tmp = NULL;
+ GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
+
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_caps_from_string (VIDEO_CAPS)));
++ tmp = gst_caps_from_string (VIDEO_CAPS)));
++ gst_caps_unref (tmp);
+ gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
++ tmp = gst_caps_from_string (VIDEO_CAPS)));
++ gst_caps_unref (tmp);
+
+ gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
+ "Video analyser", "Filter/Analyzer/Video",
+ "Analyse video signal", "Wim Taymans <wim@fluendo.com>");
+
+ gobject_class->set_property = gst_video_analyse_set_property;
+ gobject_class->get_property = gst_video_analyse_get_property;
+ gobject_class->finalize = gst_video_analyse_finalize;
+ video_filter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_video_analyse_transform_frame_ip);
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MESSAGE,
+ g_param_spec_boolean ("message", "Message",
+ "Post statics messages",
+ DEFAULT_MESSAGE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ //trans_class->passthrough_on_same_caps = TRUE;
+ }
+
+ static void
+ gst_video_analyse_init (GstVideoAnalyse * videoanalyse)
+ {
+ }
+
+ void
+ gst_video_analyse_set_property (GObject * object, guint property_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (object);
+
+ GST_DEBUG_OBJECT (videoanalyse, "set_property");
+
+ switch (property_id) {
+ case PROP_MESSAGE:
+ videoanalyse->message = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ break;
+ }
+ }
+
+ void
+ gst_video_analyse_get_property (GObject * object, guint property_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (object);
+
+ GST_DEBUG_OBJECT (videoanalyse, "get_property");
+
+ switch (property_id) {
+ case PROP_MESSAGE:
+ g_value_set_boolean (value, videoanalyse->message);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ break;
+ }
+ }
+
+ void
+ gst_video_analyse_finalize (GObject * object)
+ {
+ GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (object);
+
+ GST_DEBUG_OBJECT (videoanalyse, "finalize");
+
+ /* clean up object here */
+
+ G_OBJECT_CLASS (gst_video_analyse_parent_class)->finalize (object);
+ }
+
+ static void
+ gst_video_analyse_post_message (GstVideoAnalyse * videoanalyse,
+ GstVideoFrame * frame)
+ {
+ GstBaseTransform *trans;
+ GstMessage *m;
+ guint64 duration, timestamp, running_time, stream_time;
+
+ trans = GST_BASE_TRANSFORM_CAST (videoanalyse);
+
+ /* get timestamps */
+ timestamp = GST_BUFFER_TIMESTAMP (frame->buffer);
+ duration = GST_BUFFER_DURATION (frame->buffer);
+ running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
+ timestamp);
+ stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
+ timestamp);
+
+ m = gst_message_new_element (GST_OBJECT_CAST (videoanalyse),
+ gst_structure_new ("GstVideoAnalyse",
+ "timestamp", G_TYPE_UINT64, timestamp,
+ "stream-time", G_TYPE_UINT64, stream_time,
+ "running-time", G_TYPE_UINT64, running_time,
+ "duration", G_TYPE_UINT64, duration,
+ "luma-average", G_TYPE_DOUBLE, videoanalyse->luma_average,
+ "luma-variance", G_TYPE_DOUBLE, videoanalyse->luma_variance, NULL));
+
+ gst_element_post_message (GST_ELEMENT_CAST (videoanalyse), m);
+ }
+
+ static void
+ gst_video_analyse_planar (GstVideoAnalyse * videoanalyse, GstVideoFrame * frame)
+ {
+ guint64 sum;
+ gint avg, diff;
+ gint i, j;
+ guint8 *d;
+ gint width = frame->info.width;
+ gint height = frame->info.height;
+ gint stride;
+
+ d = frame->data[0];
+ stride = frame->info.stride[0];
+ sum = 0;
+ /* do brightness as average of pixel brightness in 0.0 to 1.0 */
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ sum += d[j];
+ }
+ d += stride;
+ }
+ avg = sum / (width * height);
+ videoanalyse->luma_average = sum / (255.0 * width * height);
+
+ d = frame->data[0];
+ stride = frame->info.stride[0];
+ sum = 0;
+ /* do variance */
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ diff = (avg - d[j]);
+ sum += diff * diff;
+ }
+ d += stride;
+ }
+ videoanalyse->luma_variance = sum / (255.0 * 255.0 * width * height);
+ }
+
+ static GstFlowReturn
+ gst_video_analyse_transform_frame_ip (GstVideoFilter * filter,
+ GstVideoFrame * frame)
+ {
+ GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (filter);
+
+ GST_DEBUG_OBJECT (videoanalyse, "transform_frame_ip");
+
+ gst_video_analyse_planar (videoanalyse, frame);
+
+ if (videoanalyse->message)
+ gst_video_analyse_post_message (videoanalyse, frame);
+
+ return GST_FLOW_OK;
+ }
--- /dev/null
+ project('gst-plugins-bad', 'c', 'cpp',
+ version : '1.19.2',
+ meson_version : '>= 0.54',
+ default_options : [ 'warning_level=1',
+ 'buildtype=debugoptimized' ])
+
+ gst_version = meson.project_version()
+ version_arr = gst_version.split('.')
+ gst_version_major = version_arr[0].to_int()
+ gst_version_minor = version_arr[1].to_int()
+ gst_version_micro = version_arr[2].to_int()
+ if version_arr.length() == 4
+ gst_version_nano = version_arr[3].to_int()
+ else
+ gst_version_nano = 0
+ endif
+ gst_version_is_dev = gst_version_minor % 2 == 1 and gst_version_micro < 90
+
+ glib_req = '>= 2.56.0'
+ orc_req = '>= 0.4.17'
+ gst_req = '>= @0@.@1@.0'.format(gst_version_major, gst_version_minor)
+
+ api_version = '1.0'
+ soversion = 0
+ # maintaining compatibility with the previous libtool versioning
+ # current = minor * 100 + micro
+ curversion = gst_version_minor * 100 + gst_version_micro
+ libversion = '@0@.@1@.0'.format(soversion, curversion)
+ osxversion = curversion + 1
+
+ plugins_install_dir = join_paths(get_option('libdir'), 'gstreamer-1.0')
+ plugins = []
+
+ cc = meson.get_compiler('c')
+ cxx = meson.get_compiler('cpp')
+ host_system = host_machine.system()
+
+ if host_system in ['ios', 'darwin']
+ have_objc = add_languages('objc', native: false)
+ have_objcpp = add_languages('objcpp', native: false)
+ else
+ have_objc = false
+ have_objcpp = false
+ endif
+
+ cdata = configuration_data()
+
+ if cc.get_id() == 'msvc'
+ msvc_args = [
+ # Ignore several spurious warnings for things gstreamer does very commonly
+ # If a warning is completely useless and spammy, use '/wdXXXX' to suppress it
+ # If a warning is harmless but hard to fix, use '/woXXXX' so it's shown once
+ # NOTE: Only add warnings here if you are sure they're spurious
+ '/wd4018', # implicit signed/unsigned conversion
+ '/wd4146', # unary minus on unsigned (beware INT_MIN)
+ '/wd4244', # lossy type conversion (e.g. double -> int)
+ '/wd4305', # truncating type conversion (e.g. double -> float)
+ cc.get_supported_arguments(['/utf-8']), # set the input encoding to utf-8
+
+ # Enable some warnings on MSVC to match GCC/Clang behaviour
+ '/w14062', # enumerator 'identifier' in switch of enum 'enumeration' is not handled
+ '/w14101', # 'identifier' : unreferenced local variable
+ '/w14189', # 'identifier' : local variable is initialized but not referenced
+ ]
+ add_project_arguments(msvc_args, language: ['c', 'cpp'])
+ # Disable SAFESEH with MSVC for plugins and libs that use external deps that
+ # are built with MinGW
+ noseh_link_args = ['/SAFESEH:NO']
+ else
+ if cxx.has_argument('-Wno-non-virtual-dtor')
+ add_project_arguments('-Wno-non-virtual-dtor', language: 'cpp')
+ endif
+
+ noseh_link_args = []
+ endif
+
+ if cc.has_link_argument('-Wl,-Bsymbolic-functions')
+ add_project_link_arguments('-Wl,-Bsymbolic-functions', language : 'c')
+ endif
+
+ # Symbol visibility
+ if cc.get_id() == 'msvc'
+ export_define = '__declspec(dllexport) extern'
+ elif cc.has_argument('-fvisibility=hidden')
+ add_project_arguments('-fvisibility=hidden', language: 'c')
+ add_project_arguments('-fvisibility=hidden', language: 'cpp')
+ if have_objc
+ add_project_arguments('-fvisibility=hidden', language: 'objc')
+ endif
+ export_define = 'extern __attribute__ ((visibility ("default")))'
+ else
+ export_define = 'extern'
+ endif
+
+ # Passing this through the command line would be too messy
+ cdata.set('GST_API_EXPORT', export_define)
+
+ # Disable strict aliasing
+ if cc.has_argument('-fno-strict-aliasing')
+ add_project_arguments('-fno-strict-aliasing', language: 'c')
+ endif
+ if cxx.has_argument('-fno-strict-aliasing')
+ add_project_arguments('-fno-strict-aliasing', language: 'cpp')
+ endif
+
+ # Define G_DISABLE_DEPRECATED for development versions
+ if gst_version_is_dev
+ message('Disabling deprecated GLib API')
+ add_project_arguments('-DG_DISABLE_DEPRECATED', language: 'c')
+ endif
+
+ cast_checks = get_option('gobject-cast-checks')
+ if cast_checks.disabled() or (cast_checks.auto() and not gst_version_is_dev)
+ message('Disabling GLib cast checks')
+ add_project_arguments('-DG_DISABLE_CAST_CHECKS', language: 'c')
+ endif
+
+ glib_asserts = get_option('glib-asserts')
+ if glib_asserts.disabled() or (glib_asserts.auto() and not gst_version_is_dev)
+ message('Disabling GLib asserts')
+ add_project_arguments('-DG_DISABLE_ASSERT', language: 'c')
+ endif
+
+ glib_checks = get_option('glib-checks')
+ if glib_checks.disabled() or (glib_checks.auto() and not gst_version_is_dev)
+ message('Disabling GLib checks')
+ add_project_arguments('-DG_DISABLE_CHECKS', language: 'c')
+ endif
+
+ check_headers = [
+ ['HAVE_DLFCN_H', 'dlfcn.h'],
+ ['HAVE_FCNTL_H', 'fcntl.h'],
+ ['HAVE_INTTYPES_H', 'inttypes.h'],
+ ['HAVE_MEMORY_H', 'memory.h'],
+ ['HAVE_NETINET_IN_H', 'netinet/in.h'],
+ ['HAVE_NETINET_IP_H', 'netinet/ip.h'],
+ ['HAVE_NETINET_TCP_H', 'netinet/tcp.h'],
+ ['HAVE_PTHREAD_H', 'pthread.h'],
+ ['HAVE_STDINT_H', 'stdint.h'],
+ ['HAVE_STDLIB_H', 'stdlib.h'],
+ ['HAVE_STRINGS_H', 'strings.h'],
+ ['HAVE_STRING_H', 'string.h'],
+ ['HAVE_SYS_PARAM_H', 'sys/param.h'],
+ ['HAVE_SYS_SOCKET_H', 'sys/socket.h'],
+ ['HAVE_SYS_STAT_H', 'sys/stat.h'],
+ ['HAVE_SYS_TIME_H', 'sys/time.h'],
+ ['HAVE_SYS_TYPES_H', 'sys/types.h'],
+ ['HAVE_SYS_UTSNAME_H', 'sys/utsname.h'],
+ ['HAVE_UNISTD_H', 'unistd.h'],
+ ['HAVE_WINDOWS_H', 'windows.h'],
+ ['HAVE_WINSOCK2_H', 'winsock2.h'],
+ ['HAVE_WS2TCPIP_H', 'ws2tcpip.h'],
+ ]
+
+ foreach h : check_headers
+ if cc.has_header(h.get(1))
+ cdata.set(h.get(0), 1)
+ endif
+ endforeach
+
+ check_functions = [
+ ['HAVE_DCGETTEXT', 'dcgettext'],
+ ['HAVE_GETPAGESIZE', 'getpagesize'],
+ ['HAVE_GMTIME_R', 'gmtime_r'],
+ ['HAVE_MEMFD_CREATE', 'memfd_create'],
+ ['HAVE_MMAP', 'mmap'],
+ ['HAVE_PIPE2', 'pipe2'],
+ ['HAVE_GETRUSAGE', 'getrusage', '#include<sys/resource.h>'],
+ ]
+
+ foreach f : check_functions
+ prefix = ''
+ if f.length() == 3
+ prefix = f.get(2)
+ endif
+ if cc.has_function(f.get(1), prefix: prefix)
+ cdata.set(f.get(0), 1)
+ endif
+ endforeach
+
+ cdata.set('SIZEOF_CHAR', cc.sizeof('char'))
+ cdata.set('SIZEOF_INT', cc.sizeof('int'))
+ cdata.set('SIZEOF_LONG', cc.sizeof('long'))
+ cdata.set('SIZEOF_SHORT', cc.sizeof('short'))
+ cdata.set('SIZEOF_VOIDP', cc.sizeof('void*'))
+
+ cdata.set_quoted('VERSION', gst_version)
+ cdata.set_quoted('PACKAGE', 'gst-plugins-bad')
+ cdata.set_quoted('PACKAGE_VERSION', gst_version)
+ cdata.set_quoted('PACKAGE_BUGREPORT', 'https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/issues/new')
+ cdata.set_quoted('PACKAGE_NAME', 'GStreamer Bad Plug-ins')
+ cdata.set_quoted('GETTEXT_PACKAGE', 'gst-plugins-bad-1.0')
+ cdata.set_quoted('GST_API_VERSION', api_version)
+ cdata.set_quoted('GST_LICENSE', 'LGPL')
+ cdata.set_quoted('LIBDIR', join_paths(get_option('prefix'), get_option('libdir')))
+ cdata.set_quoted('LOCALEDIR', join_paths(get_option('prefix'), get_option('localedir')))
+
+ warning_flags = [
+ '-Wmissing-declarations',
+ '-Wredundant-decls',
+ '-Wwrite-strings',
+ '-Wformat',
+ '-Wformat-security',
+ '-Winit-self',
+ '-Wmissing-include-dirs',
+ '-Waddress',
+ '-Wno-multichar',
+ '-Wvla',
+ '-Wpointer-arith',
+ ]
+
+ warning_c_flags = [
+ '-Wmissing-prototypes',
+ '-Wdeclaration-after-statement',
+ '-Wold-style-definition',
+ ]
+
+ warning_cxx_flags = [
+ '-Wformat-nonliteral',
+ ]
+
+ foreach extra_arg : warning_c_flags
+ if cc.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'c')
+ endif
+ endforeach
+
+ foreach extra_arg : warning_cxx_flags
+ if cxx.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'cpp')
+ endif
+ endforeach
+
+ foreach extra_arg : warning_flags
+ if cc.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'c')
+ endif
+ if cxx.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'cpp')
+ endif
+ endforeach
+
+ # GStreamer package name and origin url
+ gst_package_name = get_option('package-name')
+ if gst_package_name == ''
+ if gst_version_nano == 0
+ gst_package_name = 'GStreamer Bad Plug-ins source release'
+ elif gst_version_nano == 1
+ gst_package_name = 'GStreamer Bad Plug-ins git'
+ else
+ gst_package_name = 'GStreamer Bad Plug-ins prerelease'
+ endif
+ endif
+ cdata.set_quoted('GST_PACKAGE_NAME', gst_package_name)
+ cdata.set_quoted('GST_PACKAGE_ORIGIN', get_option('package-origin'))
+
+ # FIXME: This should be exposed as a configuration option
+ if host_system == 'linux'
+ cdata.set_quoted('DEFAULT_VIDEOSRC', 'v4l2src')
+ elif ['darwin', 'ios'].contains(host_system)
+ cdata.set_quoted('DEFAULT_VIDEOSRC', 'avfvideosrc')
+ cdata.set_quoted('GST_EXTRA_MODULE_SUFFIX', '.dylib')
+ # Yes, we set this for iOS too. Same as Autotools.
+ cdata.set('HAVE_OSX', 1)
+ else
+ cdata.set_quoted('DEFAULT_VIDEOSRC', 'videotestsrc')
+ endif
+
+ # Mandatory GST deps
+ gst_dep = dependency('gstreamer-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_dep'])
+ gstbase_dep = dependency('gstreamer-base-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_base_dep'])
+ gstnet_dep = dependency('gstreamer-net-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_net_dep'])
+ gstcontroller_dep = dependency('gstreamer-controller-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_controller_dep'])
+
+ gstpbutils_dep = dependency('gstreamer-pbutils-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'pbutils_dep'])
+ gstallocators_dep = dependency('gstreamer-allocators-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'allocators_dep'])
+ gstapp_dep = dependency('gstreamer-app-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'app_dep'])
+ gstaudio_dep = dependency('gstreamer-audio-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'audio_dep'])
+ gstfft_dep = dependency('gstreamer-fft-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'fft_dep'])
+ gstriff_dep = dependency('gstreamer-riff-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'riff_dep'])
+ gstrtp_dep = dependency('gstreamer-rtp-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'rtp_dep'])
+ gstrtsp_dep = dependency('gstreamer-rtsp-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'rtsp_dep'])
+ gstsdp_dep = dependency('gstreamer-sdp-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'sdp_dep'])
+ gsttag_dep = dependency('gstreamer-tag-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'tag_dep'])
+ gstvideo_dep = dependency('gstreamer-video-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'video_dep'])
+ gstcheck_dep = dependency('gstreamer-check-1.0', version : gst_req,
+ required : get_option('tests'),
+ fallback : ['gstreamer', 'gst_check_dep'])
+
+ # GStreamer OpenGL
+ gstgl_dep = dependency('gstreamer-gl-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstgl_dep'], required: get_option('gl'))
+ gstglproto_dep = dependency('gstreamer-gl-prototypes-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglproto_dep'], required: get_option('gl'))
+ gstglx11_dep = dependency('', required : false)
+ gstglwayland_dep = dependency('', required : false)
+ gstglegl_dep = dependency('', required : false)
+
+ if gstgl_dep.found()
+ if gstgl_dep.type_name() == 'pkgconfig'
+ gst_gl_apis = gstgl_dep.get_pkgconfig_variable('gl_apis').split()
+ gst_gl_winsys = gstgl_dep.get_pkgconfig_variable('gl_winsys').split()
+ gst_gl_platforms = gstgl_dep.get_pkgconfig_variable('gl_platforms').split()
+ else
+ gstbase = subproject('gst-plugins-base')
+ gst_gl_apis = gstbase.get_variable('enabled_gl_apis')
+ gst_gl_winsys = gstbase.get_variable('enabled_gl_winsys')
+ gst_gl_platforms = gstbase.get_variable('enabled_gl_platforms')
+ endif
+
+ message('GStreamer OpenGL window systems: @0@'.format(' '.join(gst_gl_winsys)))
+ message('GStreamer OpenGL platforms: @0@'.format(' '.join(gst_gl_platforms)))
+ message('GStreamer OpenGL apis: @0@'.format(' '.join(gst_gl_apis)))
+
+ foreach ws : ['x11', 'wayland', 'android', 'cocoa', 'eagl', 'win32', 'dispmanx', 'viv_fb']
+ set_variable('gst_gl_have_window_@0@'.format(ws), gst_gl_winsys.contains(ws))
+ endforeach
+
+ foreach p : ['glx', 'egl', 'cgl', 'eagl', 'wgl']
+ set_variable('gst_gl_have_platform_@0@'.format(p), gst_gl_platforms.contains(p))
+ endforeach
+
+ foreach api : ['gl', 'gles2']
+ set_variable('gst_gl_have_api_@0@'.format(api), gst_gl_apis.contains(api))
+ endforeach
+
+ # Behind specific checks because meson fails at optional dependencies with a
+ # fallback to the same subproject. On the first failure, meson will never
+ # check the system again even if the fallback never existed.
+ # Last checked with meson 0.54.3
+ if gst_gl_have_window_x11
+ gstglx11_dep = dependency('gstreamer-gl-x11-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglx11_dep'], required: true)
+ endif
+ if gst_gl_have_window_wayland
+ gstglwayland_dep = dependency('gstreamer-gl-wayland-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglwayland_dep'], required: true)
+ endif
+ if gst_gl_have_platform_egl
+ gstglegl_dep = dependency('gstreamer-gl-egl-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglegl_dep'], required: true)
+ endif
+ endif
+
+ libm = cc.find_library('m', required : false)
+ glib_dep = dependency('glib-2.0', version : glib_req, fallback: ['glib', 'libglib_dep'])
+ gmodule_dep = dependency('gmodule-2.0', fallback: ['glib', 'libgmodule_dep'])
+ gio_dep = dependency('gio-2.0', fallback: ['glib', 'libgio_dep'])
+ # gio-unix-2.0 is used by sys/bluez
+
+ # Optional dep of ext/gl and gst/librfb
+ x11_dep = dependency('x11', required : get_option('x11'))
+ if x11_dep.found()
+ cdata.set('HAVE_X11', 1)
+ endif
+
+ if host_machine.system() == 'windows'
+ winsock2 = [cc.find_library('ws2_32')]
+ else
+ winsock2 = []
+ endif
+
+ if ['darwin', 'ios'].contains(host_system)
+ if not have_objc
+ error('Building on MacOS/iOS/etc requires an ObjC compiler')
+ endif
+ if host_system == 'ios'
+ cdata.set('HAVE_IOS', 1)
+ endif
+
+ avfoundation_dep = dependency('AVFoundation', required : false)
+ if avfoundation_dep.found()
+ cdata.set('HAVE_AVFOUNDATION', 1)
+ endif
+
+ videotoolbox_dep = dependency('VideoToolbox', required : false)
+ if videotoolbox_dep.found()
+ cdata.set('HAVE_VIDEOTOOLBOX', 1)
+ endif
+
+ # FIXME: framework.version() returns 'unknown'
+ # if videotoolbox_dep.version().version_compare('>=10.9.6')
+ # cdata.set('HAVE_VIDEOTOOLBOX_10_9_6', 1)
+ # endif
+ endif
+
+ have_orcc = false
+ orcc_args = []
+ orc_targets = []
+ # Used by various libraries/elements that use Orc code
+ orc_dep = dependency('orc-0.4', version : orc_req, required : get_option('orc'),
+ fallback : ['orc', 'orc_dep'])
+ orcc = find_program('orcc', required : get_option('orc'))
+ if orc_dep.found() and orcc.found()
+ have_orcc = true
+ orcc_args = [orcc, '--include', 'glib.h']
+ cdata.set('HAVE_ORC', 1)
+ else
+ message('Orc Compiler not found or disabled, will use backup C code')
+ cdata.set('DISABLE_ORC', 1)
+ endif
+ cdata.set('GST_ENABLE_EXTRA_CHECKS', not get_option('extra-checks').disabled())
+
+ gnustl_dep = declare_dependency()
+ if host_system == 'android'
+ gnustl_dep = dependency('gnustl', required : false)
+ endif
+
++# TIZEN BUILD OPTION
++cdata.set('TIZEN_FEATURE_ADAPTIVE_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_HLSDEMUX_PROPERTY', true)
++cdata.set('TIZEN_FEATURE_HLSDEMUX_EMPTY_VTT', true)
++cdata.set('TIZEN_FEATURE_HLSDEMUX_UPDATE_SEGMENT', true)
++cdata.set('TIZEN_FEATURE_HLSDEMUX_DISCONT_SEQUENCE', true)
++cdata.set('TIZEN_FEATURE_TSDEMUX_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_TSDEMUX_INVALID_PCR_PID', true)
++cdata.set('TIZEN_FEATURE_TSDEMUX_LANG_TAG', true)
++cdata.set('TIZEN_FEATURE_TSDEMUX_UPDATE_PMT', true)
++cdata.set('TIZEN_FEATURE_TSDEMUX_UPDATE_STREAM', true)
++cdata.set('TIZEN_FEATURE_HLS_WEBVTT', true)
++cdata.set('TIZEN_FEATURE_OALSINK_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_MPEGDEMUX_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_H264PARSE_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_AD', true)
++cdata.set('TIZEN_FEATURE_HLSDEMUX_LANG_TAG', true)
++cdata.set('TIZEN_FEATURE_HLSDEMUX_DISCONT', true)
++cdata.set('TIZEN_FEATURE_IMPORT_NETSIM', true)
++cdata.set('TIZEN_FEATURE_PITCH_AUDIO_META', true)
++# TIZEN BUILD OPTION end
++
+ # Disable compiler warnings for unused variables and args if gst debug system is disabled
+ if gst_dep.type_name() == 'internal'
+ gst_debug_disabled = not subproject('gstreamer').get_variable('gst_debug')
+ else
+ # We can't check that in the case of subprojects as we won't
+ # be able to build against an internal dependency (which is not built yet)
+ gst_debug_disabled = cc.has_header_symbol('gst/gstconfig.h', 'GST_DISABLE_GST_DEBUG', dependencies: gst_dep)
+ endif
+
+ if gst_debug_disabled
+ message('GStreamer debug system is disabled')
+ if cc.has_argument('-Wno-unused')
+ add_project_arguments('-Wno-unused', language: 'c')
+ endif
+ if cxx.has_argument ('-Wno-unused')
+ add_project_arguments('-Wno-unused', language: 'cpp')
+ endif
+ else
+ message('GStreamer debug system is enabled')
+ endif
+
+ gst_plugins_bad_args = ['-DHAVE_CONFIG_H']
+ configinc = include_directories('.')
+ libsinc = include_directories('gst-libs')
+
+ python3 = import('python').find_installation()
+
+ gir = find_program('g-ir-scanner', required : get_option('introspection'))
+ gnome = import('gnome')
+ build_gir = gir.found() and (not meson.is_cross_build() or get_option('introspection').enabled())
+ gir_init_section = [ '--add-init-section=extern void gst_init(gint*,gchar**);' + \
+ 'g_setenv("GST_REGISTRY_1.0", "@0@", TRUE);'.format(meson.current_build_dir() + '/gir_empty_registry.reg') + \
+ 'g_setenv("GST_PLUGIN_PATH_1_0", "", TRUE);' + \
+ 'g_setenv("GST_PLUGIN_SYSTEM_PATH_1_0", "", TRUE);' + \
+ 'gst_init(NULL,NULL);', '--quiet']
+
+ presetdir = join_paths(get_option('datadir'), 'gstreamer-' + api_version, 'presets')
+
+ pkgconfig = import('pkgconfig')
+ plugins_pkgconfig_install_dir = join_paths(plugins_install_dir, 'pkgconfig')
+ if get_option('default_library') == 'shared'
+ # If we don't build static plugins there is no need to generate pc files
+ plugins_pkgconfig_install_dir = disabler()
+ endif
+
+ pkgconfig_variables = ['exec_prefix=${prefix}',
+ 'toolsdir=${exec_prefix}/bin',
+ 'pluginsdir=${libdir}/gstreamer-1.0',
+ 'datarootdir=${prefix}/share',
+ 'datadir=${datarootdir}',
+ 'girdir=${datadir}/gir-1.0',
+ 'typelibdir=${libdir}/girepository-1.0']
+
+ pkgconfig_subdirs = ['gstreamer-1.0']
+
+ pkgconfig.generate(
+ libraries : [gst_dep],
+ variables : pkgconfig_variables,
+ subdirs : pkgconfig_subdirs,
+ name : 'gstreamer-plugins-bad-1.0',
+ description : 'Streaming media framework, bad plugins libraries',
+ )
+
+ subdir('gst-libs')
+ subdir('gst')
+ subdir('sys')
+ subdir('ext')
+ subdir('tests')
+ subdir('data')
+ subdir('tools')
+
+ if have_orcc
+ update_orc_dist_files = find_program('scripts/update-orc-dist-files.py')
+
+ orc_update_targets = []
+ foreach t : orc_targets
+ orc_name = t.get('name')
+ orc_file = t.get('orc-source')
+ header = t.get('header')
+ source = t.get('source')
+ # alias_target() only works with build targets, so can't use run_target() here
+ orc_update_targets += [
+ custom_target('update-orc-@0@'.format(orc_name),
+ input: [header, source],
+ command: [update_orc_dist_files, orc_file, header, source],
+         output: ['@0@-dist.c'.format(orc_name)]) # nominal output only; the script updates the dist files in the source tree
+ ]
+ endforeach
+
+ if meson.version().version_compare('>= 0.52') and orc_update_targets.length() > 0
+ update_orc_dist_target = alias_target('update-orc-dist', orc_update_targets)
+ endif
+ endif
+
+ # xgettext is optional (on Windows for instance)
+ if find_program('xgettext', required : get_option('nls')).found()
+ cdata.set('ENABLE_NLS', 1)
+ subdir('po')
+ endif
+
+ subdir('scripts')
+
+ # Set release date
+ if gst_version_nano == 0
+ extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-bad.doap'))
+ if run_result.returncode() == 0
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
+ else
+ # Error out if our release can't be found in the .doap file
+ error(run_result.stderr())
+ endif
+ endif
+
+ if glib_dep.version().version_compare('< 2.67.4')
+ cdata.set('g_memdup2(ptr,sz)', '(G_LIKELY(((guint64)(sz)) < G_MAXUINT)) ? g_memdup(ptr,sz) : (g_abort(),NULL)')
+ endif
+
+ configure_file(output : 'config.h', configuration : cdata)
+
+ run_command(python3, '-c', 'import shutil; shutil.copy("hooks/pre-commit.hook", ".git/hooks/pre-commit")')
+
+ subdir('docs')
+
+ if meson.version().version_compare('>= 0.54')
+ plugin_names = []
+ foreach plugin: plugins
+ # FIXME: Use str.subtring() when we can depend on Meson 0.56
+ split = plugin.name().split('gst')
+ if split.length() == 2
+ plugin_names += [split[1]]
+ else
+ warning('Need substring API in meson >= 0.56 to properly parse plugin name: ' + plugin.name())
+ plugin_names += [plugin.name()]
+ endif
+ endforeach
+ summary({'Plugins':plugin_names}, list_sep: ', ')
+ endif
--- /dev/null
+ option('gst_play_tests', type: 'boolean', value: false,
+ description: 'Enable GstPlay tests that need network access')
+
+ # Feature options for plugins without external deps
+ option('accurip', type : 'feature', value : 'auto')
+ option('adpcmdec', type : 'feature', value : 'auto')
+ option('adpcmenc', type : 'feature', value : 'auto')
+ option('aiff', type : 'feature', value : 'auto')
+ option('asfmux', type : 'feature', value : 'auto')
+ option('audiobuffersplit', type : 'feature', value : 'auto')
+ option('audiofxbad', type : 'feature', value : 'auto')
+ option('audiolatency', type : 'feature', value : 'auto')
+ option('audiomixmatrix', type : 'feature', value : 'auto')
+ option('audiovisualizers', type : 'feature', value : 'auto')
+ option('autoconvert', type : 'feature', value : 'auto')
+ option('bayer', type : 'feature', value : 'auto')
+ option('camerabin2', type : 'feature', value : 'auto')
+ option('codecalpha', type : 'feature', value : 'auto')
+ option('coloreffects', type : 'feature', value : 'auto')
+ option('debugutils', type : 'feature', value : 'auto')
+ option('dvbsubenc', type : 'feature', value : 'auto')
+ option('dvbsuboverlay', type : 'feature', value : 'auto')
+ option('dvdspu', type : 'feature', value : 'auto')
+ option('faceoverlay', type : 'feature', value : 'auto')
+ option('festival', type : 'feature', value : 'auto')
+ option('fieldanalysis', type : 'feature', value : 'auto')
+ option('freeverb', type : 'feature', value : 'auto')
+ option('frei0r', type : 'feature', value : 'auto')
+ option('gaudieffects', type : 'feature', value : 'auto')
+ option('gdp', type : 'feature', value : 'auto')
+ option('geometrictransform', type : 'feature', value : 'auto')
+ option('id3tag', type : 'feature', value : 'auto')
+ option('inter', type : 'feature', value : 'auto')
+ option('interlace', type : 'feature', value : 'auto')
+ option('ivfparse', type : 'feature', value : 'auto')
+ option('ivtc', type : 'feature', value : 'auto')
+ option('jp2kdecimator', type : 'feature', value : 'auto')
+ option('jpegformat', type : 'feature', value : 'auto')
+ option('librfb', type : 'feature', value : 'auto')
+ option('midi', type : 'feature', value : 'auto')
+ option('mpegdemux', type : 'feature', value : 'auto')
+ option('mpegpsmux', type : 'feature', value : 'auto')
+ option('mpegtsdemux', type : 'feature', value : 'auto')
+ option('mpegtsmux', type : 'feature', value : 'auto')
+ option('mxf', type : 'feature', value : 'auto')
+ option('netsim', type : 'feature', value : 'auto')
+ option('onvif', type : 'feature', value : 'auto')
+ option('pcapparse', type : 'feature', value : 'auto')
+ option('pnm', type : 'feature', value : 'auto')
+ option('proxy', type : 'feature', value : 'auto')
+ option('qroverlay', type : 'feature', value : 'auto')
+ option('rawparse', type : 'feature', value : 'auto')
+ option('removesilence', type : 'feature', value : 'auto')
+ option('rist', type : 'feature', value : 'auto')
+ option('rtmp2', type : 'feature', value : 'auto')
+ option('rtp', type : 'feature', value : 'auto')
+ option('sdp', type : 'feature', value : 'auto')
+ option('segmentclip', type : 'feature', value : 'auto')
+ option('siren', type : 'feature', value : 'auto')
+ option('smooth', type : 'feature', value : 'auto')
+ option('speed', type : 'feature', value : 'auto')
+ option('subenc', type : 'feature', value : 'auto')
+ option('switchbin', type : 'feature', value : 'auto')
+ option('timecode', type : 'feature', value : 'auto')
+ option('videofilters', type : 'feature', value : 'auto')
+ option('videoframe_audiolevel', type : 'feature', value : 'auto')
+ option('videoparsers', type : 'feature', value : 'auto')
+ option('videosignal', type : 'feature', value : 'auto')
+ option('vmnc', type : 'feature', value : 'auto')
+ option('y4m', type : 'feature', value : 'auto')
+
+ # Feature options for libraries that need external deps
+ option('opencv', type : 'feature', value : 'auto', description : 'OpenCV computer vision library support')
+
+ # Feature options for optional deps in plugins
+ option('wayland', type : 'feature', value : 'auto', description : 'Wayland plugin/library, support in the Vulkan plugin')
+ option('x11', type : 'feature', value : 'auto', description : 'X11 support in Vulkan, GL and rfb plugins')
+
+ # Feature options for plugins that need external deps
+ option('aes', type : 'feature', value : 'auto', description : 'AES encryption/decryption plugin')
+ option('aom', type : 'feature', value : 'auto', description : 'AOM AV1 video codec plugin')
+ option('avtp', type : 'feature', value : 'auto', description : 'Audio/Video Transport Protocol (AVTP) plugin')
+ option('androidmedia', type : 'feature', value : 'auto', description : 'Video capture and codec plugins for Android')
+ option('applemedia', type : 'feature', value : 'auto', description : 'Video capture and codec access plugins for macOS and iOS')
+ option('asio', type : 'feature', value : 'auto', description : 'Steinberg Audio Streaming Input Output (ASIO) plugin')
+ option('asio-sdk-path', type : 'string', value : '', description : 'Full path to Steinberg Audio Streaming Input Output (ASIO) SDK')
+ option('assrender', type : 'feature', value : 'auto', description : 'ASS/SSA subtitle renderer plugin')
+ option('bluez', type : 'feature', value : 'auto', description : 'Bluetooth audio A2DP/AVDTP sink, AVDTP source plugin')
+ option('bs2b', type : 'feature', value : 'auto', description : 'Bauer stereophonic-to-binaural audio plugin')
+ option('bz2', type : 'feature', value : 'auto', description : 'bz2 stream encoder and decoder plugin')
+ option('chromaprint', type : 'feature', value : 'auto', description : 'Chromaprint fingerprint audio plugin')
+ option('closedcaption', type : 'feature', value : 'auto', description : 'Closed caption extractor, decoder, and overlay plugin')
+ option('colormanagement', type : 'feature', value : 'auto', description : 'Color management correction plugin')
+ option('curl', type : 'feature', value : 'auto', description : 'cURL network source and sink plugin')
+ option('curl-ssh2', type : 'feature', value : 'auto', description : 'cURL network source and sink plugin libssh2 support')
+ option('d3dvideosink', type : 'feature', value : 'auto', description : 'Direct3D video sink plugin')
+ option('d3d11', type : 'feature', value : 'auto', description : 'Direct3D11 plugin')
+ option('dash', type : 'feature', value : 'auto', description : 'DASH demuxer plugin')
+ option('dc1394', type : 'feature', value : 'auto', description : 'libdc1394 IIDC camera source plugin')
+ option('decklink', type : 'feature', value : 'auto', description : 'DeckLink audio/video source/sink plugin')
+ option('directfb', type : 'feature', value : 'auto', description : 'DirectFB video sink plugin')
+ option('directsound', type : 'feature', value : 'auto', description : 'Directsound audio source plugin')
+ option('dtls', type : 'feature', value : 'auto', description : 'DTLS encoder and decoder plugin')
+ option('dts', type : 'feature', value : 'auto', description : 'DTS audio decoder plugin')
+ option('dvb', type : 'feature', value : 'auto', description : 'DVB video bin and source plugin')
+ option('faac', type : 'feature', value : 'auto', description : 'Free AAC audio encoder plugin')
+ option('faad', type : 'feature', value : 'auto', description : 'Free AAC audio decoder plugin')
+ option('fbdev', type : 'feature', value : 'auto', description : 'Framebuffer video sink plugin')
+ option('fdkaac', type : 'feature', value : 'auto', description : 'Fraunhofer AAC audio codec plugin')
+ option('flite', type : 'feature', value : 'auto', description : 'Flite speech synthesizer source plugin')
+ option('fluidsynth', type : 'feature', value : 'auto', description : 'Fluidsynth MIDI decoder plugin')
+ option('gl', type : 'feature', value : 'auto', description : 'GStreamer OpenGL integration support (used by various plugins)')
+ option('gme', type : 'feature', value : 'auto', description : 'libgme gaming console music file decoder plugin')
+ option('gs', type : 'feature', value : 'auto', description : 'Google Cloud Storage source and sink plugin')
+ option('gsm', type : 'feature', value : 'auto', description : 'GSM encoder/decoder plugin')
+ option('ipcpipeline', type : 'feature', value : 'auto', description : 'Inter-process communication plugin')
+ option('iqa', type : 'feature', value : 'auto', description : 'Image quality assessment plugin')
+ option('kate', type : 'feature', value : 'auto', description : 'Kate subtitle parser, tagger, and codec plugin')
+ option('kms', type : 'feature', value : 'auto', description : 'KMS video sink plugin')
+ option('ladspa', type : 'feature', value : 'auto', description : 'LADSPA plugin bridge')
+ option('ldac', type : 'feature', value : 'auto', description : 'LDAC bluetooth audio codec plugin')
+ option('libde265', type : 'feature', value : 'auto', description : 'HEVC/H.265 video decoder plugin')
+ option('libmms', type : 'feature', value : 'auto', description : 'Microsoft multimedia server network source plugin')
+ option('openaptx', type : 'feature', value : 'auto', description : 'Open Source implementation of Audio Processing Technology codec (aptX) plugin')
+ option('lv2', type : 'feature', value : 'auto', description : 'LV2 audio plugin bridge')
+ option('mediafoundation', type : 'feature', value : 'auto', description : 'Microsoft Media Foundation plugin')
+ option('microdns', type : 'feature', value : 'auto', description : 'libmicrodns-based device provider')
+ option('modplug', type : 'feature', value : 'auto', description : 'ModPlug audio decoder plugin')
+ option('mpeg2enc', type : 'feature', value : 'auto', description : 'mpeg2enc video encoder plugin')
+ option('mplex', type : 'feature', value : 'auto', description : 'mplex audio/video multiplexer plugin')
+ option('msdk', type : 'feature', value : 'auto', description : 'Intel Media SDK video encoder/decoder plugin')
+ option('musepack', type : 'feature', value : 'auto', description : 'libmpcdec Musepack decoder plugin')
+ option('neon', type : 'feature', value : 'auto', description : 'NEON HTTP source plugin')
+ option('nvcodec', type : 'feature', value : 'auto', description : 'NVIDIA GPU codec plugin')
+ option('ofa', type : 'feature', value : 'auto', description : 'Open Fingerprint Architecture library plugin')
+ option('onnx', type : 'feature', value : 'auto', description : 'ONNX neural network plugin')
+ option('openal', type : 'feature', value : 'auto', description : 'OpenAL plugin')
+ option('openexr', type : 'feature', value : 'auto', description : 'OpenEXR plugin')
+ option('openh264', type : 'feature', value : 'auto', description : 'H.264 video codec plugin')
+ option('openjpeg', type : 'feature', value : 'auto', description : 'JPEG2000 image codec plugin')
+ option('openmpt', type : 'feature', value : 'auto', description : 'OpenMPT module music library plugin')
+ option('openni2', type : 'feature', value : 'auto', description : 'OpenNI2 library plugin')
+ option('opensles', type : 'feature', value : 'auto', description : 'OpenSL ES audio source/sink plugin')
+ option('opus', type : 'feature', value : 'auto', description : 'OPUS audio parser plugin')
+ option('resindvd', type : 'feature', value : 'auto', description : 'Resin DVD playback plugin')
+ option('rsvg', type : 'feature', value : 'auto', description : 'SVG overlayer and image decoder plugin')
+ option('rtmp', type : 'feature', value : 'auto', description : 'RTMP video network source and sink plugin')
+ option('sbc', type : 'feature', value : 'auto', description : 'SBC bluetooth audio codec plugin')
+ option('sctp', type : 'feature', value : 'auto', description : 'SCTP plugin')
+ option('shm', type : 'feature', value : 'auto', description : 'Shared memory source/sink plugin')
+ option('smoothstreaming', type : 'feature', value : 'auto', description : 'Microsoft Smooth Streaming demuxer plugin')
+ option('sndfile', type : 'feature', value : 'auto', description : 'libsndfile plugin')
+ option('soundtouch', type : 'feature', value : 'auto', description : 'Audio pitch controller & BPM detection plugin')
+ option('spandsp', type : 'feature', value : 'auto', description : 'Packet loss concealment audio plugin')
+ option('srt', type : 'feature', value : 'auto', description : 'Secure, Reliable, Transport client/server network source/sink plugin')
+ option('srtp', type : 'feature', value : 'auto', description : 'Secure RTP codec plugin')
+ option('svthevcenc', type : 'feature', value : 'auto', description : 'Scalable Video Technology for HEVC encoder plugin')
+ option('teletext', type : 'feature', value : 'auto', description : 'Teletext plugin')
+ option('tinyalsa', type : 'feature', value : 'auto', description : 'TinyALSA plugin')
+ option('transcode', type : 'feature', value : 'auto', description : 'Transcode plugin')
+ option('ttml', type : 'feature', value : 'auto', description : 'TTML subtitle parser and renderer plugin')
+ option('uvch264', type : 'feature', value : 'auto', description : 'UVC compliant H.264 camera source plugin')
+ option('va', type : 'feature', value : 'auto', description: 'VA-API new plugin')
+ option('voaacenc', type : 'feature', value : 'auto', description : 'AAC audio encoder plugin')
+ option('voamrwbenc', type : 'feature', value : 'auto', description : 'AMR-WB audio encoder plugin')
+ option('vulkan', type : 'feature', value : 'auto', description : 'Vulkan video sink plugin')
+ option('wasapi', type : 'feature', value : 'auto', description : 'Windows Audio Session API source/sink plugin')
+ option('wasapi2', type : 'feature', value : 'auto', description : 'Windows Audio Session API source/sink plugin with WinRT API')
+ option('webp', type : 'feature', value : 'auto', description : 'WebP image codec plugin')
+ option('webrtc', type : 'feature', value : 'auto', description : 'WebRTC audio/video network bin plugin')
+ option('webrtcdsp', type : 'feature', value : 'auto', description : 'Plugin with various audio filters provided by the WebRTC audio processing library')
+ option('wildmidi', type : 'feature', value : 'auto', description : 'WildMidi midi soft synth plugin')
+ option('winks', type : 'feature', value : 'auto', description : 'Windows Kernel Streaming video source plugin')
+ option('winscreencap', type : 'feature', value : 'auto', description : 'Windows Screen Capture video source plugin')
+ option('x265', type : 'feature', value : 'auto', description : 'HEVC/H.265 video encoder plugin')
+ option('zbar', type : 'feature', value : 'auto', description : 'Barcode image scanner plugin using zbar library')
+ option('zxing', type : 'feature', value : 'auto', description : 'Barcode image scanner plugin using zxing-cpp library')
+ option('wpe', type : 'feature', value : 'auto', description : 'WPE Web browser plugin')
+ option('magicleap', type : 'feature', value : 'auto', description : 'Magic Leap platform support')
+ option('v4l2codecs', type : 'feature', value : 'auto', description : 'Video4Linux Stateless CODECs support')
+ option('isac', type : 'feature', value : 'auto', description : 'iSAC plugin')
+
+ # HLS plugin options
+ option('hls', type : 'feature', value : 'auto', description : 'HTTP Live Streaming plugin')
+ option('hls-crypto', type : 'combo', value : 'auto', choices : ['auto', 'nettle', 'libgcrypt', 'openssl'],
+ description: 'Crypto library to use for HLS plugin')
+
+ # SCTP plugin options
+ option('sctp-internal-usrsctp', type: 'feature', value : 'enabled',
+ description: 'Whether to use the bundled usrsctp library or the system one')
+
+ # MSDK plugin options
+ option('mfx_api', type : 'combo', choices : ['MSDK', 'oneVPL', 'auto'], value : 'auto',
+ description : 'Select MFX API to build against')
+
+ # Common feature options
+ option('examples', type : 'feature', value : 'auto', yield : true)
+ option('tests', type : 'feature', value : 'auto', yield : true)
+ option('introspection', type : 'feature', value : 'auto', yield : true, description : 'Generate gobject-introspection bindings')
+ option('nls', type : 'feature', value : 'auto', yield: true, description : 'Enable native language support (translations)')
+ option('orc', type : 'feature', value : 'auto', yield : true)
+ option('gobject-cast-checks', type : 'feature', value : 'auto', yield : true,
+ description: 'Enable run-time GObject cast checks (auto = enabled for development, disabled for stable releases)')
+ option('glib-asserts', type : 'feature', value : 'enabled', yield : true,
+ description: 'Enable GLib assertion (auto = enabled for development, disabled for stable releases)')
+ option('glib-checks', type : 'feature', value : 'enabled', yield : true,
+ description: 'Enable GLib checks such as API guards (auto = enabled for development, disabled for stable releases)')
+ option('extra-checks', type : 'feature', value : 'enabled', yield : true, description : 'Enable extra runtime checks')
+
+ # Common options
+ option('package-name', type : 'string', yield : true,
+ description : 'package name to use in plugins')
+ option('package-origin', type : 'string', value : 'Unknown package origin', yield : true,
+ description : 'package origin URL to use in plugins')
+ option('doc', type : 'feature', value : 'auto', yield: true,
+ description: 'Enable documentation.')
++
++# Tizen Options
++option('tv-profile', type : 'boolean', value : false,
++        description : 'Enable Tizen TV profile specific build configuration')
--- /dev/null
--- /dev/null
++# SOME DESCRIPTIVE TITLE.
++# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
++# This file is distributed under the same license as the gst-plugins-bad-1.0 package.
++# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
++#
++#, fuzzy
++msgid ""
++msgstr ""
++"Project-Id-Version: gst-plugins-bad-1.0\n"
++"Report-Msgid-Bugs-To: \n"
++"POT-Creation-Date: 2021-09-23 01:34+0100\n"
++"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
++"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
++"Language-Team: LANGUAGE <LL@li.org>\n"
++"Language: \n"
++"MIME-Version: 1.0\n"
++"Content-Type: text/plain; charset=CHARSET\n"
++"Content-Transfer-Encoding: 8bit\n"
++
++#: ext/curl/gstcurlhttpsrc.c:1439
++msgid "No URL set."
++msgstr ""
++
++#: ext/opencv/gsttemplatematch.cpp:189
++msgid "OpenCV failed to load template image"
++msgstr ""
++
++#: ext/resindvd/resindvdsrc.c:361
++msgid "Could not read title information for DVD."
++msgstr ""
++
++#: ext/resindvd/resindvdsrc.c:367
++#, c-format
++msgid "Failed to open DVD device '%s'."
++msgstr ""
++
++#: ext/resindvd/resindvdsrc.c:373
++msgid "Failed to set PGC based seeking."
++msgstr ""
++
++#: ext/resindvd/resindvdsrc.c:1164
++msgid ""
++"Could not read DVD. This may be because the DVD is encrypted and a DVD "
++"decryption library is not installed."
++msgstr ""
++
++#: ext/resindvd/resindvdsrc.c:1169 ext/resindvd/resindvdsrc.c:1178
++msgid "Could not read DVD."
++msgstr ""
++
++#: ext/smoothstreaming/gstmssdemux.c:430
++#: gst-libs/gst/adaptivedemux/gstadaptivedemux.c:735
++msgid "This file contains no playable streams."
++msgstr ""
++
++#: ext/sndfile/gstsfdec.c:771
++msgid "Could not open sndfile stream for reading."
++msgstr ""
++
++#: gst/asfmux/gstasfmux.c:1834
++msgid "Generated file has a larger preroll time than its streams duration"
++msgstr ""
++
++#: gst/camerabin2/camerabingeneral.c:167 gst/camerabin2/gstcamerabin2.c:1866
++#: gst/camerabin2/gstdigitalzoom.c:283 gst/camerabin2/gstviewfinderbin.c:275
++#, c-format
++msgid "Missing element '%s' - check your GStreamer installation."
++msgstr ""
++
++#: gst/camerabin2/gstcamerabin2.c:352
++msgid "File location is set to NULL, please set it to a valid filename"
++msgstr ""
++
++#: gst/camerabin2/gstwrappercamerabinsrc.c:587
++msgid "Digitalzoom element couldn't be created"
++msgstr ""
++
++#: gst/dvdspu/gstdvdspu.c:1044
++msgid "Subpicture format was not configured before data flow"
++msgstr ""
++
++#: gst-libs/gst/adaptivedemux/gstadaptivedemux.c:3626
++msgid "Failed to get fragment URL."
++msgstr ""
++
++#: gst-libs/gst/adaptivedemux/gstadaptivedemux.c:4013
++#, c-format
++msgid "Couldn't download fragments"
++msgstr ""
++
++#: gst-libs/gst/adaptivedemux/gstadaptivedemux.c:4102
++#: gst/mpegtsdemux/mpegtsbase.c:1693
++msgid "Internal data stream error."
++msgstr ""
++
++#: sys/dvb/gstdvbsrc.c:1597 sys/dvb/gstdvbsrc.c:1811
++#, c-format
++msgid "Device \"%s\" does not exist."
++msgstr ""
++
++#: sys/dvb/gstdvbsrc.c:1601
++#, c-format
++msgid "Could not open frontend device \"%s\"."
++msgstr ""
++
++#: sys/dvb/gstdvbsrc.c:1620
++#, c-format
++msgid "Could not get settings from frontend device \"%s\"."
++msgstr ""
++
++#: sys/dvb/gstdvbsrc.c:1637
++#, c-format
++msgid "Cannot enumerate delivery systems from frontend device \"%s\"."
++msgstr ""
++
++#: sys/dvb/gstdvbsrc.c:1815
++#, c-format
++msgid "Could not open file \"%s\" for reading."
++msgstr ""
++
++#: sys/dvb/parsechannels.c:410
++#, c-format
++msgid "Couldn't find channel configuration file"
++msgstr ""
++
++#: sys/dvb/parsechannels.c:413 sys/dvb/parsechannels.c:563
++#, c-format
++msgid "Couldn't load channel configuration file: '%s'"
++msgstr ""
++
++#: sys/dvb/parsechannels.c:421 sys/dvb/parsechannels.c:846
++#, c-format
++msgid "Couldn't find details for channel '%s'"
++msgstr ""
++
++#: sys/dvb/parsechannels.c:430
++#, c-format
++msgid "No properties for channel '%s'"
++msgstr ""
++
++#: sys/dvb/parsechannels.c:439
++#, c-format
++msgid "Failed to set properties for channel '%s'"
++msgstr ""
++
++#: sys/dvb/parsechannels.c:560
++#, c-format
++msgid "Couldn't find channel configuration file: '%s'"
++msgstr ""
++
++#: sys/dvb/parsechannels.c:570
++#, c-format
++msgid "Channel configuration file doesn't contain any channels"
++msgstr ""
--- /dev/null
- GstCaps *src_caps;
+ /*
+ * Copyright (C) 2010, 2013 Ole André Vadla Ravnås <oleavr@soundrop.com>
+ * Copyright (C) 2013 Intel Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "vtenc.h"
+
+ #include "coremediabuffer.h"
+ #include "corevideobuffer.h"
+ #include "vtutil.h"
+ #include <gst/pbutils/codec-utils.h>
+
+ #define VTENC_DEFAULT_USAGE 6 /* Profile: Baseline Level: 2.1 */
+ #define VTENC_DEFAULT_BITRATE 0
+ #define VTENC_DEFAULT_FRAME_REORDERING TRUE
+ #define VTENC_DEFAULT_REALTIME FALSE
+ #define VTENC_DEFAULT_QUALITY 0.5
+ #define VTENC_DEFAULT_MAX_KEYFRAME_INTERVAL 0
+ #define VTENC_DEFAULT_MAX_KEYFRAME_INTERVAL_DURATION 0
+
+ GST_DEBUG_CATEGORY (gst_vtenc_debug);
+ #define GST_CAT_DEFAULT (gst_vtenc_debug)
+
+ #define GST_VTENC_CODEC_DETAILS_QDATA \
+ g_quark_from_static_string ("vtenc-codec-details")
+
+ /* define EnableHardwareAcceleratedVideoEncoder in < 10.9 */
+ #if defined(MAC_OS_X_VERSION_MAX_ALLOWED) && MAC_OS_X_VERSION_MAX_ALLOWED < 1090
+ const CFStringRef
+ kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder =
+ CFSTR ("EnableHardwareAcceleratedVideoEncoder");
+ const CFStringRef
+ kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder =
+ CFSTR ("RequireHardwareAcceleratedVideoEncoder");
+ const CFStringRef kVTCompressionPropertyKey_ProfileLevel =
+ CFSTR ("ProfileLevel");
+ const CFStringRef kVTProfileLevel_H264_Baseline_AutoLevel =
+ CFSTR ("H264_Baseline_AutoLevel");
+ #endif
+
+ #if defined(MAC_OS_X_VERSION_MAX_ALLOWED) && MAC_OS_X_VERSION_MAX_ALLOWED < 1080
+ const CFStringRef kVTCompressionPropertyKey_Quality = CFSTR ("Quality");
+ #endif
+
+ #ifdef HAVE_VIDEOTOOLBOX_10_9_6
+ extern OSStatus
+ VTCompressionSessionPrepareToEncodeFrames (VTCompressionSessionRef session)
+ __attribute__ ((weak_import));
+ #endif
+
+ enum
+ {
+ PROP_0,
+ PROP_USAGE,
+ PROP_BITRATE,
+ PROP_ALLOW_FRAME_REORDERING,
+ PROP_REALTIME,
+ PROP_QUALITY,
+ PROP_MAX_KEYFRAME_INTERVAL,
+ PROP_MAX_KEYFRAME_INTERVAL_DURATION
+ };
+
+ typedef struct _GstVTEncFrame GstVTEncFrame;
+
+ struct _GstVTEncFrame
+ {
+ GstBuffer *buf;
+ GstVideoFrame videoframe;
+ };
+
+ static GstElementClass *parent_class = NULL;
+
+ static void gst_vtenc_get_property (GObject * obj, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_vtenc_set_property (GObject * obj, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_vtenc_finalize (GObject * obj);
+
+ static gboolean gst_vtenc_start (GstVideoEncoder * enc);
+ static gboolean gst_vtenc_stop (GstVideoEncoder * enc);
+ static gboolean gst_vtenc_set_format (GstVideoEncoder * enc,
+ GstVideoCodecState * input_state);
+ static GstFlowReturn gst_vtenc_handle_frame (GstVideoEncoder * enc,
+ GstVideoCodecFrame * frame);
+ static GstFlowReturn gst_vtenc_finish (GstVideoEncoder * enc);
+ static gboolean gst_vtenc_flush (GstVideoEncoder * enc);
+
+ static void gst_vtenc_clear_cached_caps_downstream (GstVTEnc * self);
+
+ static VTCompressionSessionRef gst_vtenc_create_session (GstVTEnc * self);
+ static void gst_vtenc_destroy_session (GstVTEnc * self,
+ VTCompressionSessionRef * session);
+ static void gst_vtenc_session_dump_properties (GstVTEnc * self,
+ VTCompressionSessionRef session);
+ static void gst_vtenc_session_configure_expected_framerate (GstVTEnc * self,
+ VTCompressionSessionRef session, gdouble framerate);
+ static void gst_vtenc_session_configure_max_keyframe_interval (GstVTEnc * self,
+ VTCompressionSessionRef session, gint interval);
+ static void gst_vtenc_session_configure_max_keyframe_interval_duration
+ (GstVTEnc * self, VTCompressionSessionRef session, gdouble duration);
+ static void gst_vtenc_session_configure_bitrate (GstVTEnc * self,
+ VTCompressionSessionRef session, guint bitrate);
+ static OSStatus gst_vtenc_session_configure_property_int (GstVTEnc * self,
+ VTCompressionSessionRef session, CFStringRef name, gint value);
+ static OSStatus gst_vtenc_session_configure_property_double (GstVTEnc * self,
+ VTCompressionSessionRef session, CFStringRef name, gdouble value);
+ static void gst_vtenc_session_configure_allow_frame_reordering (GstVTEnc * self,
+ VTCompressionSessionRef session, gboolean allow_frame_reordering);
+ static void gst_vtenc_session_configure_realtime (GstVTEnc * self,
+ VTCompressionSessionRef session, gboolean realtime);
+
+ static GstFlowReturn gst_vtenc_encode_frame (GstVTEnc * self,
+ GstVideoCodecFrame * frame);
+ static void gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,
+ void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags,
+ CMSampleBufferRef sampleBuffer);
+ static gboolean gst_vtenc_buffer_is_keyframe (GstVTEnc * self,
+ CMSampleBufferRef sbuf);
+
+
+ #ifndef HAVE_IOS
+ static GstVTEncFrame *gst_vtenc_frame_new (GstBuffer * buf,
+ GstVideoInfo * videoinfo);
+ static void gst_vtenc_frame_free (GstVTEncFrame * frame);
+
+ static void gst_pixel_buffer_release_cb (void *releaseRefCon,
+ const void *dataPtr, size_t dataSize, size_t numberOfPlanes,
+ const void *planeAddresses[]);
+ #endif
+
+ #ifdef HAVE_IOS
+ static GstStaticCaps sink_caps =
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ NV12, I420 }"));
+ #else
+ static GstStaticCaps sink_caps =
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ UYVY, NV12, I420 }"));
+ #endif
+
+ static void
+ gst_vtenc_base_init (GstVTEncClass * klass)
+ {
+ const GstVTEncoderDetails *codec_details =
+ GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ const int min_width = 1, max_width = G_MAXINT;
+ const int min_height = 1, max_height = G_MAXINT;
+ const int min_fps_n = 0, max_fps_n = G_MAXINT;
+ const int min_fps_d = 1, max_fps_d = 1;
+ GstPadTemplate *sink_template, *src_template;
- GST_PAD_SINK, GST_PAD_ALWAYS, gst_static_caps_get (&sink_caps));
++ GstCaps *src_caps, *caps;
+ gchar *longname, *description;
+
+ longname = g_strdup_printf ("%s encoder", codec_details->name);
+ description = g_strdup_printf ("%s encoder", codec_details->name);
+
+ gst_element_class_set_metadata (element_class, longname,
+ "Codec/Encoder/Video/Hardware", description,
+ "Ole André Vadla Ravnås <oleavr@soundrop.com>, Dominik Röttsches <dominik.rottsches@intel.com>");
+
+ g_free (longname);
+ g_free (description);
+
++ caps = gst_static_caps_get (&sink_caps);
+ sink_template = gst_pad_template_new ("sink",
++ GST_PAD_SINK, GST_PAD_ALWAYS, caps);
+ gst_element_class_add_pad_template (element_class, sink_template);
++ gst_caps_unref (caps);
+
+ src_caps = gst_caps_new_simple (codec_details->mimetype,
+ "width", GST_TYPE_INT_RANGE, min_width, max_width,
+ "height", GST_TYPE_INT_RANGE, min_height, max_height,
+ "framerate", GST_TYPE_FRACTION_RANGE,
+ min_fps_n, min_fps_d, max_fps_n, max_fps_d, NULL);
+ if (codec_details->format_id == kCMVideoCodecType_H264) {
+ gst_structure_set (gst_caps_get_structure (src_caps, 0),
+ "stream-format", G_TYPE_STRING, "avc",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ }
+ src_template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ src_caps);
+ gst_element_class_add_pad_template (element_class, src_template);
+ gst_caps_unref (src_caps);
+ }
+
+ /* GObject class setup: wires the property accessors and finalize, hooks
+ * up the GstVideoEncoder virtual methods, and installs the encoder
+ * properties. */
+ static void
+ gst_vtenc_class_init (GstVTEncClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstVideoEncoderClass *gstvideoencoder_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstvideoencoder_class = (GstVideoEncoderClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->get_property = gst_vtenc_get_property;
+ gobject_class->set_property = gst_vtenc_set_property;
+ gobject_class->finalize = gst_vtenc_finalize;
+
+ gstvideoencoder_class->start = gst_vtenc_start;
+ gstvideoencoder_class->stop = gst_vtenc_stop;
+ gstvideoencoder_class->set_format = gst_vtenc_set_format;
+ gstvideoencoder_class->handle_frame = gst_vtenc_handle_frame;
+ gstvideoencoder_class->finish = gst_vtenc_finish;
+ gstvideoencoder_class->flush = gst_vtenc_flush;
+
+ /* Exposed in kbps; stored internally in bps (converted in
+ * get_property/set_property below) */
+ g_object_class_install_property (gobject_class, PROP_BITRATE,
+ g_param_spec_uint ("bitrate", "Bitrate",
+ "Target video bitrate in kbps (0 = auto)",
+ 0, G_MAXUINT, VTENC_DEFAULT_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_ALLOW_FRAME_REORDERING,
+ g_param_spec_boolean ("allow-frame-reordering", "Allow frame reordering",
+ "Whether to allow frame reordering or not",
+ VTENC_DEFAULT_FRAME_REORDERING,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_REALTIME,
+ g_param_spec_boolean ("realtime", "Realtime",
+ "Configure the encoder for realtime output",
+ VTENC_DEFAULT_REALTIME,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_QUALITY,
+ g_param_spec_double ("quality", "Quality",
+ "The desired compression quality",
+ 0.0, 1.0, VTENC_DEFAULT_QUALITY,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_KEYFRAME_INTERVAL,
+ g_param_spec_int ("max-keyframe-interval", "Max Keyframe Interval",
+ "Maximum number of frames between keyframes (0 = auto)",
+ 0, G_MAXINT, VTENC_DEFAULT_MAX_KEYFRAME_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_MAX_KEYFRAME_INTERVAL_DURATION,
+ g_param_spec_uint64 ("max-keyframe-interval-duration",
+ "Max Keyframe Interval Duration",
+ "Maximum number of nanoseconds between keyframes (0 = no limit)", 0,
+ G_MAXUINT64, VTENC_DEFAULT_MAX_KEYFRAME_INTERVAL_DURATION,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ }
+
+ /* Per-instance init: stores the codec details from the class and builds
+ * a reusable CFDictionary of frame properties used to force a keyframe. */
+ static void
+ gst_vtenc_init (GstVTEnc * self)
+ {
+ GstVTEncClass *klass = (GstVTEncClass *) G_OBJECT_GET_CLASS (self);
+ CFStringRef keyframe_props_keys[] = { kVTEncodeFrameOptionKey_ForceKeyFrame };
+ CFBooleanRef keyframe_props_values[] = { kCFBooleanTrue };
+
+ self->details = GST_VTENC_CLASS_GET_CODEC_DETAILS (klass);
+
+ /* These could be controlled by properties later */
+ self->dump_properties = FALSE;
+ self->dump_attributes = FALSE;
+ self->latency_frames = -1;
+ self->session = NULL;
+ self->profile_level = NULL;
+
+ /* Built once here and passed to VTCompressionSessionEncodeFrame()
+ * whenever a forced keyframe is requested; released in finalize() */
+ self->keyframe_props =
+ CFDictionaryCreate (NULL, (const void **) keyframe_props_keys,
+ (const void **) keyframe_props_values, G_N_ELEMENTS (keyframe_props_keys),
+ &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
+ }
+
+ /* GObject finalize: releases the keyframe properties dictionary created
+ * in init() and chains up. Session/profile_level are released in stop(). */
+ static void
+ gst_vtenc_finalize (GObject * obj)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (obj);
+
+ CFRelease (self->keyframe_props);
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+ }
+
+ /* Locked getter for the bitrate (in bps). The object lock is taken
+ * because the value can be changed concurrently from property setters. */
+ static guint
+ gst_vtenc_get_bitrate (GstVTEnc * self)
+ {
+ guint result;
+
+ GST_OBJECT_LOCK (self);
+ result = self->bitrate;
+ GST_OBJECT_UNLOCK (self);
+
+ return result;
+ }
+
+ /* Store the new bitrate and, if a compression session already exists,
+ * push it to VideoToolbox immediately. */
+ static void
+ gst_vtenc_set_bitrate (GstVTEnc * self, guint bitrate)
+ {
+ GST_OBJECT_LOCK (self);
+
+ self->bitrate = bitrate;
+
+ if (self->session != NULL)
+ gst_vtenc_session_configure_bitrate (self, self->session, bitrate);
+
+ GST_OBJECT_UNLOCK (self);
+ }
+
+ /* Locked getter for the allow-frame-reordering (B-frames) flag. */
+ static gboolean
+ gst_vtenc_get_allow_frame_reordering (GstVTEnc * self)
+ {
+ gboolean result;
+
+ GST_OBJECT_LOCK (self);
+ result = self->allow_frame_reordering;
+ GST_OBJECT_UNLOCK (self);
+
+ return result;
+ }
+
+ /* Store the flag and apply it to a live session, if any. */
+ static void
+ gst_vtenc_set_allow_frame_reordering (GstVTEnc * self,
+ gboolean allow_frame_reordering)
+ {
+ GST_OBJECT_LOCK (self);
+ self->allow_frame_reordering = allow_frame_reordering;
+ if (self->session != NULL) {
+ gst_vtenc_session_configure_allow_frame_reordering (self,
+ self->session, allow_frame_reordering);
+ }
+ GST_OBJECT_UNLOCK (self);
+ }
+
+ /* Locked getter for the realtime-encoding flag. */
+ static gboolean
+ gst_vtenc_get_realtime (GstVTEnc * self)
+ {
+ gboolean result;
+
+ GST_OBJECT_LOCK (self);
+ result = self->realtime;
+ GST_OBJECT_UNLOCK (self);
+
+ return result;
+ }
+
+ /* Store the flag and apply it to a live session, if any. */
+ static void
+ gst_vtenc_set_realtime (GstVTEnc * self, gboolean realtime)
+ {
+ GST_OBJECT_LOCK (self);
+ self->realtime = realtime;
+ if (self->session != NULL)
+ gst_vtenc_session_configure_realtime (self, self->session, realtime);
+ GST_OBJECT_UNLOCK (self);
+ }
+
+ /* Locked getter for the compression quality (0.0 .. 1.0). */
+ static gdouble
+ gst_vtenc_get_quality (GstVTEnc * self)
+ {
+ gdouble result;
+
+ GST_OBJECT_LOCK (self);
+ result = self->quality;
+ GST_OBJECT_UNLOCK (self);
+
+ return result;
+ }
+
+ /* Store the quality and apply it to a live session, if any. Also called
+ * from create_session() to (re)apply the current value to a new session. */
+ static void
+ gst_vtenc_set_quality (GstVTEnc * self, gdouble quality)
+ {
+ GST_OBJECT_LOCK (self);
+ self->quality = quality;
+ GST_INFO_OBJECT (self, "setting quality %f", quality);
+ if (self->session != NULL) {
+ gst_vtenc_session_configure_property_double (self, self->session,
+ kVTCompressionPropertyKey_Quality, quality);
+ }
+ GST_OBJECT_UNLOCK (self);
+ }
+
+ /* Locked getter for the max keyframe interval (in frames, 0 = auto). */
+ static gint
+ gst_vtenc_get_max_keyframe_interval (GstVTEnc * self)
+ {
+ gint result;
+
+ GST_OBJECT_LOCK (self);
+ result = self->max_keyframe_interval;
+ GST_OBJECT_UNLOCK (self);
+
+ return result;
+ }
+
+ /* Store the interval and apply it to a live session, if any. */
+ static void
+ gst_vtenc_set_max_keyframe_interval (GstVTEnc * self, gint interval)
+ {
+ GST_OBJECT_LOCK (self);
+ self->max_keyframe_interval = interval;
+ if (self->session != NULL) {
+ gst_vtenc_session_configure_max_keyframe_interval (self, self->session,
+ interval);
+ }
+ GST_OBJECT_UNLOCK (self);
+ }
+
+ /* Locked getter for the max keyframe interval duration (nanoseconds). */
+ static GstClockTime
+ gst_vtenc_get_max_keyframe_interval_duration (GstVTEnc * self)
+ {
+ GstClockTime result;
+
+ GST_OBJECT_LOCK (self);
+ result = self->max_keyframe_interval_duration;
+ GST_OBJECT_UNLOCK (self);
+
+ return result;
+ }
+
+ /* Store the duration (nanoseconds) and apply it to a live session, if
+ * any; VideoToolbox wants it in seconds, hence the GST_SECOND division. */
+ static void
+ gst_vtenc_set_max_keyframe_interval_duration (GstVTEnc * self,
+ GstClockTime interval)
+ {
+ GST_OBJECT_LOCK (self);
+ self->max_keyframe_interval_duration = interval;
+ if (self->session != NULL) {
+ gst_vtenc_session_configure_max_keyframe_interval_duration (self,
+ self->session, interval / ((gdouble) GST_SECOND));
+ }
+ GST_OBJECT_UNLOCK (self);
+ }
+
+ /* GObject get_property: reads go through the locked accessors above. */
+ static void
+ gst_vtenc_get_property (GObject * obj, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (obj);
+
+ switch (prop_id) {
+ case PROP_BITRATE:
+ /* internal bps -> property kbps */
+ g_value_set_uint (value, gst_vtenc_get_bitrate (self) / 1000);
+ break;
+ case PROP_ALLOW_FRAME_REORDERING:
+ g_value_set_boolean (value, gst_vtenc_get_allow_frame_reordering (self));
+ break;
+ case PROP_REALTIME:
+ g_value_set_boolean (value, gst_vtenc_get_realtime (self));
+ break;
+ case PROP_QUALITY:
+ g_value_set_double (value, gst_vtenc_get_quality (self));
+ break;
+ case PROP_MAX_KEYFRAME_INTERVAL:
+ g_value_set_int (value, gst_vtenc_get_max_keyframe_interval (self));
+ break;
+ case PROP_MAX_KEYFRAME_INTERVAL_DURATION:
+ g_value_set_uint64 (value,
+ gst_vtenc_get_max_keyframe_interval_duration (self));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* GObject set_property: writes go through the locked setters above,
+ * which also push the new value to a live VT session when one exists. */
+ static void
+ gst_vtenc_set_property (GObject * obj, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (obj);
+
+ switch (prop_id) {
+ case PROP_BITRATE:
+ /* property kbps -> internal bps */
+ gst_vtenc_set_bitrate (self, g_value_get_uint (value) * 1000);
+ break;
+ case PROP_ALLOW_FRAME_REORDERING:
+ gst_vtenc_set_allow_frame_reordering (self, g_value_get_boolean (value));
+ break;
+ case PROP_REALTIME:
+ gst_vtenc_set_realtime (self, g_value_get_boolean (value));
+ break;
+ case PROP_QUALITY:
+ gst_vtenc_set_quality (self, g_value_get_double (value));
+ break;
+ case PROP_MAX_KEYFRAME_INTERVAL:
+ gst_vtenc_set_max_keyframe_interval (self, g_value_get_int (value));
+ break;
+ case PROP_MAX_KEYFRAME_INTERVAL_DURATION:
+ gst_vtenc_set_max_keyframe_interval_duration (self,
+ g_value_get_uint64 (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (obj, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* Drain the VT compression session, then drain our output queue.
+ * When @is_flushing the queued frames are dropped; otherwise each one
+ * is pushed downstream via gst_video_encoder_finish_frame().
+ * Returns the last flow return from finish_frame (GST_FLOW_OK when
+ * flushing). Fix vs. original: local declarations were placed after the
+ * first GST_DEBUG_OBJECT statement, breaking the declarations-first
+ * convention used everywhere else in this file. */
+ static GstFlowReturn
+ gst_vtenc_finish_encoding (GstVTEnc * self, gboolean is_flushing)
+ {
+ GstVideoCodecFrame *outframe;
+ GstFlowReturn ret = GST_FLOW_OK;
+ OSStatus vt_status;
+
+ GST_DEBUG_OBJECT (self,
+ "complete encoding and clean buffer queue, is flushing %d", is_flushing);
+
+ /* We need to unlock the stream lock here because
+ * it can wait for gst_vtenc_enqueue_buffer() to
+ * handle a buffer... which will take the stream
+ * lock from another thread and then deadlock */
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
+ GST_DEBUG_OBJECT (self, "starting VTCompressionSessionCompleteFrames");
+ vt_status =
+ VTCompressionSessionCompleteFrames (self->session,
+ kCMTimePositiveInfinity);
+ GST_DEBUG_OBJECT (self, "VTCompressionSessionCompleteFrames ended");
+ GST_VIDEO_ENCODER_STREAM_LOCK (self);
+ if (vt_status != noErr) {
+ GST_WARNING_OBJECT (self, "VTCompressionSessionCompleteFrames returned %d",
+ (int) vt_status);
+ }
+
+ while ((outframe = g_async_queue_try_pop (self->cur_outframes))) {
+ if (is_flushing) {
+ GST_DEBUG_OBJECT (self, "flushing frame number %d",
+ outframe->system_frame_number);
+ gst_video_codec_frame_unref (outframe);
+ } else {
+ GST_DEBUG_OBJECT (self, "finish frame number %d",
+ outframe->system_frame_number);
+ ret =
+ gst_video_encoder_finish_frame (GST_VIDEO_ENCODER_CAST (self),
+ outframe);
+ }
+ }
+
+ GST_DEBUG_OBJECT (self, "buffer queue cleaned");
+
+ return ret;
+ }
+
+ /* GstVideoEncoder::start: create the queue that the VT output callback
+ * (gst_vtenc_enqueue_buffer) feeds encoded frames into. */
+ static gboolean
+ gst_vtenc_start (GstVideoEncoder * enc)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+
+ self->cur_outframes = g_async_queue_new ();
+
+ return TRUE;
+ }
+
+ /* GstVideoEncoder::stop: flush pending frames, tear down the session and
+ * release every piece of per-stream state created since start()/set_format(). */
+ static gboolean
+ gst_vtenc_stop (GstVideoEncoder * enc)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+
+ /* flush() expects the stream lock to be held (it drains the encoder) */
+ GST_VIDEO_ENCODER_STREAM_LOCK (self);
+ gst_vtenc_flush (enc);
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
+
+ GST_OBJECT_LOCK (self);
+ gst_vtenc_destroy_session (self, &self->session);
+ GST_OBJECT_UNLOCK (self);
+
+ if (self->profile_level)
+ CFRelease (self->profile_level);
+ self->profile_level = NULL;
+
+ if (self->input_state)
+ gst_video_codec_state_unref (self->input_state);
+ self->input_state = NULL;
+
+ self->negotiated_width = self->negotiated_height = 0;
+ self->negotiated_fps_n = self->negotiated_fps_d = 0;
+
+ gst_vtenc_clear_cached_caps_downstream (self);
+
+ g_async_queue_unref (self->cur_outframes);
+ self->cur_outframes = NULL;
+
+ return TRUE;
+ }
+
+ /* Map a caps profile/level pair onto the VideoToolbox profile-level
+ * constant name, e.g. "main" / "4.0" -> "H264_Main_4_0".
+ * Returns a newly created CFStringRef (caller owns) or NULL on failure.
+ * Fix vs. original: strncpy() does not NUL-terminate when level_arg is
+ * 64 bytes or longer, which made the strlen(level) calls below read out
+ * of bounds (undefined behavior); force termination after the copy. */
+ static CFStringRef
+ gst_vtenc_profile_level_key (GstVTEnc * self, const gchar * profile,
+ const gchar * level_arg)
+ {
+ char level[64];
+ gchar *key = NULL;
+ CFStringRef ret = NULL;
+
+ if (profile == NULL)
+ profile = "main";
+ if (level_arg == NULL)
+ level_arg = "AutoLevel";
+ strncpy (level, level_arg, sizeof (level));
+ /* strncpy() leaves the buffer unterminated when the source fills it;
+ * the zero padding it performs for shorter sources is still relied on
+ * by the in-place rewrites below */
+ level[sizeof (level) - 1] = '\0';
+
+ if (!strcmp (profile, "constrained-baseline") ||
+ !strcmp (profile, "baseline")) {
+ profile = "Baseline";
+ } else if (g_str_has_prefix (profile, "high")) {
+ profile = "High";
+ } else if (!strcmp (profile, "main")) {
+ profile = "Main";
+ } else {
+ /* negotiation only ever offers the profiles handled above */
+ g_assert_not_reached ();
+ }
+
+ /* rewrite "4" -> "4_0" and "4.0" -> "4_0"; the trailing NUL is already
+ * in place thanks to strncpy's zero padding */
+ if (strlen (level) == 1) {
+ level[1] = '_';
+ level[2] = '0';
+ } else if (strlen (level) == 3) {
+ level[1] = '_';
+ }
+
+ key = g_strdup_printf ("H264_%s_%s", profile, level);
+ ret = CFStringCreateWithBytes (NULL, (const guint8 *) key, strlen (key),
+ kCFStringEncodingASCII, 0);
+
+ GST_INFO_OBJECT (self, "negotiated profile and level %s", key);
+
+ g_free (key);
+
+ return ret;
+ }
+
+ /* Pick the H.264 profile/level from the fixated downstream caps and
+ * store the matching VideoToolbox key in self->profile_level.
+ * Falls back to "main"/"AutoLevel" (inside profile_level_key) when
+ * downstream does not constrain them. Returns FALSE when downstream
+ * caps are empty or the profile/level pair is invalid. */
+ static gboolean
+ gst_vtenc_negotiate_profile_and_level (GstVideoEncoder * enc)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+ GstCaps *allowed_caps = NULL;
+ gboolean ret = TRUE;
+ const gchar *profile = NULL;
+ const gchar *level = NULL;
+
+ allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (enc));
+ if (allowed_caps) {
+ GstStructure *s;
+
+ if (gst_caps_is_empty (allowed_caps)) {
+ GST_ERROR_OBJECT (self, "no allowed downstream caps");
+ goto fail;
+ }
+
+ allowed_caps = gst_caps_make_writable (allowed_caps);
+ allowed_caps = gst_caps_fixate (allowed_caps);
+ s = gst_caps_get_structure (allowed_caps, 0);
+
+ profile = gst_structure_get_string (s, "profile");
+ level = gst_structure_get_string (s, "level");
+ }
+
+ /* drop any previously negotiated key before replacing it */
+ if (self->profile_level)
+ CFRelease (self->profile_level);
+ self->profile_level = gst_vtenc_profile_level_key (self, profile, level);
+ if (self->profile_level == NULL) {
+ GST_ERROR_OBJECT (enc, "invalid profile and level");
+ goto fail;
+ }
+
+ out:
+ if (allowed_caps)
+ gst_caps_unref (allowed_caps);
+
+ return ret;
+
+ fail:
+ ret = FALSE;
+ goto out;
+ }
+
+ /* GstVideoEncoder::set_format: remember the negotiated video info,
+ * destroy any previous compression session and create a new one for the
+ * new format. Returns FALSE when profile/level negotiation or session
+ * creation fails. Fix vs. original: the return value of
+ * gst_vtenc_negotiate_profile_and_level() was ignored, so a failed
+ * negotiation went on to create a session with a NULL profile_level. */
+ static gboolean
+ gst_vtenc_set_format (GstVideoEncoder * enc, GstVideoCodecState * state)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+ VTCompressionSessionRef session;
+
+ if (self->input_state)
+ gst_video_codec_state_unref (self->input_state);
+ self->input_state = gst_video_codec_state_ref (state);
+
+ self->negotiated_width = state->info.width;
+ self->negotiated_height = state->info.height;
+ self->negotiated_fps_n = state->info.fps_n;
+ self->negotiated_fps_d = state->info.fps_d;
+ self->video_info = state->info;
+
+ GST_OBJECT_LOCK (self);
+ gst_vtenc_destroy_session (self, &self->session);
+ GST_OBJECT_UNLOCK (self);
+
+ if (!gst_vtenc_negotiate_profile_and_level (enc))
+ return FALSE;
+
+ session = gst_vtenc_create_session (self);
+ GST_OBJECT_LOCK (self);
+ self->session = session;
+ GST_OBJECT_UNLOCK (self);
+
+ return session != NULL;
+ }
+
+ /* TRUE once set_format() has run (width 0 is used as "not negotiated"). */
+ static gboolean
+ gst_vtenc_is_negotiated (GstVTEnc * self)
+ {
+ return self->negotiated_width != 0;
+ }
+
+ /* Build and push the src caps from the first encoded sample buffer.
+ * The last-negotiated geometry/framerate is cached (caps_*) so this is
+ * a cheap no-op for every subsequent buffer until the format changes. */
+ static gboolean
+ gst_vtenc_negotiate_downstream (GstVTEnc * self, CMSampleBufferRef sbuf)
+ {
+ gboolean result;
+ GstCaps *caps;
+ GstStructure *s;
+ GstVideoCodecState *state;
+
+ if (self->caps_width == self->negotiated_width &&
+ self->caps_height == self->negotiated_height &&
+ self->caps_fps_n == self->negotiated_fps_n &&
+ self->caps_fps_d == self->negotiated_fps_d) {
+ return TRUE;
+ }
+
+ caps = gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (self));
+ caps = gst_caps_make_writable (caps);
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_set (s,
+ "width", G_TYPE_INT, self->negotiated_width,
+ "height", G_TYPE_INT, self->negotiated_height,
+ "framerate", GST_TYPE_FRACTION,
+ self->negotiated_fps_n, self->negotiated_fps_d, NULL);
+
+ /* For H.264, extract the avcC atom from the sample's format description
+ * and expose it as codec_data, plus profile/level derived from the SPS */
+ if (self->details->format_id == kCMVideoCodecType_H264) {
+ CMFormatDescriptionRef fmt;
+ CFDictionaryRef atoms;
+ CFStringRef avccKey;
+ CFDataRef avcc;
+ guint8 *codec_data;
+ gsize codec_data_size;
+ GstBuffer *codec_data_buf;
+ guint8 sps[3];
+
+ fmt = CMSampleBufferGetFormatDescription (sbuf);
+ atoms = CMFormatDescriptionGetExtension (fmt,
+ kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms);
+ avccKey = CFStringCreateWithCString (NULL, "avcC", kCFStringEncodingUTF8);
+ /* NOTE(review): neither atoms nor avcc is NULL-checked here; if the
+ * format description ever lacks an avcC atom this crashes in
+ * CFDataGetLength() — confirm VT always provides it for H.264 */
+ avcc = CFDictionaryGetValue (atoms, avccKey);
+ CFRelease (avccKey);
+ codec_data_size = CFDataGetLength (avcc);
+ codec_data = g_malloc (codec_data_size);
+ CFDataGetBytes (avcc, CFRangeMake (0, codec_data_size), codec_data);
+ codec_data_buf = gst_buffer_new_wrapped (codec_data, codec_data_size);
+
+ gst_structure_set (s, "codec_data", GST_TYPE_BUFFER, codec_data_buf, NULL);
+
+ /* avcC layout: [1]=profile_idc, [2]=profile_compat, [3]=level_idc.
+ * NOTE(review): "& ~0xDF" keeps only bit 0x20 of the compatibility
+ * byte — presumably masking constraint flags; confirm the intended
+ * mask value */
+ sps[0] = codec_data[1];
+ sps[1] = codec_data[2] & ~0xDF;
+ sps[2] = codec_data[3];
+
+ gst_codec_utils_h264_caps_set_level_and_profile (caps, sps, 3);
+
+ gst_buffer_unref (codec_data_buf);
+ }
+
+ /* set_output_state takes ownership of caps */
+ state =
+ gst_video_encoder_set_output_state (GST_VIDEO_ENCODER_CAST (self), caps,
+ self->input_state);
+ gst_video_codec_state_unref (state);
+ result = gst_video_encoder_negotiate (GST_VIDEO_ENCODER_CAST (self));
+
+ self->caps_width = self->negotiated_width;
+ self->caps_height = self->negotiated_height;
+ self->caps_fps_n = self->negotiated_fps_n;
+ self->caps_fps_d = self->negotiated_fps_d;
+
+ return result;
+ }
+
+ /* Invalidate the cached caps geometry so the next output buffer forces a
+ * fresh downstream negotiation (see gst_vtenc_negotiate_downstream). */
+ static void
+ gst_vtenc_clear_cached_caps_downstream (GstVTEnc * self)
+ {
+ self->caps_width = self->caps_height = 0;
+ self->caps_fps_n = self->caps_fps_d = 0;
+ }
+
+ /* GstVideoEncoder::handle_frame: refuse frames until set_format() has
+ * run, then hand the frame to the VT session via encode_frame(). */
+ static GstFlowReturn
+ gst_vtenc_handle_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+
+ if (!gst_vtenc_is_negotiated (self))
+ goto not_negotiated;
+
+ return gst_vtenc_encode_frame (self, frame);
+
+ not_negotiated:
+ gst_video_codec_frame_unref (frame);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+
+ /* GstVideoEncoder::finish (EOS): drain and push all pending frames. */
+ static GstFlowReturn
+ gst_vtenc_finish (GstVideoEncoder * enc)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+ return gst_vtenc_finish_encoding (self, FALSE);
+ }
+
+ /* GstVideoEncoder::flush: drain the session and drop pending frames. */
+ static gboolean
+ gst_vtenc_flush (GstVideoEncoder * enc)
+ {
+ GstVTEnc *self = GST_VTENC_CAST (enc);
+ GstFlowReturn ret;
+
+ ret = gst_vtenc_finish_encoding (self, TRUE);
+
+ return (ret == GST_FLOW_OK);
+ }
+
+ /* Create and configure a VT compression session for the negotiated
+ * format. Returns NULL when VTCompressionSessionCreate fails; failures
+ * of the individual property setters are only logged. Caller owns the
+ * returned session (released via gst_vtenc_destroy_session). */
+ static VTCompressionSessionRef
+ gst_vtenc_create_session (GstVTEnc * self)
+ {
+ VTCompressionSessionRef session = NULL;
+ CFMutableDictionaryRef encoder_spec = NULL, pb_attrs;
+ OSStatus status;
+
+ /* NOTE(review): this uses "#if !HAVE_IOS" while the rest of the file
+ * uses "#ifdef HAVE_IOS" — these differ if HAVE_IOS is defined empty;
+ * confirm which convention the build system guarantees */
+ #if !HAVE_IOS
+ const GstVTEncoderDetails *codec_details =
+ GST_VTENC_CLASS_GET_CODEC_DETAILS (G_OBJECT_GET_CLASS (self));
+
+ /* On macOS, opt in to (and possibly require) the hardware encoder */
+ encoder_spec =
+ CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
+ &kCFTypeDictionaryValueCallBacks);
+ gst_vtutil_dict_set_boolean (encoder_spec,
+ kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder, true);
+ if (codec_details->require_hardware)
+ gst_vtutil_dict_set_boolean (encoder_spec,
+ kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder,
+ TRUE);
+ #endif
+
+ pb_attrs = CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks,
+ &kCFTypeDictionaryValueCallBacks);
+ gst_vtutil_dict_set_i32 (pb_attrs, kCVPixelBufferWidthKey,
+ self->negotiated_width);
+ gst_vtutil_dict_set_i32 (pb_attrs, kCVPixelBufferHeightKey,
+ self->negotiated_height);
+
+ /* gst_vtenc_enqueue_buffer() is invoked from a VT thread for every
+ * encoded frame */
+ status = VTCompressionSessionCreate (NULL,
+ self->negotiated_width, self->negotiated_height,
+ self->details->format_id, encoder_spec, pb_attrs, NULL,
+ gst_vtenc_enqueue_buffer, self, &session);
+ GST_INFO_OBJECT (self, "VTCompressionSessionCreate for %d x %d => %d",
+ self->negotiated_width, self->negotiated_height, (int) status);
+ if (status != noErr) {
+ GST_ERROR_OBJECT (self, "VTCompressionSessionCreate() returned: %d",
+ (int) status);
+ goto beach;
+ }
+
+ gst_vtenc_session_configure_expected_framerate (self, session,
+ (gdouble) self->negotiated_fps_n / (gdouble) self->negotiated_fps_d);
+
+ status = VTSessionSetProperty (session,
+ kVTCompressionPropertyKey_ProfileLevel, self->profile_level);
+ GST_DEBUG_OBJECT (self, "kVTCompressionPropertyKey_ProfileLevel => %d",
+ (int) status);
+
+ status = VTSessionSetProperty (session,
+ kVTCompressionPropertyKey_AllowTemporalCompression, kCFBooleanTrue);
+ GST_DEBUG_OBJECT (self,
+ "kVTCompressionPropertyKey_AllowTemporalCompression => %d", (int) status);
+
+ gst_vtenc_session_configure_max_keyframe_interval (self, session,
+ self->max_keyframe_interval);
+ gst_vtenc_session_configure_max_keyframe_interval_duration (self, session,
+ self->max_keyframe_interval_duration / ((gdouble) GST_SECOND));
+
+ /* re-apply the current property values to the fresh session */
+ gst_vtenc_session_configure_bitrate (self, session,
+ gst_vtenc_get_bitrate (self));
+ gst_vtenc_session_configure_realtime (self, session,
+ gst_vtenc_get_realtime (self));
+ gst_vtenc_session_configure_allow_frame_reordering (self, session,
+ gst_vtenc_get_allow_frame_reordering (self));
+ gst_vtenc_set_quality (self, self->quality);
+
+ if (self->dump_properties) {
+ gst_vtenc_session_dump_properties (self, session);
+ self->dump_properties = FALSE;
+ }
+ /* Weak-linked symbol: only call when the 10.9.6 SDK function exists */
+ #ifdef HAVE_VIDEOTOOLBOX_10_9_6
+ if (VTCompressionSessionPrepareToEncodeFrames) {
+ status = VTCompressionSessionPrepareToEncodeFrames (session);
+ if (status != noErr) {
+ GST_ERROR_OBJECT (self,
+ "VTCompressionSessionPrepareToEncodeFrames() returned: %d",
+ (int) status);
+ }
+ }
+ #endif
+
+ beach:
+ if (encoder_spec)
+ CFRelease (encoder_spec);
+ CFRelease (pb_attrs);
+
+ return session;
+ }
+
+ /* Invalidate, release and clear *session. Safe to call when *session is
+ * NULL (e.g. from set_format() before the first session exists).
+ * Fix vs. original: VTCompressionSessionInvalidate() was called BEFORE
+ * the NULL check, crashing when no session had been created yet. */
+ static void
+ gst_vtenc_destroy_session (GstVTEnc * self, VTCompressionSessionRef * session)
+ {
+ if (*session != NULL) {
+ VTCompressionSessionInvalidate (*session);
+ CFRelease (*session);
+ *session = NULL;
+ }
+ }
+
+ /* Context passed through CFDictionaryApplyFunction while dumping the
+ * session's supported properties. */
+ typedef struct
+ {
+ GstVTEnc *self;
+ VTCompressionSessionRef session;
+ } GstVTDumpPropCtx;
+
+ /* Applier callback: log one supported property (its attributes if
+ * dump_attributes is set, and its current value if it can be read). */
+ static void
+ gst_vtenc_session_dump_property (CFStringRef prop_name,
+ CFDictionaryRef prop_attrs, GstVTDumpPropCtx * dpc)
+ {
+ gchar *name_str;
+ CFTypeRef prop_value;
+ OSStatus status;
+
+ name_str = gst_vtutil_string_to_utf8 (prop_name);
+ if (dpc->self->dump_attributes) {
+ gchar *attrs_str;
+
+ attrs_str = gst_vtutil_object_to_string (prop_attrs);
+ GST_DEBUG_OBJECT (dpc->self, "%s = %s", name_str, attrs_str);
+ g_free (attrs_str);
+ }
+
+ status = VTSessionCopyProperty (dpc->session, prop_name, NULL, &prop_value);
+ if (status == noErr) {
+ gchar *value_str;
+
+ value_str = gst_vtutil_object_to_string (prop_value);
+ GST_DEBUG_OBJECT (dpc->self, "%s = %s", name_str, value_str);
+ g_free (value_str);
+
+ if (prop_value != NULL)
+ CFRelease (prop_value);
+ } else {
+ GST_DEBUG_OBJECT (dpc->self, "%s = <failed to query: %d>",
+ name_str, (int) status);
+ }
+
+ g_free (name_str);
+ }
+
+ /* Debug helper: dump every property the session supports. */
+ static void
+ gst_vtenc_session_dump_properties (GstVTEnc * self,
+ VTCompressionSessionRef session)
+ {
+ GstVTDumpPropCtx dpc = { self, session };
+ CFDictionaryRef dict;
+ OSStatus status;
+
+ status = VTSessionCopySupportedPropertyDictionary (session, &dict);
+ if (status != noErr)
+ goto error;
+ CFDictionaryApplyFunction (dict,
+ (CFDictionaryApplierFunction) gst_vtenc_session_dump_property, &dpc);
+ CFRelease (dict);
+
+ return;
+
+ error:
+ GST_WARNING_OBJECT (self, "failed to dump properties");
+ }
+
+ /* Thin wrappers mapping element settings onto VT session properties.
+ * Failures are logged by the generic property setters below. */
+ static void
+ gst_vtenc_session_configure_expected_framerate (GstVTEnc * self,
+ VTCompressionSessionRef session, gdouble framerate)
+ {
+ gst_vtenc_session_configure_property_double (self, session,
+ kVTCompressionPropertyKey_ExpectedFrameRate, framerate);
+ }
+
+ static void
+ gst_vtenc_session_configure_max_keyframe_interval (GstVTEnc * self,
+ VTCompressionSessionRef session, gint interval)
+ {
+ gst_vtenc_session_configure_property_int (self, session,
+ kVTCompressionPropertyKey_MaxKeyFrameInterval, interval);
+ }
+
+ /* duration is in seconds (callers convert from GstClockTime) */
+ static void
+ gst_vtenc_session_configure_max_keyframe_interval_duration (GstVTEnc * self,
+ VTCompressionSessionRef session, gdouble duration)
+ {
+ gst_vtenc_session_configure_property_double (self, session,
+ kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, duration);
+ }
+
+ /* bitrate is in bps */
+ static void
+ gst_vtenc_session_configure_bitrate (GstVTEnc * self,
+ VTCompressionSessionRef session, guint bitrate)
+ {
+ gst_vtenc_session_configure_property_int (self, session,
+ kVTCompressionPropertyKey_AverageBitRate, bitrate);
+ }
+
+ static void
+ gst_vtenc_session_configure_allow_frame_reordering (GstVTEnc * self,
+ VTCompressionSessionRef session, gboolean allow_frame_reordering)
+ {
+ VTSessionSetProperty (session, kVTCompressionPropertyKey_AllowFrameReordering,
+ allow_frame_reordering ? kCFBooleanTrue : kCFBooleanFalse);
+ }
+
+ static void
+ gst_vtenc_session_configure_realtime (GstVTEnc * self,
+ VTCompressionSessionRef session, gboolean realtime)
+ {
+ VTSessionSetProperty (session, kVTCompressionPropertyKey_RealTime,
+ realtime ? kCFBooleanTrue : kCFBooleanFalse);
+ }
+
+ /* Set an integer-valued VT session property; logs name, value and the
+ * resulting OSStatus, which is also returned to the caller. */
+ static OSStatus
+ gst_vtenc_session_configure_property_int (GstVTEnc * self,
+ VTCompressionSessionRef session, CFStringRef name, gint value)
+ {
+ CFNumberRef num;
+ OSStatus status;
+ gchar name_str[128];
+
+ num = CFNumberCreate (NULL, kCFNumberIntType, &value);
+ status = VTSessionSetProperty (session, name, num);
+ CFRelease (num);
+
+ CFStringGetCString (name, name_str, sizeof (name_str), kCFStringEncodingUTF8);
+ GST_DEBUG_OBJECT (self, "%s(%d) => %d", name_str, value, (int) status);
+
+ return status;
+ }
+
+ /* Double-valued variant of the setter above. */
+ static OSStatus
+ gst_vtenc_session_configure_property_double (GstVTEnc * self,
+ VTCompressionSessionRef session, CFStringRef name, gdouble value)
+ {
+ CFNumberRef num;
+ OSStatus status;
+ gchar name_str[128];
+
+ num = CFNumberCreate (NULL, kCFNumberDoubleType, &value);
+ status = VTSessionSetProperty (session, name, num);
+ CFRelease (num);
+
+ CFStringGetCString (name, name_str, sizeof (name_str), kCFStringEncodingUTF8);
+ GST_DEBUG_OBJECT (self, "%s(%f) => %d", name_str, value, (int) status);
+
+ return status;
+ }
+
+ /* Report encoder latency to the pipeline as (pending frames x frame
+ * duration), re-queried from VT whenever the pending count changes. */
+ static void
+ gst_vtenc_update_latency (GstVTEnc * self)
+ {
+ OSStatus status;
+ CFNumberRef value;
+ int frames = 0;
+ GstClockTime frame_duration;
+ GstClockTime latency;
+
+ if (self->video_info.fps_d == 0) {
+ GST_INFO_OBJECT (self, "framerate not known, can't set latency");
+ return;
+ }
+
+ status = VTSessionCopyProperty (self->session,
+ kVTCompressionPropertyKey_NumberOfPendingFrames, NULL, &value);
+ if (status != noErr || !value) {
+ GST_INFO_OBJECT (self, "failed to get NumberOfPendingFrames: %d", status);
+ return;
+ }
+
+ CFNumberGetValue (value, kCFNumberSInt32Type, &frames);
+ /* only push a new latency downstream when the pending count changed
+ * (latency_frames starts at -1 so the first query always updates) */
+ if (self->latency_frames == -1 || self->latency_frames != frames) {
+ self->latency_frames = frames;
+ /* NOTE(review): this fallback is unreachable — the fps_d == 0 case
+ * already returned at the top of the function; confirm whether the
+ * early return or this 25fps fallback is the intended behavior */
+ if (self->video_info.fps_d == 0 || self->video_info.fps_n == 0) {
+ /* FIXME: Assume 25fps. This is better than reporting no latency at
+ * all and then later failing in live pipelines
+ */
+ frame_duration = gst_util_uint64_scale (GST_SECOND, 1, 25);
+ } else {
+ frame_duration = gst_util_uint64_scale (GST_SECOND,
+ self->video_info.fps_d, self->video_info.fps_n);
+ }
+ latency = frame_duration * frames;
+ GST_INFO_OBJECT (self,
+ "latency status %d frames %d fps %d/%d time %" GST_TIME_FORMAT, status,
+ frames, self->video_info.fps_n, self->video_info.fps_d,
+ GST_TIME_ARGS (latency));
+ gst_video_encoder_set_latency (GST_VIDEO_ENCODER (self), latency, latency);
+ }
+ CFRelease (value);
+ }
+
+ static GstFlowReturn
+ gst_vtenc_encode_frame (GstVTEnc * self, GstVideoCodecFrame * frame)
+ {
+ CMTime ts, duration;
+ GstCoreMediaMeta *meta;
+ CVPixelBufferRef pbuf = NULL;
+ GstVideoCodecFrame *outframe;
+ OSStatus vt_status;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean renegotiated;
+ CFDictionaryRef frame_props = NULL;
+
+ if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
+ GST_INFO_OBJECT (self, "received force-keyframe-event, will force intra");
+ frame_props = self->keyframe_props;
+ }
+
+ ts = CMTimeMake (frame->pts, GST_SECOND);
+ if (frame->duration != GST_CLOCK_TIME_NONE)
+ duration = CMTimeMake (frame->duration, GST_SECOND);
+ else
+ duration = kCMTimeInvalid;
+
+ meta = gst_buffer_get_core_media_meta (frame->input_buffer);
+ if (meta != NULL) {
+ pbuf = gst_core_media_buffer_get_pixel_buffer (frame->input_buffer);
+ }
+ #ifdef HAVE_IOS
+ if (pbuf == NULL) {
+ GstVideoFrame inframe, outframe;
+ GstBuffer *outbuf;
+ OSType pixel_format_type;
+ CVReturn cv_ret;
+
+ /* FIXME: iOS has special stride requirements that we don't know yet.
+ * Copy into a newly allocated pixelbuffer for now. Probably makes
+ * sense to create a buffer pool around these at some point.
+ */
+
+ switch (GST_VIDEO_INFO_FORMAT (&self->video_info)) {
+ case GST_VIDEO_FORMAT_I420:
+ pixel_format_type = kCVPixelFormatType_420YpCbCr8Planar;
+ break;
+ case GST_VIDEO_FORMAT_NV12:
+ pixel_format_type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+ break;
+ default:
+ goto cv_error;
+ }
+
+ if (!gst_video_frame_map (&inframe, &self->video_info, frame->input_buffer,
+ GST_MAP_READ))
+ goto cv_error;
+
+ cv_ret =
+ CVPixelBufferCreate (NULL, self->negotiated_width,
+ self->negotiated_height, pixel_format_type, NULL, &pbuf);
+
+ if (cv_ret != kCVReturnSuccess) {
+ gst_video_frame_unmap (&inframe);
+ goto cv_error;
+ }
+
+ outbuf =
+ gst_core_video_buffer_new ((CVBufferRef) pbuf, &self->video_info, NULL);
+ if (!gst_video_frame_map (&outframe, &self->video_info, outbuf,
+ GST_MAP_WRITE)) {
+ gst_video_frame_unmap (&inframe);
+ gst_buffer_unref (outbuf);
+ CVPixelBufferRelease (pbuf);
+ goto cv_error;
+ }
+
+ if (!gst_video_frame_copy (&outframe, &inframe)) {
+ gst_video_frame_unmap (&inframe);
+ gst_buffer_unref (outbuf);
+ CVPixelBufferRelease (pbuf);
+ goto cv_error;
+ }
+
+ gst_buffer_unref (outbuf);
+ gst_video_frame_unmap (&inframe);
+ gst_video_frame_unmap (&outframe);
+ }
+ #else
+ if (pbuf == NULL) {
+ GstVTEncFrame *vframe;
+ CVReturn cv_ret;
+
+ vframe = gst_vtenc_frame_new (frame->input_buffer, &self->video_info);
+ if (!vframe)
+ goto cv_error;
+
+ {
+ const size_t num_planes = GST_VIDEO_FRAME_N_PLANES (&vframe->videoframe);
+ void *plane_base_addresses[GST_VIDEO_MAX_PLANES];
+ size_t plane_widths[GST_VIDEO_MAX_PLANES];
+ size_t plane_heights[GST_VIDEO_MAX_PLANES];
+ size_t plane_bytes_per_row[GST_VIDEO_MAX_PLANES];
+ OSType pixel_format_type;
+ size_t i;
+
+ for (i = 0; i < num_planes; i++) {
+ plane_base_addresses[i] =
+ GST_VIDEO_FRAME_PLANE_DATA (&vframe->videoframe, i);
+ plane_widths[i] = GST_VIDEO_FRAME_COMP_WIDTH (&vframe->videoframe, i);
+ plane_heights[i] = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe->videoframe, i);
+ plane_bytes_per_row[i] =
+ GST_VIDEO_FRAME_COMP_STRIDE (&vframe->videoframe, i);
+ plane_bytes_per_row[i] =
+ GST_VIDEO_FRAME_COMP_STRIDE (&vframe->videoframe, i);
+ }
+
+ switch (GST_VIDEO_INFO_FORMAT (&self->video_info)) {
+ case GST_VIDEO_FORMAT_I420:
+ pixel_format_type = kCVPixelFormatType_420YpCbCr8Planar;
+ break;
+ case GST_VIDEO_FORMAT_NV12:
+ pixel_format_type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ pixel_format_type = kCVPixelFormatType_422YpCbCr8;
+ break;
+ default:
+ gst_vtenc_frame_free (vframe);
+ goto cv_error;
+ }
+
+ cv_ret = CVPixelBufferCreateWithPlanarBytes (NULL,
+ self->negotiated_width, self->negotiated_height,
+ pixel_format_type,
+ frame,
+ GST_VIDEO_FRAME_SIZE (&vframe->videoframe),
+ num_planes,
+ plane_base_addresses,
+ plane_widths,
+ plane_heights,
+ plane_bytes_per_row, gst_pixel_buffer_release_cb, vframe, NULL,
+ &pbuf);
+ if (cv_ret != kCVReturnSuccess) {
+ gst_vtenc_frame_free (vframe);
+ goto cv_error;
+ }
+ }
+ }
+ #endif
+
+ /* We need to unlock the stream lock here because
+ * it can wait for gst_vtenc_enqueue_buffer() to
+ * handle a buffer... which will take the stream
+ * lock from another thread and then deadlock */
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
+ vt_status = VTCompressionSessionEncodeFrame (self->session,
+ pbuf, ts, duration, frame_props,
+ GINT_TO_POINTER (frame->system_frame_number), NULL);
+ GST_VIDEO_ENCODER_STREAM_LOCK (self);
+
+ if (vt_status != noErr) {
+ GST_WARNING_OBJECT (self, "VTCompressionSessionEncodeFrame returned %d",
+ (int) vt_status);
+ }
+
+ gst_video_codec_frame_unref (frame);
+
+ CVPixelBufferRelease (pbuf);
+
+ renegotiated = FALSE;
+ while ((outframe = g_async_queue_try_pop (self->cur_outframes))) {
+ if (outframe->output_buffer) {
+ if (!renegotiated) {
+ meta = gst_buffer_get_core_media_meta (outframe->output_buffer);
+ /* Try to renegotiate once */
+ if (meta) {
+ if (gst_vtenc_negotiate_downstream (self, meta->sample_buf)) {
+ renegotiated = TRUE;
+ } else {
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ gst_video_codec_frame_unref (outframe);
+ /* the rest of the frames will be pop'd and unref'd later */
+ break;
+ }
+ }
+ }
+
+ gst_vtenc_update_latency (self);
+ }
+
+ /* releases frame, even if it has no output buffer (i.e. failed to encode) */
+ ret =
+ gst_video_encoder_finish_frame (GST_VIDEO_ENCODER_CAST (self),
+ outframe);
+ }
+
+ return ret;
+
+ cv_error:
+ {
+ gst_video_codec_frame_unref (frame);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ /* VTCompressionSession output callback (invoked on a VideoToolbox thread).
+ * Looks up the GstVideoCodecFrame matching sourceFrameRefCon (the
+ * system_frame_number handed to VTCompressionSessionEncodeFrame), wraps the
+ * encoded CMSampleBufferRef as the frame's output buffer, and queues the
+ * frame on self->cur_outframes for the streaming thread to finish. The
+ * frame is queued even on error so it always gets released. */
+ static void
+ gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,
+ void *sourceFrameRefCon,
+ OSStatus status,
+ VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer)
+ {
+ GstVTEnc *self = outputCallbackRefCon;
+ gboolean is_keyframe;
+ GstVideoCodecFrame *frame;
+
+ /* sourceFrameRefCon carries the frame's system_frame_number */
+ frame =
+ gst_video_encoder_get_frame (GST_VIDEO_ENCODER_CAST (self),
+ GPOINTER_TO_INT (sourceFrameRefCon));
+
+ if (status != noErr) {
+ if (frame) {
+ GST_ELEMENT_ERROR (self, LIBRARY, ENCODE, (NULL),
+ ("Failed to encode frame %d: %d", frame->system_frame_number,
+ (int) status));
+ } else {
+ GST_ELEMENT_ERROR (self, LIBRARY, ENCODE, (NULL),
+ ("Failed to encode (frame unknown): %d", (int) status));
+ }
+ goto beach;
+ }
+
+ if (!frame) {
+ GST_WARNING_OBJECT (self, "No corresponding frame found!");
+ goto beach;
+ }
+
+ /* This may happen if we don't have enough bitrate */
+ if (sampleBuffer == NULL)
+ goto beach;
+
+ is_keyframe = gst_vtenc_buffer_is_keyframe (self, sampleBuffer);
+
+ if (is_keyframe) {
+ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+ /* drop the cached downstream caps so they are re-derived on keyframes */
+ gst_vtenc_clear_cached_caps_downstream (self);
+ }
+
+ /* We are dealing with block buffers here, so we don't need
+ * to enable the use of the video meta API on the core media buffer */
+ frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE, NULL);
+
+ beach:
+ /* needed anyway so the frame will be released */
+ if (frame)
+ g_async_queue_push (self->cur_outframes, frame);
+ }
+
+ /* Returns TRUE when the encoded sample is a sync point, i.e. its
+ * kCMSampleAttachmentKey_DependsOnOthers attachment is kCFBooleanFalse.
+ * Falls back to FALSE when the sample carries no attachments. */
+ static gboolean
+ gst_vtenc_buffer_is_keyframe (GstVTEnc * self, CMSampleBufferRef sbuf)
+ {
+ gboolean result = FALSE;
+ CFArrayRef attachments_for_sample;
+
+ /* second arg FALSE: do not create the attachments array if absent */
+ attachments_for_sample = CMSampleBufferGetSampleAttachmentsArray (sbuf, 0);
+ if (attachments_for_sample != NULL) {
+ CFDictionaryRef attachments;
+ CFBooleanRef depends_on_others;
+
+ attachments = CFArrayGetValueAtIndex (attachments_for_sample, 0);
+ depends_on_others = CFDictionaryGetValue (attachments,
+ kCMSampleAttachmentKey_DependsOnOthers);
+ result = (depends_on_others == kCFBooleanFalse);
+ }
+
+ return result;
+ }
+
+ #ifndef HAVE_IOS
+ /* Wrap @buf in a GstVTEncFrame: takes a ref on the buffer and maps it as a
+ * readable video frame. Returns NULL (dropping the ref) if mapping fails.
+ * Release with gst_vtenc_frame_free(). */
+ static GstVTEncFrame *
+ gst_vtenc_frame_new (GstBuffer * buf, GstVideoInfo * video_info)
+ {
+ GstVTEncFrame *frame;
+
+ frame = g_slice_new (GstVTEncFrame);
+ frame->buf = gst_buffer_ref (buf);
+ if (!gst_video_frame_map (&frame->videoframe, video_info, buf, GST_MAP_READ)) {
+ gst_buffer_unref (frame->buf);
+ g_slice_free (GstVTEncFrame, frame);
+ return NULL;
+ }
+
+ return frame;
+ }
+
+ /* Counterpart of gst_vtenc_frame_new(): unmap the video frame, drop the
+ * buffer ref and free the slice. */
+ static void
+ gst_vtenc_frame_free (GstVTEncFrame * frame)
+ {
+ gst_video_frame_unmap (&frame->videoframe);
+ gst_buffer_unref (frame->buf);
+ g_slice_free (GstVTEncFrame, frame);
+ }
+
+ /* Release callback installed via CVPixelBufferCreateWithPlanarBytes():
+ * frees the GstVTEncFrame backing the pixel buffer's planes once
+ * VideoToolbox is done with the data. */
+ static void
+ gst_pixel_buffer_release_cb (void *releaseRefCon, const void *dataPtr,
+ size_t dataSize, size_t numberOfPlanes, const void *planeAddresses[])
+ {
+ GstVTEncFrame *frame = (GstVTEncFrame *) releaseRefCon;
+ gst_vtenc_frame_free (frame);
+ }
+ #endif
+
+ /* Register one "vtenc_<element_name>" element type for @codec_details.
+ * The details struct is attached to the GType as qdata so base/class init
+ * can retrieve it later. */
+ static void
+ gst_vtenc_register (GstPlugin * plugin,
+ const GstVTEncoderDetails * codec_details)
+ {
+ GTypeInfo type_info = {
+ sizeof (GstVTEncClass),
+ (GBaseInitFunc) gst_vtenc_base_init,
+ NULL,
+ (GClassInitFunc) gst_vtenc_class_init,
+ NULL,
+ NULL,
+ sizeof (GstVTEnc),
+ 0,
+ (GInstanceInitFunc) gst_vtenc_init,
+ };
+ gchar *type_name;
+ GType type;
+ gboolean result;
+
+ type_name = g_strdup_printf ("vtenc_%s", codec_details->element_name);
+
+ type =
+ g_type_register_static (GST_TYPE_VIDEO_ENCODER, type_name, &type_info, 0);
+
+ g_type_set_qdata (type, GST_VTENC_CODEC_DETAILS_QDATA,
+ (gpointer) codec_details);
+
+ result = gst_element_register (plugin, type_name, GST_RANK_PRIMARY, type);
+ if (!result) {
+ GST_ERROR_OBJECT (plugin, "failed to register element %s", type_name);
+ }
+
+ g_free (type_name);
+ }
+
+ /* Codec table: one element is registered per entry; the HW-only H.264
+ * variant is not built on iOS. */
+ static const GstVTEncoderDetails gst_vtenc_codecs[] = {
+ {"H.264", "h264", "video/x-h264", kCMVideoCodecType_H264, FALSE},
+ #ifndef HAVE_IOS
+ {"H.264 (HW only)", "h264_hw", "video/x-h264", kCMVideoCodecType_H264, TRUE},
+ #endif
+ };
+
+ /* Plugin entry point: initialize the debug category and register one
+ * encoder element per entry in gst_vtenc_codecs. */
+ void
+ gst_vtenc_register_elements (GstPlugin * plugin)
+ {
+ guint i;
+
+ GST_DEBUG_CATEGORY_INIT (gst_vtenc_debug, "vtenc",
+ 0, "Apple VideoToolbox Encoder Wrapper");
+
+ for (i = 0; i != G_N_ELEMENTS (gst_vtenc_codecs); i++)
+ gst_vtenc_register (plugin, &gst_vtenc_codecs[i]);
+ }
--- /dev/null
+ /*
+ *
+ * BlueZ - Bluetooth protocol stack for Linux
+ *
+ * Copyright (C) 2012 Collabora Ltd.
+ *
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include <unistd.h>
+ #include <stdint.h>
+ #include <string.h>
+ #include <poll.h>
+
+ #include <gst/rtp/gstrtppayloads.h>
+ #include "gstbluezelements.h"
+ #include "gstavdtpsrc.h"
+
+ GST_DEBUG_CATEGORY_STATIC (avdtpsrc_debug);
+ #define GST_CAT_DEFAULT (avdtpsrc_debug)
+
+ #define DEFAULT_VOLUME 127
+
+ /* GObject property IDs for avdtpsrc */
+ enum
+ {
+ PROP_0,
+ PROP_TRANSPORT,
+ PROP_TRANSPORT_VOLUME,
+ };
+
+ /* Type registration boilerplate; G_DEFINE_TYPE also defines and
+ * initializes gst_avdtp_src_parent_class (aliased as parent_class). */
+ #define parent_class gst_avdtp_src_parent_class
+ G_DEFINE_TYPE (GstAvdtpSrc, gst_avdtp_src, GST_TYPE_BASE_SRC);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (avdtpsrc, "avdtpsrc", GST_RANK_NONE,
+ GST_TYPE_AVDTP_SRC, bluez_element_init (plugin));
+
+ /* Source pad caps: RTP-packetized SBC or MPEG-2/4 AAC (MP4A-LATM) as
+ * delivered by the BlueZ A2DP transport. Note: 24000 Hz (not "2400",
+ * which is not a valid MPEG audio sampling rate) belongs in the AAC
+ * clock-rate list. */
+ static GstStaticPadTemplate gst_avdtp_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\","
+ "payload = (int) "
+ GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 16000, 32000, "
+ "44100, 48000 }, " "encoding-name = (string) \"SBC\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\","
+ "payload = (int) "
+ GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 8000, 11025, 12000, 16000, "
+ "22050, 24000, 32000, 44100, 48000, 64000, 88200, 96000 }, "
+ "encoding-name = (string) \"MP4A-LATM\"; "));
+
+ static void gst_avdtp_src_finalize (GObject * object);
+ static void gst_avdtp_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_avdtp_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+ static GstCaps *gst_avdtp_src_getcaps (GstBaseSrc * bsrc, GstCaps * filter);
+ static gboolean gst_avdtp_src_query (GstBaseSrc * bsrc, GstQuery * query);
+ static gboolean gst_avdtp_src_start (GstBaseSrc * bsrc);
+ static gboolean gst_avdtp_src_stop (GstBaseSrc * bsrc);
+ static GstFlowReturn gst_avdtp_src_create (GstBaseSrc * bsrc, guint64 offset,
+ guint length, GstBuffer ** outbuf);
+ static gboolean gst_avdtp_src_unlock (GstBaseSrc * bsrc);
+ static gboolean gst_avdtp_src_unlock_stop (GstBaseSrc * bsrc);
+
+ /* Class init: wire up GObject/GstBaseSrc virtual methods, install the
+ * "transport" and "transport-volume" properties, and add element
+ * metadata and the static pad template. */
+ static void
+ gst_avdtp_src_class_init (GstAvdtpSrcClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
+
+ /* parent_class (gst_avdtp_src_parent_class) is already initialized by
+ * G_DEFINE_TYPE before this runs; no g_type_class_peek_parent() needed */
+
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_avdtp_src_finalize);
+ gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_avdtp_src_set_property);
+ gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_avdtp_src_get_property);
+
+ basesrc_class->start = GST_DEBUG_FUNCPTR (gst_avdtp_src_start);
+ basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_avdtp_src_stop);
+ basesrc_class->create = GST_DEBUG_FUNCPTR (gst_avdtp_src_create);
+ basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_avdtp_src_unlock);
+ basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_avdtp_src_unlock_stop);
+ basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_avdtp_src_getcaps);
+ basesrc_class->query = GST_DEBUG_FUNCPTR (gst_avdtp_src_query);
+
+ g_object_class_install_property (gobject_class, PROP_TRANSPORT,
+ g_param_spec_string ("transport",
+ "Transport", "Use configured transport", NULL, G_PARAM_READWRITE));
+
+ g_object_class_install_property (gobject_class, PROP_TRANSPORT_VOLUME,
+ g_param_spec_uint ("transport-volume",
+ "Transport volume",
+ "Volume of the transport (only valid if transport is acquired)",
+ 0, 127, DEFAULT_VOLUME, G_PARAM_READWRITE));
+
+ gst_element_class_set_static_metadata (element_class,
+ "Bluetooth AVDTP Source",
+ "Source/Audio/Network/RTP",
+ "Receives audio from an A2DP device",
+ "Arun Raghavan <arun.raghavan@collabora.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (avdtpsrc_debug, "avdtpsrc", 0,
+ "Bluetooth AVDTP Source");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_avdtp_src_template);
+ }
+
+ /* Instance init: a live source operating in GST_FORMAT_TIME with basesrc
+ * timestamping enabled; transport volume defaults to 127 (the maximum of
+ * the property's declared range). */
+ static void
+ gst_avdtp_src_init (GstAvdtpSrc * avdtpsrc)
+ {
+ avdtpsrc->poll = gst_poll_new (TRUE);
+
+ avdtpsrc->duration = GST_CLOCK_TIME_NONE;
+ avdtpsrc->transport_volume = DEFAULT_VOLUME;
+
+ gst_base_src_set_format (GST_BASE_SRC (avdtpsrc), GST_FORMAT_TIME);
+ gst_base_src_set_live (GST_BASE_SRC (avdtpsrc), TRUE);
+ gst_base_src_set_do_timestamp (GST_BASE_SRC (avdtpsrc), TRUE);
+ }
+
+ /* Finalize: free the poll object and reset the AVDTP connection state. */
+ static void
+ gst_avdtp_src_finalize (GObject * object)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (object);
+
+ gst_poll_free (avdtpsrc->poll);
+
+ gst_avdtp_connection_reset (&avdtpsrc->conn);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* Standard GObject property getter for "transport" and
+ * "transport-volume". */
+ static void
+ gst_avdtp_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (object);
+
+ switch (prop_id) {
+ case PROP_TRANSPORT:
+ g_value_set_string (value, avdtpsrc->conn.transport);
+ break;
+
+ case PROP_TRANSPORT_VOLUME:
+ g_value_set_uint (value, avdtpsrc->transport_volume);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* Standard GObject property setter; the transport string is handed to the
+ * connection helper rather than stored directly. */
+ static void
+ gst_avdtp_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (object);
+
+ switch (prop_id) {
+ case PROP_TRANSPORT:
+ gst_avdtp_connection_set_transport (&avdtpsrc->conn,
+ g_value_get_string (value));
+ break;
+
+ case PROP_TRANSPORT_VOLUME:
+ avdtpsrc->transport_volume = g_value_get_uint (value);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* Answer TIME-format duration queries from avdtpsrc->duration (filled in
+ * by avrcp_metadata_cb when AVRCP supplies track metadata); everything
+ * else is delegated to the base class. */
+ static gboolean
+ gst_avdtp_src_query (GstBaseSrc * bsrc, GstQuery * query)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+ gboolean ret = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:{
+ GstFormat format;
+
+ if (avdtpsrc->duration != GST_CLOCK_TIME_NONE) {
+ gst_query_parse_duration (query, &format, NULL);
+
+ if (format == GST_FORMAT_TIME) {
+ gst_query_set_duration (query, format, (gint64) avdtpsrc->duration);
+ ret = TRUE;
+ }
+ }
+
+ break;
+ }
+
+ /* default is the last label, so falling off the switch is fine */
+ default:
+ ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
+ }
+
+ return ret;
+ }
+
+ /* Build RTP caps from the device caps obtained at start(): SBC maps to a
+ * dynamic-payload SBC stream, audio/mpeg to MP4A-LATM with mpegversion /
+ * channels / base-profile / rate copied over from the device structure.
+ * Returns NULL when a required device-caps field is missing; falls back
+ * to the template caps while the device is not open. */
+ static GstCaps *
+ gst_avdtp_src_getcaps (GstBaseSrc * bsrc, GstCaps * filter)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+ GstCaps *caps = NULL, *ret = NULL;
+
+ if (avdtpsrc->dev_caps) {
+ const GValue *value;
+ const char *format;
+ int rate;
+ GstStructure *structure = gst_caps_get_structure (avdtpsrc->dev_caps, 0);
+
+ format = gst_structure_get_name (structure);
+
+ if (g_str_equal (format, "audio/x-sbc")) {
+ /* FIXME: we can return a fixed payload type once we
+ * are in PLAYING */
+ caps = gst_caps_new_simple ("application/x-rtp",
+ "media", G_TYPE_STRING, "audio",
+ "payload", GST_TYPE_INT_RANGE, 96, 127,
+ "encoding-name", G_TYPE_STRING, "SBC", NULL);
+ } else if (g_str_equal (format, "audio/mpeg")) {
+ caps = gst_caps_new_simple ("application/x-rtp",
+ "media", G_TYPE_STRING, "audio",
+ "payload", GST_TYPE_INT_RANGE, 96, 127,
+ "encoding-name", G_TYPE_STRING, "MP4A-LATM", NULL);
+
+ value = gst_structure_get_value (structure, "mpegversion");
+ if (!value || !G_VALUE_HOLDS_INT (value)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to get mpegversion");
+ gst_caps_unref (caps);
+ return NULL;
+ }
+ gst_caps_set_simple (caps, "mpegversion", G_TYPE_INT,
+ g_value_get_int (value), NULL);
+
+ value = gst_structure_get_value (structure, "channels");
+ if (!value || !G_VALUE_HOLDS_INT (value)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to get channels");
+ gst_caps_unref (caps);
+ return NULL;
+ }
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT,
+ g_value_get_int (value), NULL);
+
+ value = gst_structure_get_value (structure, "base-profile");
+ if (!value || !G_VALUE_HOLDS_STRING (value)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to get base-profile");
+ gst_caps_unref (caps);
+ return NULL;
+ }
+ gst_caps_set_simple (caps, "base-profile", G_TYPE_STRING,
+ g_value_get_string (value), NULL);
+
+ } else {
+ /* caps is still NULL here, so returning directly leaks nothing */
+ GST_ERROR_OBJECT (avdtpsrc,
+ "Only SBC and MPEG-2/4 are supported at the moment");
++ return NULL;
+ }
+
+ value = gst_structure_get_value (structure, "rate");
+ if (!value || !G_VALUE_HOLDS_INT (value)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to get sample rate");
+ gst_caps_unref (caps);
+ return NULL;
+ }
+ rate = g_value_get_int (value);
+
+ gst_caps_set_simple (caps, "clock-rate", G_TYPE_INT, rate, NULL);
+
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ } else
+ ret = caps;
+ } else {
+ GST_DEBUG_OBJECT (avdtpsrc, "device not open, using template caps");
+ ret = GST_BASE_SRC_CLASS (parent_class)->get_caps (bsrc, filter);
+ }
+
+ return ret;
+ }
+
+ /* Invoked by the AVRCP connection when track metadata arrives: records
+ * the duration (used by gst_avdtp_src_query) and forwards the tags as a
+ * downstream tag event plus a bus message.
+ * NOTE(review): the event receives a copy of the taglist while the
+ * message consumes the original — this assumes GstAvrcpConnection
+ * transfers taglist ownership to the callback; confirm against the
+ * avrcp helper's contract. */
+ static void
+ avrcp_metadata_cb (GstAvrcpConnection * avrcp, GstTagList * taglist,
+ gpointer user_data)
+ {
+ GstAvdtpSrc *src = GST_AVDTP_SRC (user_data);
+ guint64 duration;
+
+ if (gst_tag_list_get_uint64 (taglist, GST_TAG_DURATION, &duration)) {
+ src->duration = duration;
+ gst_element_post_message (GST_ELEMENT (src),
+ gst_message_new_duration_changed (GST_OBJECT (src)));
+ }
+
+ gst_pad_push_event (GST_BASE_SRC_PAD (src),
+ gst_event_new_tag (gst_tag_list_copy (taglist)));
+ gst_element_post_message (GST_ELEMENT (src),
+ gst_message_new_tag (GST_OBJECT (src), taglist));
+ }
+
+ /* Derive the device object path from the transport path and open an
+ * AVRCP connection on it for metadata callbacks. */
+ static void
+ gst_avdtp_src_start_avrcp (GstAvdtpSrc * src)
+ {
+ gchar *path, **strv;
+ int i;
+
+ /* Strip out the /fdX in /org/bluez/dev_.../fdX */
+ strv = g_strsplit (src->conn.transport, "/", -1);
+
+ /* count the path components */
+ for (i = 0; strv[i]; i++);
+ /* NOTE(review): if this fails, strv leaks — presumably unreachable for
+ * valid BlueZ object paths, which always contain '/' */
+ g_return_if_fail (i > 0);
+
+ /* drop the trailing fd component */
+ g_free (strv[i - 1]);
+ strv[i - 1] = NULL;
+
+ path = g_strjoinv ("/", strv);
+ g_strfreev (strv);
+
+ src->avrcp = gst_avrcp_connection_new (path, avrcp_metadata_cb, src, NULL);
+
+ g_free (path);
+ }
+
+ /* Tear down the AVRCP connection created in gst_avdtp_src_start_avrcp(). */
+ static void
+ gst_avdtp_src_stop_avrcp (GstAvdtpSrc * src)
+ {
+ gst_avrcp_connection_free (src->avrcp);
+ }
+
+ /* basesrc start vfunc: acquire the AVDTP transport, fetch its properties
+ * and stream fd, size the blocks to the link MTU, read the device caps,
+ * and arm the poll object. Any failure after acquisition releases the
+ * transport again. */
+ static gboolean
+ gst_avdtp_src_start (GstBaseSrc * bsrc)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+
+ /* None of this can go into prepare() since we need to set up the
+ * connection to figure out what format the device is going to send us.
+ */
+
+ if (!gst_avdtp_connection_acquire (&avdtpsrc->conn, FALSE)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to acquire connection");
+ return FALSE;
+ }
+
+ if (!gst_avdtp_connection_get_properties (&avdtpsrc->conn)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to get transport properties");
+ goto fail;
+ }
+
+ if (!gst_avdtp_connection_conf_recv_stream_fd (&avdtpsrc->conn)) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to configure stream fd");
+ goto fail;
+ }
+
+ GST_DEBUG_OBJECT (avdtpsrc, "Setting block size to link MTU (%d)",
+ avdtpsrc->conn.data.link_mtu);
+ gst_base_src_set_blocksize (GST_BASE_SRC (avdtpsrc),
+ avdtpsrc->conn.data.link_mtu);
+
+ avdtpsrc->dev_caps = gst_avdtp_connection_get_caps (&avdtpsrc->conn);
+ if (!avdtpsrc->dev_caps) {
+ GST_ERROR_OBJECT (avdtpsrc, "Failed to get device caps");
+ goto fail;
+ }
+
+ gst_poll_fd_init (&avdtpsrc->pfd);
+ avdtpsrc->pfd.fd = g_io_channel_unix_get_fd (avdtpsrc->conn.stream);
+
+ gst_poll_add_fd (avdtpsrc->poll, &avdtpsrc->pfd);
+ gst_poll_fd_ctl_read (avdtpsrc->poll, &avdtpsrc->pfd, TRUE);
+ gst_poll_set_flushing (avdtpsrc->poll, FALSE);
+
+ g_atomic_int_set (&avdtpsrc->unlocked, FALSE);
+
+ /* The life time of the connection is shorter than the src object, so we
+ * don't need to worry about memory management */
+ gst_avdtp_connection_notify_volume (&avdtpsrc->conn, G_OBJECT (avdtpsrc),
+ "transport-volume");
+
+ gst_avdtp_src_start_avrcp (avdtpsrc);
+
+ return TRUE;
+
+ fail:
+ gst_avdtp_connection_release (&avdtpsrc->conn);
+ return FALSE;
+ }
+
+ /* basesrc stop vfunc: undo everything start() set up — poll registration,
+ * AVRCP connection, transport acquisition and cached device caps. */
+ static gboolean
+ gst_avdtp_src_stop (GstBaseSrc * bsrc)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+
+ gst_poll_remove_fd (avdtpsrc->poll, &avdtpsrc->pfd);
+ gst_poll_set_flushing (avdtpsrc->poll, TRUE);
+
+ gst_avdtp_src_stop_avrcp (avdtpsrc);
+ gst_avdtp_connection_release (&avdtpsrc->conn);
+
+ if (avdtpsrc->dev_caps) {
+ gst_caps_unref (avdtpsrc->dev_caps);
+ avdtpsrc->dev_caps = NULL;
+ }
+
+ return TRUE;
+ }
+
+ /* basesrc create vfunc: wait on the poll object until the transport fd is
+ * readable (or we are unlocked/flushing), allocate a buffer of @length
+ * bytes via the base class, and fill it with one read(). @offset is
+ * ignored since the element operates in GST_FORMAT_TIME.
+ *
+ * Fixes over the previous revision: the poll-failure path used to unref
+ * a NULL buffer, and the read_error/eof paths unref'd the buffer while
+ * it was still mapped. */
+ static GstFlowReturn
+ gst_avdtp_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
+ GstBuffer ** outbuf)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+ GstBuffer *buf = NULL;
+ GstMapInfo info;
+ int ret;
+
+ if (g_atomic_int_get (&avdtpsrc->unlocked))
+ return GST_FLOW_FLUSHING;
+
+ /* We don't operate in GST_FORMAT_BYTES, so offset is ignored */
+
+ while ((ret = gst_poll_wait (avdtpsrc->poll, GST_CLOCK_TIME_NONE))) {
+ if (g_atomic_int_get (&avdtpsrc->unlocked))
+ /* We're unlocked, time to gtfo */
+ return GST_FLOW_FLUSHING;
+
+ if (ret < 0)
+ /* Something went wrong */
+ goto read_error;
+
+ if (ret > 0)
+ /* Got some data */
+ break;
+ }
+
+ ret = GST_BASE_SRC_CLASS (parent_class)->alloc (bsrc, offset, length, outbuf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto alloc_failed;
+
+ buf = *outbuf;
+
+ gst_buffer_map (buf, &info, GST_MAP_WRITE);
+
+ ret = read (avdtpsrc->pfd.fd, info.data, length);
+
+ if (ret < 0)
+ goto read_error;
+ else if (ret == 0) {
+ GST_INFO_OBJECT (avdtpsrc, "Got EOF on the transport fd");
+ goto eof;
+ }
+
+ /* short read: shrink the buffer to what was actually received */
+ if ((guint) ret < length)
+ gst_buffer_set_size (buf, ret);
+
+ GST_LOG_OBJECT (avdtpsrc, "Read %d bytes", ret);
+
+ gst_buffer_unmap (buf, &info);
+ *outbuf = buf;
+
+ return GST_FLOW_OK;
+
+ alloc_failed:
+ {
+ GST_DEBUG_OBJECT (bsrc, "alloc failed: %s", gst_flow_get_name (ret));
+ return ret;
+ }
+
+ read_error:
+ GST_ERROR_OBJECT (avdtpsrc, "Error while reading audio data: %s",
+ strerror (errno));
+ /* buf is NULL when the poll itself failed (nothing allocated yet);
+ * otherwise it is still mapped and must be unmapped before unref */
+ if (buf) {
+ gst_buffer_unmap (buf, &info);
+ gst_buffer_unref (buf);
+ *outbuf = NULL;
+ }
+ return GST_FLOW_ERROR;
+
+ eof:
+ gst_buffer_unmap (buf, &info);
+ gst_buffer_unref (buf);
+ *outbuf = NULL;
+ return GST_FLOW_EOS;
+ }
+
+ /* basesrc unlock vfunc: interrupt a blocked create() by setting the
+ * unlocked flag and flushing the poll so gst_poll_wait() returns. */
+ static gboolean
+ gst_avdtp_src_unlock (GstBaseSrc * bsrc)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+
+ g_atomic_int_set (&avdtpsrc->unlocked, TRUE);
+
+ gst_poll_set_flushing (avdtpsrc->poll, TRUE);
+
+ return TRUE;
+ }
+
+ /* basesrc unlock_stop vfunc: resume after unlock() by clearing the flag,
+ * re-arming the poll, and re-fetching the stream fd to drop stale data. */
+ static gboolean
+ gst_avdtp_src_unlock_stop (GstBaseSrc * bsrc)
+ {
+ GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
+
+ g_atomic_int_set (&avdtpsrc->unlocked, FALSE);
+
+ gst_poll_set_flushing (avdtpsrc->poll, FALSE);
+
+ /* Flush out any stale data that might be buffered */
+ gst_avdtp_connection_conf_recv_stream_fd (&avdtpsrc->conn);
+
+ return TRUE;
+ }
--- /dev/null
-static const GUID CLSID_AVI_DECOMPRESSOR =
- {0xCF49D4E0, 0x1115, 0x11CE,
+ /*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008, 2009, 2010> Fluendo <support@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <dmoreg.h>
+ #include <wmcodecdsp.h>
+
+ #include "gstdshowvideodec.h"
+ #include <gst/video/video.h>
+
+ GST_DEBUG_CATEGORY_STATIC (dshowvideodec_debug);
+ #define GST_CAT_DEFAULT dshowvideodec_debug
+
+ #define gst_dshowvideodec_parent_class parent_class
+ G_DEFINE_TYPE(GstDshowVideoDec, gst_dshowvideodec, GST_TYPE_ELEMENT)
+
+ static void gst_dshowvideodec_finalize (GObject * object);
+ static GstStateChangeReturn gst_dshowvideodec_change_state
+ (GstElement * element, GstStateChange transition);
+
+ /* sink pad overwrites */
+ static gboolean gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps);
+ static gboolean gst_dshowvideodec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event);
+ static GstFlowReturn gst_dshowvideodec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer);
+
+ /* src pad overwrites */
+ static GstCaps *gst_dshowvideodec_src_getcaps (GstPad * pad);
+ static gboolean gst_dshowvideodec_src_setcaps (GstPad * pad, GstCaps * caps);
+
+ /* utils */
+ static gboolean gst_dshowvideodec_create_graph_and_filters (GstDshowVideoDec *
+ vdec);
+ static gboolean gst_dshowvideodec_destroy_graph_and_filters (GstDshowVideoDec *
+ vdec);
+ static gboolean gst_dshowvideodec_flush (GstDshowVideoDec * adec);
+ static gboolean gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec *
+ vdec, const GUID subtype, VIDEOINFOHEADER ** format, guint * size);
+
+
+ #define GUID_MEDIATYPE_VIDEO {0x73646976, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_WMVV1 {0x31564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_WMVV2 {0x32564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_WMVV3 {0x33564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_WMVP {0x50564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_WMVA {0x41564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_WVC1 {0x31435657, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_CVID {0x64697663, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_MP4S {0x5334504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_MP42 {0x3234504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_MP43 {0x3334504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_M4S2 {0x3253344d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_XVID {0x44495658, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_DX50 {0x30355844, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_DIVX {0x58564944, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_DIV3 {0x33564944, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+
+ #define GUID_MEDIASUBTYPE_MPG4 {0x3447504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_MPEG1Payload {0xe436eb81, 0x524f, 0x11ce, {0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70}}
+
+
+ /* output types */
+ #define GUID_MEDIASUBTYPE_YUY2 {0x32595559, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_YV12 {0x32315659, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+ #define GUID_MEDIASUBTYPE_RGB32 {0xe436eb7e, 0x524f, 0x11ce, { 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70 }}
+ #define GUID_MEDIASUBTYPE_RGB565 {0xe436eb7b, 0x524f, 0x11ce, { 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70 }}
+
+ /* WMV always uses the WMV DMO */
+ static PreferredFilter preferred_wmv_filters[] = {
+ {&CLSID_CWMVDecMediaObject, &DMOCATEGORY_VIDEO_DECODER}, {0}
+ };
+
-static const GUID CLSID_MPEG_VIDEO_DECODER =
- {0xFEB50740, 0x7BEF, 0x11CE,
++static const GUID CLSID_AVI_DECOMPRESSOR =
++ {0xCF49D4E0, 0x1115, 0x11CE,
+ {0xB0, 0x3A, 0x00, 0x20, 0xAF, 0x0B, 0xA7, 0x70}};
+ static PreferredFilter preferred_cinepack_filters[] = {
+ {&CLSID_AVI_DECOMPRESSOR}, {0}
+ };
+
+ /* Various MPEG-4 video variants */
+ // MPG4, mpg4, MP42, mp42
+ static PreferredFilter preferred_mpeg4_filters[] = {
+ {&CLSID_CMpeg4DecMediaObject, &DMOCATEGORY_VIDEO_DECODER}, {0}};
+ // MP4S, mp4s, M4S2, m4s2
+ static PreferredFilter preferred_mp4s_filters[] = {
+ {&CLSID_CMpeg4sDecMediaObject, &DMOCATEGORY_VIDEO_DECODER}, {0}};
+ // MP43, mp43
+ static PreferredFilter preferred_mp43_filters[] = {
+ {&CLSID_CMpeg43DecMediaObject, &DMOCATEGORY_VIDEO_DECODER}, {0}};
+
-
++static const GUID CLSID_MPEG_VIDEO_DECODER =
++ {0xFEB50740, 0x7BEF, 0x11CE,
+ {0x9B, 0xD9, 0x00, 0x00, 0xE2, 0x02, 0x59, 0x9C}};
+ static PreferredFilter preferred_mpeg1_filters[] = {
+ {&CLSID_MPEG_VIDEO_DECODER}, {0}
+ };
+
+ /* video codecs array */
+ static const VideoCodecEntry video_dec_codecs[] = {
+ {"dshowvdec_wmv1", "Windows Media Video 7",
+ GST_MAKE_FOURCC ('W', 'M', 'V', '1'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV1,
+ "video/x-wmv, wmvversion = (int) 1",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_wmv_filters},
+
+ {"dshowvdec_wmv2", "Windows Media Video 8",
+ GST_MAKE_FOURCC ('W', 'M', 'V', '2'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV2,
+ "video/x-wmv, wmvversion = (int) 2",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_wmv_filters},
+
+ {"dshowvdec_wmv3", "Windows Media Video 9",
+ GST_MAKE_FOURCC ('W', 'M', 'V', '3'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV3,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (string) WMV3",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_wmv_filters},
+
+ {"dshowvdec_wmvp", "Windows Media Video 9 Image",
+ GST_MAKE_FOURCC ('W', 'M', 'V', 'P'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVP,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (string) { WMVP, MSS1 }",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_wmv_filters},
+
+ {"dshowvdec_wmva", "Windows Media Video 9 Advanced",
+ GST_MAKE_FOURCC ('W', 'M', 'V', 'A'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVA,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (string) WMVA",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_wmv_filters},
+
+ {"dshowvdec_wvc1", "Windows Media VC1 video",
+ GST_MAKE_FOURCC ('W', 'V', 'C', '1'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WVC1,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (string) WVC1",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_wmv_filters},
+
+ {"dshowvdec_cinepak", "Cinepack",
+ 0x64697663,
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_CVID,
+ "video/x-cinepak",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_RGB32,
+ "video/x-raw, format=(string)RGB, bpp=(int)32, depth=(int)24, "
+ "endianness=(int)4321, red_mask=(int)65280, "
+ "green_mask=(int)16711680, blue_mask=(int)-16777216",
+ preferred_cinepack_filters},
+
+ {"dshowvdec_msmpeg41", "Microsoft ISO MPEG-4 version 1",
+ GST_MAKE_FOURCC ('M', 'P', '4', 'S'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP4S,
+ "video/x-msmpeg, msmpegversion=(int)41",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_mp4s_filters},
+
+ {"dshowvdec_msmpeg42", "Microsoft ISO MPEG-4 version 2",
+ GST_MAKE_FOURCC ('M', 'P', '4', '2'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP42,
+ "video/x-msmpeg, msmpegversion=(int)42",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_mpeg4_filters},
+
+ {"dshowvdec_msmpeg43", "Microsoft ISO MPEG-4 version 3",
+ GST_MAKE_FOURCC ('M', 'P', '4', '3'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP43,
+ "video/x-msmpeg, msmpegversion=(int)43",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_mp43_filters},
+
+ {"dshowvdec_msmpeg4", "Microsoft ISO MPEG-4 version 1.1",
+ GST_MAKE_FOURCC ('M', '4', 'S', '2'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_M4S2,
+ "video/x-msmpeg, msmpegversion=(int)4",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_mp4s_filters},
+
+ {"dshowvdec_mpeg1",
+ "MPEG-1 Video",
+ GST_MAKE_FOURCC ('M', 'P', 'E', 'G'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MPEG1Payload,
+ "video/mpeg, mpegversion= (int) 1, "
+ "parsed= (boolean) true, " "systemstream= (boolean) false",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_mpeg1_filters},
- /* FOR RGB directshow decoder will return bottom-up BITMAP
++
+ {"dshowvdec_mpeg4", "MPEG-4 Video",
+ GST_MAKE_FOURCC ('M', 'P', 'G', '4'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MPG4,
+ "video/mpeg, msmpegversion=(int)4",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2"),
+ preferred_mpeg4_filters},
+
+ /* The rest of these have no preferred filter; windows doesn't come
+ * with anything appropriate */
+ {"dshowvdec_xvid", "XVID Video",
+ GST_MAKE_FOURCC ('X', 'V', 'I', 'D'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_XVID,
+ "video/x-xvid",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2")},
+
+ {"dshowvdec_divx5", "DIVX 5.0 Video",
+ GST_MAKE_FOURCC ('D', 'X', '5', '0'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DX50,
+ "video/x-divx, divxversion=(int)5",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2")},
+
+ {"dshowvdec_divx4", "DIVX 4.0 Video",
+ GST_MAKE_FOURCC ('D', 'I', 'V', 'X'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DIVX,
+ "video/x-divx, divxversion=(int)4",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2")},
+
+ {"dshowvdec_divx3", "DIVX 3.0 Video",
+ GST_MAKE_FOURCC ('D', 'I', 'V', '3'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP43,
+ "video/x-divx, divxversion=(int)3",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ GST_VIDEO_CAPS_MAKE("YUY2")}
+ };
+
+ HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
+ {
+ gboolean in_seg = FALSE;
+ guint64 clip_start = 0, clip_stop = 0;
+ GstDshowVideoDecClass *klass =
+ (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (mDec);
+ GstBuffer *buf = NULL;
+ GstClockTime start, stop;
+ GstMapInfo map;
+
+ if(pMediaSample)
+ {
+ BYTE *pBuffer = NULL;
+ LONGLONG lStart = 0, lStop = 0;
+ long size = pMediaSample->GetActualDataLength();
+
+ pMediaSample->GetPointer(&pBuffer);
+ pMediaSample->GetTime(&lStart, &lStop);
+
+ start = lStart * 100;
+ stop = lStop * 100;
+ /* check if this buffer is in our current segment */
+ in_seg = gst_segment_clip (mDec->segment, GST_FORMAT_TIME,
+ start, stop, &clip_start, &clip_stop);
+
+ /* if the buffer is out of segment do not push it downstream */
+ if (!in_seg) {
+ GST_DEBUG_OBJECT (mDec,
+ "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+ goto done;
+ }
+
+ /* buffer is in our segment, allocate a new out buffer and clip its
+ * timestamps */
+ gst_buffer_pool_acquire_buffer(mDec->buffer_pool, &buf, NULL);
+ if (!buf) {
+ GST_WARNING_OBJECT (mDec,
+ "cannot allocate a new GstBuffer");
+ goto done;
+ }
+
+ /* set buffer properties */
+ GST_BUFFER_TIMESTAMP (buf) = clip_start;
+ GST_BUFFER_DURATION (buf) = clip_stop - clip_start;
+
+ gst_buffer_map(buf, &map, GST_MAP_WRITE);
+ if (strstr (klass->entry->srccaps, "rgb")) {
- gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Video", description,
++ /* FOR RGB directshow decoder will return bottom-up BITMAP
+ * There is probably a way to get top-bottom video frames from
+ * the decoder...
+ */
+ gint line = 0;
+ guint stride = mDec->width * 4;
+
+ for (; line < mDec->height; line++) {
+ memcpy (map.data + (line * stride),
+ pBuffer + (size - ((line + 1) * (stride))), stride);
+ }
+ } else {
+ memcpy (map.data, pBuffer, MIN ((unsigned int)size, map.size));
+ }
+ gst_buffer_unmap(buf, &map);
+
+ GST_LOG_OBJECT (mDec,
+ "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
+ " duration %" GST_TIME_FORMAT, size,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
+
+ /* push the buffer downstream */
+ mDec->last_ret = gst_pad_push (mDec->srcpad, buf);
+ }
+ done:
+
+ return S_OK;
+ }
+
+ HRESULT VideoFakeSink::CheckMediaType(const CMediaType *pmt)
+ {
+ if (pmt != NULL) {
+ if (*pmt == m_MediaType)
+ return S_OK;
+ }
+
+ return S_FALSE;
+ }
+
+ static void
+ gst_dshowvideodec_base_init (gpointer klass)
+ {
+ GstDshowVideoDecClass *videodec_class = (GstDshowVideoDecClass *) klass;
+ GstPadTemplate *src, *sink;
+ GstCaps *srccaps, *sinkcaps;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ const VideoCodecEntry *tmp;
+ gpointer qdata;
+ gchar *longname, *description;
+
+ qdata = g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), DSHOW_CODEC_QDATA);
+
+ /* element details */
+ tmp = videodec_class->entry = (VideoCodecEntry *) qdata;
+
+ longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
+ tmp->element_longname);
+ description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
+ tmp->element_longname);
+
- gst_caps_set_simple (caps_out,
++ gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Video", description,
+ "Sebastien Moutte <sebastien@moutte.net>");
+
+ g_free (longname);
+ g_free (description);
+
+ sinkcaps = gst_caps_from_string (tmp->sinkcaps);
+ gst_caps_set_simple (sinkcaps,
+ "width", GST_TYPE_INT_RANGE, 16, 4096,
+ "height", GST_TYPE_INT_RANGE, 16, 4096,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+
+ srccaps = gst_caps_from_string (tmp->srccaps);
+
+ sink = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sinkcaps);
+ src = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps);
+
+ gst_element_class_add_pad_template (element_class, src);
+ gst_element_class_add_pad_template (element_class, sink);
+
+ if (sinkcaps)
+ gst_caps_unref(sinkcaps);
+
+ if (srccaps)
+ gst_caps_unref(srccaps);
+ }
+
+ static void
+ gst_dshowvideodec_class_init (GstDshowVideoDecClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->finalize = gst_dshowvideodec_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_dshowvideodec_change_state);
+
+ parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
+ }
+
+ static void
+ gst_dshowvideodec_com_thread (GstDshowVideoDec * vdec)
+ {
+ HRESULT res;
+
+ g_mutex_lock (&vdec->com_init_lock);
+
+ /* Initialize COM with a MTA for this process. This thread will
+ * be the first one to enter the apartment and the last one to leave
+ * it, uninitializing COM properly */
+
+ res = CoInitializeEx (0, COINIT_MULTITHREADED);
+ if (res == S_FALSE)
+ GST_WARNING_OBJECT (vdec, "COM has been already initialized in the same process");
+ else if (res == RPC_E_CHANGED_MODE)
+ GST_WARNING_OBJECT (vdec, "The concurrency model of COM has changed.");
+ else
+ GST_INFO_OBJECT (vdec, "COM initialized successfully");
+
+ vdec->comInitialized = TRUE;
+
+ /* Signal other threads waiting on this condition that COM was initialized */
+ g_cond_signal (&vdec->com_initialized);
+
+ g_mutex_unlock (&vdec->com_init_lock);
+
+ /* Wait until the uninitialize condition is met to leave the COM apartment */
+ g_mutex_lock (&vdec->com_deinit_lock);
+ g_cond_wait (&vdec->com_uninitialize, &vdec->com_deinit_lock);
+
+ CoUninitialize ();
+ GST_INFO_OBJECT (vdec, "COM uninitialized successfully");
+ vdec->comInitialized = FALSE;
+ g_cond_signal (&vdec->com_uninitialized);
+ g_mutex_unlock (&vdec->com_deinit_lock);
+ }
+
+ static void
+ gst_dshowvideodec_init (GstDshowVideoDec * vdec)
+ {
+ GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vdec);
+
+ /* setup pads */
+ vdec->sinkpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (element_class, "sink"), "sink");
+
+ gst_pad_set_event_function (vdec->sinkpad, gst_dshowvideodec_sink_event);
+ gst_pad_set_chain_function (vdec->sinkpad, gst_dshowvideodec_chain);
+ gst_element_add_pad (GST_ELEMENT (vdec), vdec->sinkpad);
+
+ vdec->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (element_class, "src"), "src");
+ /* needed to implement caps negotiation on our src pad */
+ /* gst_pad_set_getcaps_function (vdec->srcpad, gst_dshowvideodec_src_getcaps);
+ gst_pad_set_setcaps_function (vdec->srcpad, gst_dshowvideodec_src_setcaps);*/
+ gst_element_add_pad (GST_ELEMENT (vdec), vdec->srcpad);
+
+ vdec->fakesrc = NULL;
+ vdec->fakesink = NULL;
+ vdec->decfilter = NULL;
+
+ vdec->last_ret = GST_FLOW_OK;
+
+ vdec->filtergraph = NULL;
+ vdec->mediafilter = NULL;
+ vdec->srccaps = NULL;
+ vdec->segment = gst_segment_new ();
+
+ vdec->setup = FALSE;
+ vdec->buffer_pool = NULL;
+
+ g_mutex_init (&vdec->com_init_lock);
+ g_mutex_init (&vdec->com_deinit_lock);
+ g_cond_init (&vdec->com_initialized);
+ g_cond_init (&vdec->com_uninitialize);
+ g_cond_init (&vdec->com_uninitialized);
+
+ g_mutex_lock (&vdec->com_init_lock);
+
+ /* create the COM initialization thread */
+ g_thread_new ("COM Init Thread", (GThreadFunc)gst_dshowvideodec_com_thread,
+ vdec);
+
+ /* wait until the COM thread signals that COM has been initialized */
+ g_cond_wait (&vdec->com_initialized, &vdec->com_init_lock);
+ g_mutex_unlock (&vdec->com_init_lock);
+ }
+
+ static void
+ gst_dshowvideodec_finalize (GObject * object)
+ {
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) (object);
+
+ if (vdec->segment) {
+ gst_segment_free (vdec->segment);
+ vdec->segment = NULL;
+ }
+
+ if(vdec->buffer_pool) {
+ gst_object_unref(vdec->buffer_pool);
+ vdec->buffer_pool = NULL;
+ }
+
+ /* signal the COM thread that it should uninitialize COM */
+ if (vdec->comInitialized) {
+ g_mutex_lock (&vdec->com_deinit_lock);
+ g_cond_signal (&vdec->com_uninitialize);
+ g_cond_wait (&vdec->com_uninitialized, &vdec->com_deinit_lock);
+ g_mutex_unlock (&vdec->com_deinit_lock);
+ }
+
+ g_mutex_clear (&vdec->com_init_lock);
+ g_mutex_clear (&vdec->com_deinit_lock);
+ g_cond_clear (&vdec->com_initialized);
+ g_cond_clear (&vdec->com_uninitialize);
+ g_cond_clear (&vdec->com_uninitialized);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static GstStateChangeReturn
+ gst_dshowvideodec_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (!gst_dshowvideodec_create_graph_and_filters (vdec))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ if (!gst_dshowvideodec_destroy_graph_and_filters (vdec))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ }
+
+ static gboolean
+ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
+ {
+ gboolean ret = FALSE;
+ HRESULT hres;
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+ GstDshowVideoDecClass *klass =
+ (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
+ GstBuffer *extradata = NULL;
+ gsize extra_size;
+ const GValue *v = NULL;
+ guint size = 0;
+ GstCaps *caps_out = NULL;
+ AM_MEDIA_TYPE output_mediatype, input_mediatype;
+ VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
+ IPinPtr output_pin;
+ IPinPtr input_pin;
+ IBaseFilter *srcfilter = NULL;
+ IBaseFilter *sinkfilter = NULL;
+ const GValue *fps, *par;
+ GstQuery *query = NULL;
+ GstBufferPool *pool = NULL;
+ GstStructure *pool_config = NULL;
+ guint pool_size, pool_min, pool_max;
+ GstVideoInfo video_info;
+
+ /* read data */
+ if (!gst_structure_get_int (s, "width", &vdec->width) ||
+ !gst_structure_get_int (s, "height", &vdec->height)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("error getting video width or height from caps"), (NULL));
+ goto end;
+ }
+ fps = gst_structure_get_value (s, "framerate");
+ if (fps) {
+ vdec->fps_n = gst_value_get_fraction_numerator (fps);
+ vdec->fps_d = gst_value_get_fraction_denominator (fps);
+ }
+ else {
+ /* Invent a sane default framerate; the timestamps matter
+ * more anyway. */
+ vdec->fps_n = 25;
+ vdec->fps_d = 1;
+ }
+
+ par = gst_structure_get_value (s, "pixel-aspect-ratio");
+ if (par) {
+ vdec->par_n = gst_value_get_fraction_numerator (par);
+ vdec->par_d = gst_value_get_fraction_denominator (par);
+ }
+ else {
+ vdec->par_n = vdec->par_d = 1;
+ }
+
+ if ((v = gst_structure_get_value (s, "codec_data"))) {
+ extradata = gst_value_get_buffer (v);
+ extra_size = gst_buffer_get_size(extradata);
+ }
+
+ /* define the input type format */
+ memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
+ input_mediatype.majortype = klass->entry->input_majortype;
+ input_mediatype.subtype = klass->entry->input_subtype;
+ input_mediatype.bFixedSizeSamples = FALSE;
+ input_mediatype.bTemporalCompression = TRUE;
+
+ if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
+ size =
+ sizeof (MPEG1VIDEOINFO) + (extradata ? extra_size - 1 : 0);
+ input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);
+
+ input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
+ if (extradata) {
+ MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;
+
+ gst_buffer_extract(extradata, 0, mpeg_info->bSequenceHeader, extra_size);
+ mpeg_info->cbSequenceHeader = extra_size;
+ }
+ input_mediatype.formattype = FORMAT_MPEGVideo;
+ } else {
+ size =
+ sizeof (VIDEOINFOHEADER) + (extradata ? extra_size : 0);
+ input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);
+ input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
+
+ if (extradata) { /* Codec data is appended after our header */
+ gst_buffer_extract(extradata, 0,
+ ((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER), extra_size);
+ input_vheader->bmiHeader.biSize += extra_size;
+ }
+ input_mediatype.formattype = FORMAT_VideoInfo;
+ }
+
+ input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
+ input_vheader->rcSource.right = vdec->width;
+ input_vheader->rcSource.bottom = vdec->height;
+ input_vheader->rcTarget = input_vheader->rcSource;
+ input_vheader->bmiHeader.biWidth = vdec->width;
+ input_vheader->bmiHeader.biHeight = vdec->height;
+ input_vheader->bmiHeader.biPlanes = 1;
+ input_vheader->bmiHeader.biBitCount = 16;
+ input_vheader->bmiHeader.biCompression = klass->entry->format;
+ input_vheader->bmiHeader.biSizeImage =
+ (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);
+
+ input_mediatype.cbFormat = size;
+ input_mediatype.pbFormat = (BYTE *) input_vheader;
+ input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;
+
+ vdec->fakesrc->GetOutputPin()->SetMediaType(&input_mediatype);
+
+ /* set the sample size for fakesrc filter to the output buffer size */
+ vdec->fakesrc->GetOutputPin()->SetSampleSize(input_mediatype.lSampleSize);
+
+ /* connect our fake src to decoder */
+ hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
+ (void **) &srcfilter);
+ if (FAILED (hres)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't QT fakesrc to IBaseFilter: %x", hres), (NULL));
+ goto end;
+ }
+
+ output_pin = gst_dshow_get_pin_from_filter (srcfilter, PINDIR_OUTPUT);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get output pin from our directshow fakesrc filter"), (NULL));
+ goto end;
+ }
+ input_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT);
+ if (!input_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get input pin from decoder filter"), (NULL));
+ goto end;
+ }
+
+ hres = vdec->filtergraph->ConnectDirect (output_pin, input_pin, NULL);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't connect fakesrc with decoder (error=%x)", hres), (NULL));
+ goto end;
+ }
+
+ /* get decoder output video format */
+ if (!gst_dshowvideodec_get_filter_output_format (vdec,
+ klass->entry->output_subtype, &output_vheader, &size)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get decoder output video format"), (NULL));
+ goto end;
+ }
+
+ memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
+ output_mediatype.majortype = klass->entry->output_majortype;
+ output_mediatype.subtype = klass->entry->output_subtype;
+ output_mediatype.bFixedSizeSamples = TRUE;
+ output_mediatype.bTemporalCompression = FALSE;
+ output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
+ output_mediatype.formattype = FORMAT_VideoInfo;
+ output_mediatype.cbFormat = size;
+ output_mediatype.pbFormat = (BYTE *) output_vheader;
+
+ vdec->fakesink->SetMediaType (&output_mediatype);
+
+ /* connect decoder to our fake sink */
+ output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get output pin from our decoder filter"), (NULL));
+ goto end;
+ }
+
+ hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
+ (void **) &sinkfilter);
+ if (FAILED (hres)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't QT fakesink to IBaseFilter: %x", hres), (NULL));
+ goto end;
+ }
+
+ input_pin = gst_dshow_get_pin_from_filter (sinkfilter, PINDIR_INPUT);
+ if (!input_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get input pin from our directshow fakesink filter"), (NULL));
+ goto end;
+ }
+
+ hres = vdec->filtergraph->ConnectDirect(output_pin, input_pin,
+ &output_mediatype);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't connect decoder with fakesink (error=%x)", hres), (NULL));
+ goto end;
+ }
+
+ /* negotiate output */
+ caps_out = gst_caps_from_string (klass->entry->srccaps);
+ gst_caps_set_simple (caps_out,
+ "width", G_TYPE_INT, vdec->width,
+ "height", G_TYPE_INT, vdec->height, NULL);
+
+ if (vdec->fps_n && vdec->fps_d) {
+ gst_caps_set_simple (caps_out,
+ "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
+ }
+
- hres == S_OK)
++ gst_caps_set_simple (caps_out,
+ "pixel-aspect-ratio", GST_TYPE_FRACTION, vdec->par_n, vdec->par_d, NULL);
+
+ if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Failed to negotiate output"), (NULL));
+ goto end;
+ }
+
+ /* request or create a buffer pool */
+ if (vdec->buffer_pool) {
+ gst_object_unref (vdec->buffer_pool);
+ }
+
+ query = gst_query_new_allocation(caps_out, TRUE);
+ gst_pad_peer_query(vdec->srcpad, query);
+
+ if (gst_query_get_n_allocation_pools (query) > 0) {
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, &pool_size, &pool_min,
+ &pool_max);
+ }
+ else {
+ pool = NULL;
+ pool_size = output_mediatype.lSampleSize;
+ pool_min = 1;
+ pool_max = 0;
+ }
+
+ if (pool == NULL) {
+ pool = gst_video_buffer_pool_new ();
+ }
+
+ if (!pool) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Could not create buffer bool"), (NULL));
+ goto end;
+ }
+
+ pool_config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set_params (pool_config, caps_out, pool_size,
+ pool_min, pool_max);
+ gst_buffer_pool_set_config (pool, pool_config);
+
+ if (!gst_buffer_pool_set_active (pool, TRUE)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Failed set buffer pool active"), (NULL));
+ goto end;
+ }
+
+ vdec->buffer_pool = pool;
+
+ hres = vdec->mediafilter->Run (-1);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't run the directshow graph (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ ret = TRUE;
+ end:
+ if (caps_out)
+ gst_caps_unref (caps_out);
+ gst_object_unref (vdec);
+ g_free (input_vheader);
+ if (srcfilter)
+ srcfilter->Release();
+ if (sinkfilter)
+ sinkfilter->Release();
+ if (query)
+ gst_query_unref(query);
+ return ret;
+ }
+
+ static gboolean
+ gst_dshowvideodec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ gboolean ret = TRUE;
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ GstCaps *caps;
+ gst_event_parse_caps(event, &caps);
+ ret = gst_dshowvideodec_sink_setcaps(pad, caps);
+ break;
+
+ case GST_EVENT_FLUSH_STOP:
+ gst_dshowvideodec_flush (vdec);
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_SEGMENT:
+ {
+ const GstSegment *segment;
+
+ gst_event_parse_segment (event, &segment);
+
+ /* save the new segment in our local current segment */
+ gst_segment_copy_into(segment, vdec->segment);
+
+ GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
+ "new segment received => start=%" GST_TIME_FORMAT " stop=%"
+ GST_TIME_FORMAT, GST_TIME_ARGS (vdec->segment->start),
+ GST_TIME_ARGS (vdec->segment->stop));
+
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ gst_object_unref (vdec);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_dshowvideodec_chain (GstPad * pad, GstObject *parent, GstBuffer * buffer)
+ {
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+ bool discont = FALSE;
+ GstClockTime stop;
+ GstMapInfo map;
+
+ if (!vdec->setup) {
+ /* we are not setup */
+ GST_WARNING_OBJECT (vdec, "Decoder not set up, failing");
+ vdec->last_ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ if (vdec->last_ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (vdec, "last decoding iteration generated a fatal error "
+ "%s", gst_flow_get_name (vdec->last_ret));
+ goto beach;
+ }
+
+ /* check if duration is valid and use duration only when it's valid
+ * because dshow is not decoding frames having stop smaller than start */
+ if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
+ stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
+ } else {
+ stop = GST_BUFFER_TIMESTAMP (buffer);
+ }
+
+ GST_CAT_LOG_OBJECT (dshowvideodec_debug, vdec,
+ "chain (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT,
+ gst_buffer_get_size (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+ GST_TIME_ARGS (stop));
+
+ /* if the incoming buffer has discont flag set => flush decoder data */
+ if (buffer && GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
+ GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
+ "this buffer has a DISCONT flag (%" GST_TIME_FORMAT "), flushing",
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
+ gst_dshowvideodec_flush (vdec);
+ discont = TRUE;
+ }
+
+ gst_buffer_map(buffer, &map, GST_MAP_READ);
+ /* push the buffer to the directshow decoder */
+ vdec->fakesrc->GetOutputPin()->PushBuffer(
+ map.data, GST_BUFFER_TIMESTAMP (buffer), stop,
+ map.size, discont);
+ gst_buffer_unmap(buffer, &map);
+
+ beach:
+ gst_buffer_unref (buffer);
+ gst_object_unref (vdec);
+
+ return vdec->last_ret;
+ }
+
+ static GstCaps *
+ gst_dshowvideodec_src_getcaps (GstPad * pad)
+ {
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+ GstCaps *caps = NULL;
+
+ if (!vdec->srccaps)
+ vdec->srccaps = gst_caps_new_empty ();
+
+ if (vdec->decfilter) {
+ IPinPtr output_pin;
+ IEnumMediaTypesPtr enum_mediatypes;
+ HRESULT hres;
+ ULONG fetched;
+
+ output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
+ ("failed getting output pin from the decoder"), (NULL));
+ goto beach;
+ }
+
+ hres = output_pin->EnumMediaTypes (&enum_mediatypes);
+ if (hres == S_OK && enum_mediatypes) {
+ AM_MEDIA_TYPE *mediatype = NULL;
+
+ enum_mediatypes->Reset();
+ while (hres =
+ enum_mediatypes->Next(1, &mediatype, &fetched),
- hres == S_OK)
++ hres == S_OK)
+ {
+ VIDEOINFOHEADER *video_info;
+ GstCaps *mediacaps = NULL;
+
+ /* RGB24 */
+ if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_RGB24) &&
+ IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo))
+ {
+ video_info = (VIDEOINFOHEADER *) mediatype->pbFormat;
+
+ /* ffmpegcolorspace handles RGB24 in BIG_ENDIAN */
+ mediacaps = gst_caps_new_simple ("video/x-raw-rgb",
+ "bpp", G_TYPE_INT, 24,
+ "depth", G_TYPE_INT, 24,
+ "width", G_TYPE_INT, video_info->bmiHeader.biWidth,
+ "height", G_TYPE_INT, video_info->bmiHeader.biHeight,
+ "framerate", GST_TYPE_FRACTION,
+ (int) (10000000 / video_info->AvgTimePerFrame), 1, "endianness",
+ G_TYPE_INT, G_BIG_ENDIAN, "red_mask", G_TYPE_INT, 255,
+ "green_mask", G_TYPE_INT, 65280, "blue_mask", G_TYPE_INT,
+ 16711680, NULL);
+
+ if (mediacaps) {
+ vdec->mediatypes = g_list_append (vdec->mediatypes, mediatype);
+ gst_caps_append (vdec->srccaps, mediacaps);
+ } else {
+ DeleteMediaType (mediatype);
+ }
+ } else {
+ DeleteMediaType (mediatype);
+ }
+
+ }
+ }
+ }
+
+ if (vdec->srccaps)
+ caps = gst_caps_ref (vdec->srccaps);
+
+ beach:
+ gst_object_unref (vdec);
+
+ return caps;
+ }
+
+ static gboolean
+ gst_dshowvideodec_src_setcaps (GstPad * pad, GstCaps * caps)
+ {
+ gboolean ret = FALSE;
+
+ return ret;
+ }
+
+ static gboolean
+ gst_dshowvideodec_flush (GstDshowVideoDec * vdec)
+ {
+ if (!vdec->fakesrc)
+ return FALSE;
+
+ /* flush dshow decoder and reset timestamp */
+ vdec->fakesrc->GetOutputPin()->Flush();
+ vdec->last_ret = GST_FLOW_OK;
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec * vdec,
+ const GUID subtype, VIDEOINFOHEADER ** format, guint * size)
+ {
+ IPinPtr output_pin;
+ IEnumMediaTypesPtr enum_mediatypes;
+ HRESULT hres;
+ ULONG fetched;
+ BOOL ret = FALSE;
+
+ if (!vdec->decfilter)
+ return FALSE;
+
+ output_pin = gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("failed getting output pin from the decoder"), (NULL));
+ return FALSE;
+ }
+
+ hres = output_pin->EnumMediaTypes (&enum_mediatypes);
+ if (hres == S_OK && enum_mediatypes) {
+ AM_MEDIA_TYPE *mediatype = NULL;
+
+ enum_mediatypes->Reset();
+ while (hres =
+ enum_mediatypes->Next(1, &mediatype, &fetched),
++ hres == S_OK)
+ {
+ if (IsEqualGUID (mediatype->subtype, subtype) &&
+ IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo))
+ {
+ *size = mediatype->cbFormat;
+ *format = (VIDEOINFOHEADER *)g_malloc0 (*size);
+ memcpy (*format, mediatype->pbFormat, *size);
+ ret = TRUE;
+ }
+ DeleteMediaType (mediatype);
+ if (ret)
+ break;
+ }
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_dshowvideodec_create_graph_and_filters (GstDshowVideoDec * vdec)
+ {
+ HRESULT hres = S_FALSE;
+ GstDshowVideoDecClass *klass =
+ (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
+ IBaseFilter *srcfilter = NULL;
+ IBaseFilter *sinkfilter = NULL;
+ gboolean ret = FALSE;
+
+ /* create the filter graph manager object */
+ hres = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
+ IID_IFilterGraph, (LPVOID *) & vdec->filtergraph);
+ if (hres != S_OK || !vdec->filtergraph) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
+ "of the directshow graph manager (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres = vdec->filtergraph->QueryInterface(IID_IMediaFilter,
+ (void **) &vdec->mediafilter);
+ if (hres != S_OK || !vdec->mediafilter) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
+ ("Can't get IMediacontrol interface "
+ "from the graph manager (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ /* create fake src filter */
+ vdec->fakesrc = new FakeSrc();
+ /* Created with a refcount of zero, so increment that */
+ vdec->fakesrc->AddRef();
+
+ hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
+ (void **) &srcfilter);
+ if (FAILED (hres)) {
+ GST_WARNING_OBJECT (vdec, "Failed to QI fakesrc to IBaseFilter");
+ goto error;
+ }
+
+ /* search a decoder filter and create it */
+ vdec->decfilter = gst_dshow_find_filter (
+ klass->entry->input_majortype,
+ klass->entry->input_subtype,
+ klass->entry->output_majortype,
+ klass->entry->output_subtype,
+ klass->entry->preferred_filters);
+ if (vdec->decfilter == NULL) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
+ "of the decoder filter"), (NULL));
+ goto error;
+ }
+
+ /* create fake sink filter */
+ vdec->fakesink = new VideoFakeSink(vdec);
+ /* Created with a refcount of zero, so increment that */
+ vdec->fakesink->AddRef();
+
+ hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
+ (void **) &sinkfilter);
+ if (FAILED (hres)) {
+ GST_WARNING_OBJECT (vdec, "Failed to QI fakesink to IBaseFilter");
+ goto error;
+ }
+
+ /* add filters to the graph */
+ hres = vdec->filtergraph->AddFilter (srcfilter, L"src");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesrc filter "
+ "to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres = vdec->filtergraph->AddFilter(vdec->decfilter, L"decoder");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add decoder filter "
+ "to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres = vdec->filtergraph->AddFilter(sinkfilter, L"sink");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesink filter "
+ "to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ vdec->setup = TRUE;
+
+ ret = TRUE;
+
+ done:
+ if (srcfilter)
+ srcfilter->Release();
+ if (sinkfilter)
+ sinkfilter->Release();
+ return ret;
+
+ error:
+ if (vdec->fakesrc) {
+ vdec->fakesrc->Release();
+ vdec->fakesrc = NULL;
+ }
+ if (vdec->decfilter) {
+ vdec->decfilter->Release();
+ vdec->decfilter = NULL;
+ }
+ if (vdec->fakesink) {
+ vdec->fakesink->Release();
+ vdec->fakesink = NULL;
+ }
+ if (vdec->mediafilter) {
+ vdec->mediafilter->Release();
+ vdec->mediafilter = NULL;
+ }
+ if (vdec->filtergraph) {
+ vdec->filtergraph->Release();
+ vdec->filtergraph = NULL;
+ }
+
+ goto done;
+ }
+
+ static gboolean
+ gst_dshowvideodec_destroy_graph_and_filters (GstDshowVideoDec * vdec)
+ {
+ HRESULT hres;
+
+ if (vdec->mediafilter) {
+ vdec->mediafilter->Stop();
+ }
+
+ if (vdec->fakesrc) {
+ if (vdec->filtergraph) {
+ IBaseFilter *filter;
+ hres = vdec->fakesrc->QueryInterface(IID_IBaseFilter,
+ (void **) &filter);
+ if (SUCCEEDED (hres)) {
+ vdec->filtergraph->RemoveFilter(filter);
+ filter->Release();
+ }
+ }
+
+ vdec->fakesrc->Release();
+ vdec->fakesrc = NULL;
+ }
+ if (vdec->decfilter) {
+ if (vdec->filtergraph)
+ vdec->filtergraph->RemoveFilter(vdec->decfilter);
+ vdec->decfilter->Release();
+ vdec->decfilter = NULL;
+ }
+ if (vdec->fakesink) {
+ if (vdec->filtergraph) {
+ IBaseFilter *filter;
+ hres = vdec->fakesink->QueryInterface(IID_IBaseFilter,
+ (void **) &filter);
+ if (SUCCEEDED (hres)) {
+ vdec->filtergraph->RemoveFilter(filter);
+ filter->Release();
+ }
+ }
+
+ vdec->fakesink->Release();
+ vdec->fakesink = NULL;
+ }
+ if (vdec->mediafilter) {
+ vdec->mediafilter->Release();
+ vdec->mediafilter = NULL;
+ }
+ if (vdec->filtergraph) {
+ vdec->filtergraph->Release();
+ vdec->filtergraph = NULL;
+ }
+
+ vdec->setup = FALSE;
+
+ return TRUE;
+ }
+
+ /* Register one GStreamer element per entry of video_dec_codecs whose
+  * DirectShow decoder filter is available on this system.  Returns FALSE if
+  * gst_element_register() fails for a supported codec, TRUE otherwise.
+  *
+  * BUG FIX: the failure path used to `return FALSE` from inside the loop,
+  * skipping the CoUninitialize() that balances the CoInitialize() above and
+  * leaking the COM apartment initialization.  We now break out and fall
+  * through to the common cleanup. */
+ gboolean
+ dshow_vdec_register (GstPlugin * plugin)
+ {
+   GTypeInfo info = {
+     sizeof (GstDshowVideoDecClass),
+     (GBaseInitFunc) gst_dshowvideodec_base_init,
+     NULL,
+     (GClassInitFunc) gst_dshowvideodec_class_init,
+     NULL,
+     NULL,
+     sizeof (GstDshowVideoDec),
+     0,
+     (GInstanceInitFunc) gst_dshowvideodec_init,
+   };
+   gboolean ret = TRUE;
+   gint i;
+   HRESULT hr;
+
+   GST_DEBUG_CATEGORY_INIT (dshowvideodec_debug, "dshowvideodec", 0,
+       "Directshow filter video decoder");
+
+   hr = CoInitialize (0);
+
+   for (i = 0; i < sizeof (video_dec_codecs) / sizeof (VideoCodecEntry); i++) {
+     GType type;
+     IBaseFilterPtr filter;
+     guint rank = GST_RANK_MARGINAL;
+
+     filter = gst_dshow_find_filter (
+             video_dec_codecs[i].input_majortype,
+             video_dec_codecs[i].input_subtype,
+             video_dec_codecs[i].output_majortype,
+             video_dec_codecs[i].output_subtype,
+             video_dec_codecs[i].preferred_filters);
+     if (filter != NULL) {
+
+       if (video_dec_codecs[i].format == GST_MAKE_FOURCC ('W', 'V', 'C', '1')) {
+         /* FFMPEG WVC1 decoder sucks, get higher priority for ours */
+         rank = GST_RANK_MARGINAL + 2;
+       }
+       GST_DEBUG ("Registering %s with rank %u", video_dec_codecs[i].element_name, rank);
+
+       type = g_type_register_static (GST_TYPE_ELEMENT,
+           video_dec_codecs[i].element_name, &info, (GTypeFlags)0);
+       g_type_set_qdata (type, DSHOW_CODEC_QDATA, (gpointer) (video_dec_codecs + i));
+       if (!gst_element_register (plugin, video_dec_codecs[i].element_name, rank, type)) {
+         /* break instead of returning so CoUninitialize() still runs */
+         ret = FALSE;
+         break;
+       }
+       GST_DEBUG ("Registered %s", video_dec_codecs[i].element_name);
+     } else {
+       GST_DEBUG ("Element %s not registered "
+           "(the format is not supported by the system)",
+           video_dec_codecs[i].element_name);
+     }
+   }
+
+   /* only balance a CoInitialize() that actually succeeded */
+   if (SUCCEEDED(hr))
+     CoUninitialize ();
+
+   return ret;
+ }
--- /dev/null
+ /*
+  * dvbbasebin.c -
+  * Copyright (C) 2007 Alessandro Decina
+  * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+  *
+ * Authors:
+ * Alessandro Decina <alessandro.d@gmail.com>
+ * Reynaldo H. Verdejo Pinochet <reynaldo@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+ #include <stdlib.h>
+ #include <string.h>
+ #include <gst/mpegts/mpegts.h>
+ #include "gstdvbelements.h"
+ #include "dvbbasebin.h"
+ #include "parsechannels.h"
+
+ GST_DEBUG_CATEGORY (dvb_base_bin_debug);
+ #define GST_CAT_DEFAULT dvb_base_bin_debug
+
+ static GstStaticPadTemplate src_template =
+ GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpegts, " "systemstream = (boolean) true ")
+ );
+
+ static GstStaticPadTemplate program_template =
+ GST_STATIC_PAD_TEMPLATE ("program_%u", GST_PAD_SRC,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("video/mpegts, " "systemstream = (boolean) true ")
+ );
+
+ enum
+ {
+ /* FILL ME */
+ SIGNAL_TUNING_START,
+ SIGNAL_TUNING_DONE,
+ SIGNAL_TUNING_FAIL,
+ SIGNAL_TUNE,
+ LAST_SIGNAL
+ };
+
+ enum
+ {
+ ARG_0,
+ PROP_ADAPTER,
+ PROP_FRONTEND,
+ PROP_DISEQC_SRC,
+ PROP_FREQUENCY,
+ PROP_POLARITY,
+ PROP_SYMBOL_RATE,
+ PROP_BANDWIDTH,
+ PROP_CODE_RATE_HP,
+ PROP_CODE_RATE_LP,
+ PROP_GUARD,
+ PROP_MODULATION,
+ PROP_TRANS_MODE,
+ PROP_HIERARCHY,
+ PROP_INVERSION,
+ PROP_PROGRAM_NUMBERS,
+ PROP_STATS_REPORTING_INTERVAL,
+ PROP_TUNING_TIMEOUT,
+ PROP_DELSYS,
+ PROP_PILOT,
+ PROP_ROLLOFF,
+ PROP_STREAM_ID,
+ PROP_BANDWIDTH_HZ,
+ PROP_ISDBT_LAYER_ENABLED,
+ PROP_ISDBT_PARTIAL_RECEPTION,
+ PROP_ISDBT_SOUND_BROADCASTING,
+ PROP_ISDBT_SB_SUBCHANNEL_ID,
+ PROP_ISDBT_SB_SEGMENT_IDX,
+ PROP_ISDBT_SB_SEGMENT_COUNT,
+ PROP_ISDBT_LAYERA_FEC,
+ PROP_ISDBT_LAYERA_MODULATION,
+ PROP_ISDBT_LAYERA_SEGMENT_COUNT,
+ PROP_ISDBT_LAYERA_TIME_INTERLEAVING,
+ PROP_ISDBT_LAYERB_FEC,
+ PROP_ISDBT_LAYERB_MODULATION,
+ PROP_ISDBT_LAYERB_SEGMENT_COUNT,
+ PROP_ISDBT_LAYERB_TIME_INTERLEAVING,
+ PROP_ISDBT_LAYERC_FEC,
+ PROP_ISDBT_LAYERC_MODULATION,
+ PROP_ISDBT_LAYERC_SEGMENT_COUNT,
+ PROP_ISDBT_LAYERC_TIME_INTERLEAVING,
+ PROP_LNB_SLOF,
+ PROP_LNB_LOF1,
+ PROP_LNB_LOF2,
+ PROP_INTERLEAVING
+ };
+
+ /* Per-PID bookkeeping: a PID stays in the dvbsrc "pids" filter as long as
+  * its usecount is non-zero. */
+ typedef struct
+ {
+   guint16 pid;
+   guint usecount;              /* number of active users of this PID */
+ } DvbBaseBinStream;
+
+ /* Per-program bookkeeping, keyed by program number in the programs table. */
+ typedef struct
+ {
+   gint program_number;
+   guint16 pmt_pid;             /* G_MAXUINT16 until learnt from the PAT */
+   guint16 pcr_pid;             /* G_MAXUINT16 until learnt from the PMT */
+   GstMpegtsSection *section;
+   GstMpegtsSection *old_section;
+   const GstMpegtsPMT *pmt;
+   const GstMpegtsPMT *old_pmt; /* previous PMT, kept while switching */
+   gboolean selected;           /* requested via program-numbers property */
+   gboolean pmt_active;         /* PMT/PCR PIDs currently refcounted */
+   gboolean active;             /* program streams currently refcounted */
+   GstPad *ghost;
+ } DvbBaseBinProgram;
+
+ static void dvb_base_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void dvb_base_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void dvb_base_bin_dispose (GObject * object);
+ static void dvb_base_bin_finalize (GObject * object);
+
+ static void dvb_base_bin_task (DvbBaseBin * basebin);
+
+ static GstStateChangeReturn dvb_base_bin_change_state (GstElement * element,
+ GstStateChange transition);
+ static void dvb_base_bin_handle_message (GstBin * bin, GstMessage * message);
+ static void dvb_base_bin_pat_info_cb (DvbBaseBin * dvbbasebin,
+ GstMpegtsSection * pat);
+ static void dvb_base_bin_pmt_info_cb (DvbBaseBin * dvbbasebin,
+ GstMpegtsSection * pmt);
+ static GstPad *dvb_base_bin_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+ static void dvb_base_bin_release_pad (GstElement * element, GstPad * pad);
+ static void dvb_base_bin_rebuild_filter (DvbBaseBin * dvbbasebin);
+ static void
+ dvb_base_bin_deactivate_program (DvbBaseBin * dvbbasebin,
+ DvbBaseBinProgram * program);
+
+ static void dvb_base_bin_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+ static void dvb_base_bin_program_destroy (gpointer data);
+
+ /* Proxy callbacks for dvbsrc signals */
+ static void tuning_start_signal_cb (GObject * object, DvbBaseBin * dvbbasebin);
+ static void tuning_done_signal_cb (GObject * object, DvbBaseBin * dvbbasebin);
+ static void tuning_fail_signal_cb (GObject * object, DvbBaseBin * dvbbasebin);
+
+ static void dvb_base_bin_do_tune (DvbBaseBin * dvbbasebin);
+
+ #define dvb_base_bin_parent_class parent_class
+ G_DEFINE_TYPE_EXTENDED (DvbBaseBin, dvb_base_bin, GST_TYPE_BIN,
+ 0,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ dvb_base_bin_uri_handler_init));
+ #define _do_init \
+ GST_DEBUG_CATEGORY_INIT (dvb_base_bin_debug, "dvbbasebin", 0, "DVB bin"); \
+ cam_init (); \
+ dvb_element_init (plugin);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dvbbasebin, "dvbbasebin", GST_RANK_NONE,
+ GST_TYPE_DVB_BASE_BIN, _do_init);
+
+ /* Take one more use of @stream; the PID remains in the dvbsrc filter while
+  * its usecount stays above zero. */
+ static void
+ dvb_base_bin_ref_stream (DvbBaseBinStream * stream)
+ {
+   g_return_if_fail (stream != NULL);
+
+   stream->usecount += 1;
+ }
+
+ /* Drop one use of @stream.  Warns (and bails) on a NULL stream or an
+  * already-zero usecount instead of underflowing. */
+ static void
+ dvb_base_bin_unref_stream (DvbBaseBinStream * stream)
+ {
+   g_return_if_fail (stream != NULL);
+   g_return_if_fail (stream->usecount > 0);
+
+   stream->usecount -= 1;
+ }
+
+ /* Allocate a fresh stream entry for @pid (usecount starts at 0) and insert
+  * it into the bin's stream table, keyed by PID.  Ownership stays with the
+  * hash table (value destroy is g_free). */
+ static DvbBaseBinStream *
+ dvb_base_bin_add_stream (DvbBaseBin * dvbbasebin, guint16 pid)
+ {
+   DvbBaseBinStream *entry = g_new0 (DvbBaseBinStream, 1);
+
+   entry->pid = pid;
+   entry->usecount = 0;
+
+   g_hash_table_insert (dvbbasebin->streams, GINT_TO_POINTER ((gint) pid),
+       entry);
+
+   return entry;
+ }
+
+ /* Look up the stream entry for @pid; NULL when the PID is not tracked. */
+ static DvbBaseBinStream *
+ dvb_base_bin_get_stream (DvbBaseBin * dvbbasebin, guint16 pid)
+ {
+   gpointer entry = g_hash_table_lookup (dvbbasebin->streams,
+       GINT_TO_POINTER ((gint) pid));
+
+   return (DvbBaseBinStream *) entry;
+ }
+
+ /* Allocate a fresh program entry for @program_number and insert it into the
+  * bin's program table.  Ownership stays with the hash table (value destroy
+  * is dvb_base_bin_program_destroy). */
+ static DvbBaseBinProgram *
+ dvb_base_bin_add_program (DvbBaseBin * dvbbasebin, gint program_number)
+ {
+   DvbBaseBinProgram *prog = g_new0 (DvbBaseBinProgram, 1);
+
+   prog->program_number = program_number;
+   prog->selected = FALSE;
+   prog->active = FALSE;
+   /* G_MAXUINT16 doubles as "PID not known yet" */
+   prog->pmt_pid = G_MAXUINT16;
+   prog->pcr_pid = G_MAXUINT16;
+   prog->pmt = NULL;
+   prog->old_pmt = NULL;
+
+   g_hash_table_insert (dvbbasebin->programs,
+       GINT_TO_POINTER (program_number), prog);
+
+   return prog;
+ }
+
+ /* Look up the program entry for @program_number; NULL if unknown. */
+ static DvbBaseBinProgram *
+ dvb_base_bin_get_program (DvbBaseBin * dvbbasebin, gint program_number)
+ {
+   gpointer entry = g_hash_table_lookup (dvbbasebin->programs,
+       GINT_TO_POINTER (program_number));
+
+   return (DvbBaseBinProgram *) entry;
+ }
+
+
+ static guint dvb_base_bin_signals[LAST_SIGNAL] = { 0 };
+
+ /* The three callbacks below simply re-emit the corresponding dvbsrc signal
+  * on the bin, so applications can connect to the bin directly. */
+ static void
+ tuning_start_signal_cb (GObject * object, DvbBaseBin * dvbbasebin)
+ {
+   g_signal_emit (dvbbasebin, dvb_base_bin_signals[SIGNAL_TUNING_START], 0);
+ }
+
+ static void
+ tuning_done_signal_cb (GObject * object, DvbBaseBin * dvbbasebin)
+ {
+   g_signal_emit (dvbbasebin, dvb_base_bin_signals[SIGNAL_TUNING_DONE], 0);
+ }
+
+ static void
+ tuning_fail_signal_cb (GObject * object, DvbBaseBin * dvbbasebin)
+ {
+   g_signal_emit (dvbbasebin, dvb_base_bin_signals[SIGNAL_TUNING_FAIL], 0);
+ }
+
+ /* Default handler for the "tune" action signal: forward it to dvbsrc. */
+ static void
+ dvb_base_bin_do_tune (DvbBaseBin * dvbbasebin)
+ {
+   g_signal_emit_by_name (dvbbasebin->dvbsrc, "tune", NULL);
+ }
+
+ /* Class initialisation: wires vmethods, pad templates and metadata, then
+  * mirrors a fixed table of dvbsrc properties onto this bin by cloning each
+  * dvbsrc GParamSpec (int/uint/uint64/string/enum), so set/get_property can
+  * proxy them straight through to the contained dvbsrc element. */
+ static void
+ dvb_base_bin_class_init (DvbBaseBinClass * klass)
+ {
+   GObjectClass *gobject_class;
+   GstElementClass *element_class;
+   GstBinClass *bin_class;
+   DvbBaseBinClass *dvbbasebin_class;
+   GstElementFactory *dvbsrc_factory;
+   GObjectClass *dvbsrc_class;
+   typedef struct
+   {
+     guint prop_id;
+     const gchar *prop_name;
+   } ProxyedProperty;
+   ProxyedProperty *walk;
+   /* table of dvbsrc properties re-exposed on the bin; {0, NULL}-terminated */
+   ProxyedProperty proxyed_properties[] = {
+     {PROP_ADAPTER, "adapter"},
+     {PROP_FRONTEND, "frontend"},
+     {PROP_DISEQC_SRC, "diseqc-source"},
+     {PROP_FREQUENCY, "frequency"},
+     {PROP_POLARITY, "polarity"},
+     {PROP_SYMBOL_RATE, "symbol-rate"},
+ #ifndef GST_REMOVE_DEPRECATED
+     {PROP_BANDWIDTH, "bandwidth"},
+ #endif
+     {PROP_CODE_RATE_HP, "code-rate-hp"},
+     {PROP_CODE_RATE_LP, "code-rate-lp"},
+     {PROP_GUARD, "guard"},
+     {PROP_MODULATION, "modulation"},
+     {PROP_TRANS_MODE, "trans-mode"},
+     {PROP_HIERARCHY, "hierarchy"},
+     {PROP_INVERSION, "inversion"},
+     {PROP_STATS_REPORTING_INTERVAL, "stats-reporting-interval"},
+     {PROP_TUNING_TIMEOUT, "tuning-timeout"},
+     {PROP_DELSYS, "delsys"},
+     {PROP_PILOT, "pilot"},
+     {PROP_ROLLOFF, "rolloff"},
+     {PROP_STREAM_ID, "stream-id"},
+     {PROP_BANDWIDTH_HZ, "bandwidth-hz"},
+     {PROP_ISDBT_LAYER_ENABLED, "isdbt-layer-enabled"},
+     {PROP_ISDBT_PARTIAL_RECEPTION, "isdbt-partial-reception"},
+     {PROP_ISDBT_SOUND_BROADCASTING, "isdbt-sound-broadcasting"},
+     {PROP_ISDBT_SB_SUBCHANNEL_ID, "isdbt-sb-subchannel-id"},
+     {PROP_ISDBT_SB_SEGMENT_IDX, "isdbt-sb-segment-idx"},
+     {PROP_ISDBT_SB_SEGMENT_COUNT, "isdbt-sb-segment-count"},
+     {PROP_ISDBT_LAYERA_FEC, "isdbt-layera-fec"},
+     {PROP_ISDBT_LAYERA_MODULATION, "isdbt-layera-modulation"},
+     {PROP_ISDBT_LAYERA_SEGMENT_COUNT, "isdbt-layera-segment-count"},
+     {PROP_ISDBT_LAYERA_TIME_INTERLEAVING, "isdbt-layera-time-interleaving"},
+     {PROP_ISDBT_LAYERB_FEC, "isdbt-layerb-fec"},
+     {PROP_ISDBT_LAYERB_MODULATION, "isdbt-layerb-modulation"},
+     {PROP_ISDBT_LAYERB_SEGMENT_COUNT, "isdbt-layerb-segment-count"},
+     {PROP_ISDBT_LAYERB_TIME_INTERLEAVING, "isdbt-layerb-time-interleaving"},
+     {PROP_ISDBT_LAYERC_FEC, "isdbt-layerc-fec"},
+     {PROP_ISDBT_LAYERC_MODULATION, "isdbt-layerc-modulation"},
+     {PROP_ISDBT_LAYERC_SEGMENT_COUNT, "isdbt-layerc-segment-count"},
+     {PROP_ISDBT_LAYERC_TIME_INTERLEAVING, "isdbt-layerc-time-interleaving"},
+     {PROP_LNB_SLOF, "lnb-slof"},
+     {PROP_LNB_LOF1, "lnb-lof1"},
+     {PROP_LNB_LOF2, "lnb-lof2"},
+     {PROP_INTERLEAVING, "interleaving"},
+     {0, NULL}
+   };
+
+   bin_class = GST_BIN_CLASS (klass);
+   bin_class->handle_message = dvb_base_bin_handle_message;
+
+   element_class = GST_ELEMENT_CLASS (klass);
+
+   element_class->change_state = dvb_base_bin_change_state;
+   element_class->request_new_pad = dvb_base_bin_request_new_pad;
+   element_class->release_pad = dvb_base_bin_release_pad;
+
+   gst_element_class_add_static_pad_template (element_class, &program_template);
+   gst_element_class_add_static_pad_template (element_class, &src_template);
+
+   gst_element_class_set_static_metadata (element_class, "DVB bin",
+       "Source/Bin/Video",
+       "Access descramble and split DVB streams",
+       "Alessandro Decina <alessandro.d@gmail.com>\n"
+       "Reynaldo H. Verdejo Pinochet <reynaldo@osg.samsung.com>");
+
+   gobject_class = G_OBJECT_CLASS (klass);
+   gobject_class->set_property = dvb_base_bin_set_property;
+   gobject_class->get_property = dvb_base_bin_get_property;
+   gobject_class->dispose = dvb_base_bin_dispose;
+   gobject_class->finalize = dvb_base_bin_finalize;
+
+   dvbbasebin_class = (DvbBaseBinClass *) klass;
+   dvbbasebin_class->do_tune = dvb_base_bin_do_tune;
+
+   /* install dvbsrc properties */
+   /* NOTE(review): assumes the dvbsrc factory is always present; a NULL
+    * return from gst_element_factory_find() would crash here — confirm the
+    * plugin registration order guarantees this. */
+   dvbsrc_factory = gst_element_factory_find ("dvbsrc");
+   dvbsrc_class =
+       g_type_class_ref (gst_element_factory_get_element_type (dvbsrc_factory));
+   walk = proxyed_properties;
+   while (walk->prop_name != NULL) {
+     GParamSpec *pspec;
+     GParamSpec *our_pspec;
+
+     pspec = g_object_class_find_property (dvbsrc_class, walk->prop_name);
+     if (pspec != NULL) {
+       GType param_type = G_PARAM_SPEC_TYPE (pspec);
+
+       /* clone the pspec with the same range/default/flags so the bin's
+        * property behaves exactly like dvbsrc's */
+       if (param_type == G_TYPE_PARAM_INT) {
+         GParamSpecInt *src_pspec = G_PARAM_SPEC_INT (pspec);
+
+         our_pspec = g_param_spec_int (g_param_spec_get_name (pspec),
+             g_param_spec_get_nick (pspec), g_param_spec_get_blurb (pspec),
+             src_pspec->minimum, src_pspec->maximum, src_pspec->default_value,
+             pspec->flags);
+       } else if (param_type == G_TYPE_PARAM_UINT) {
+         GParamSpecUInt *src_pspec = G_PARAM_SPEC_UINT (pspec);
+
+         our_pspec = g_param_spec_uint (g_param_spec_get_name (pspec),
+             g_param_spec_get_nick (pspec), g_param_spec_get_blurb (pspec),
+             src_pspec->minimum, src_pspec->maximum, src_pspec->default_value,
+             pspec->flags);
+       } else if (param_type == G_TYPE_PARAM_UINT64) {
+         GParamSpecUInt64 *src_pspec = G_PARAM_SPEC_UINT64 (pspec);
+
+         our_pspec = g_param_spec_uint64 (g_param_spec_get_name (pspec),
+             g_param_spec_get_nick (pspec), g_param_spec_get_blurb (pspec),
+             src_pspec->minimum, src_pspec->maximum, src_pspec->default_value,
+             pspec->flags);
+       } else if (param_type == G_TYPE_PARAM_STRING) {
+         GParamSpecString *src_pspec = G_PARAM_SPEC_STRING (pspec);
+
+         our_pspec = g_param_spec_string (g_param_spec_get_name (pspec),
+             g_param_spec_get_nick (pspec), g_param_spec_get_blurb (pspec),
+             src_pspec->default_value, pspec->flags);
+       } else if (param_type == G_TYPE_PARAM_ENUM) {
+         GParamSpecEnum *src_pspec = G_PARAM_SPEC_ENUM (pspec);
+
+         our_pspec = g_param_spec_enum (g_param_spec_get_name (pspec),
+             g_param_spec_get_nick (pspec), g_param_spec_get_blurb (pspec),
+             pspec->value_type, src_pspec->default_value, pspec->flags);
+       } else {
+         GST_ERROR ("Unsupported property type %s for property %s",
+             g_type_name (param_type), g_param_spec_get_name (pspec));
+         ++walk;
+         continue;
+       }
+
+       g_object_class_install_property (gobject_class, walk->prop_id, our_pspec);
+     } else {
+       g_warning ("dvbsrc has no property named %s", walk->prop_name);
+     }
+     ++walk;
+   }
+   g_type_class_unref (dvbsrc_class);
+
+   g_object_class_install_property (gobject_class, PROP_PROGRAM_NUMBERS,
+       g_param_spec_string ("program-numbers",
+           "Program Numbers",
+           "Colon separated list of programs", "", G_PARAM_READWRITE));
+   /**
+    * DvbBaseBin::tuning-start:
+    * @dvbbasebin: the element on which the signal is emitted
+    *
+    * Signal emitted when the element first attempts to tune the
+    * frontend tuner to a given frequency.
+    */
+   dvb_base_bin_signals[SIGNAL_TUNING_START] =
+       g_signal_new ("tuning-start", G_TYPE_FROM_CLASS (klass),
+       G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 0);
+   /**
+    * DvbBaseBin::tuning-done:
+    * @dvbbasebin: the element on which the signal is emitted
+    *
+    * Signal emitted when the tuner has successfully got a lock on a signal.
+    */
+   dvb_base_bin_signals[SIGNAL_TUNING_DONE] =
+       g_signal_new ("tuning-done", G_TYPE_FROM_CLASS (klass),
+       G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 0);
+   /**
+    * DvbBaseBin::tuning-fail:
+    * @dvbbasebin: the element on which the signal is emitted
+    *
+    * Signal emitted when the tuner failed to get a lock on the
+    * signal.
+    */
+   dvb_base_bin_signals[SIGNAL_TUNING_FAIL] =
+       g_signal_new ("tuning-fail", G_TYPE_FROM_CLASS (klass),
+       G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 0);
+
+   /**
+    * DvbBaseBin::tune:
+    * @dvbbasebin: the element on which the signal is emitted
+    *
+    * Signal emitted from the application to the element, instructing it
+    * to tune.
+    */
+   dvb_base_bin_signals[SIGNAL_TUNE] =
+       g_signal_new ("tune", G_TYPE_FROM_CLASS (klass),
+       G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+       G_STRUCT_OFFSET (DvbBaseBinClass, do_tune), NULL, NULL, NULL,
+       G_TYPE_NONE, 0);
+ }
+
+ /* Close and free the hardware CAM (if one was opened) and re-arm trycam so
+  * the next task iteration probes for a CAM again. */
+ static void
+ dvb_base_bin_reset (DvbBaseBin * dvbbasebin)
+ {
+   if (dvbbasebin->hwcam) {
+     cam_device_close (dvbbasebin->hwcam);
+     cam_device_free (dvbbasebin->hwcam);
+     dvbbasebin->hwcam = NULL;
+   }
+   dvbbasebin->trycam = TRUE;
+ }
+
+ static const gint16 initial_pids[] = { 0, 1, 0x10, 0x11, 0x12, 0x14, -1 };
+
+ /* Instance init: build the internal pipeline dvbsrc ! queue ! tsparse,
+  * proxy dvbsrc's tuning signals, expose tsparse's src pad as a ghost pad,
+  * set up the stream/program tables, seed the PID filter with the standard
+  * SI PIDs and prepare the streaming task and poll object. */
+ static void
+ dvb_base_bin_init (DvbBaseBin * dvbbasebin)
+ {
+   DvbBaseBinStream *stream;
+   GstPad *ghost, *pad;
+   int i;
+
+   dvbbasebin->dvbsrc = gst_element_factory_make ("dvbsrc", NULL);
+   dvbbasebin->buffer_queue = gst_element_factory_make ("queue", NULL);
+   dvbbasebin->tsparse = gst_element_factory_make ("tsparse", NULL);
+
+   /* unlimited queue: never drop TS packets between dvbsrc and tsparse */
+   g_object_set (dvbbasebin->buffer_queue, "max-size-buffers", 0,
+       "max-size-bytes", 0, "max-size-time", (guint64) 0, NULL);
+
+   gst_bin_add_many (GST_BIN (dvbbasebin), dvbbasebin->dvbsrc,
+       dvbbasebin->buffer_queue, dvbbasebin->tsparse, NULL);
+
+   /* NOTE(review): link result is ignored here; a missing tsparse is only
+    * reported later in change_state — confirm that is intentional. */
+   gst_element_link_many (dvbbasebin->dvbsrc,
+       dvbbasebin->buffer_queue, dvbbasebin->tsparse, NULL);
+
+   /* Proxy dvbsrc signals */
+   g_signal_connect (dvbbasebin->dvbsrc, "tuning-start",
+       G_CALLBACK (tuning_start_signal_cb), dvbbasebin);
+   g_signal_connect (dvbbasebin->dvbsrc, "tuning-done",
+       G_CALLBACK (tuning_done_signal_cb), dvbbasebin);
+   g_signal_connect (dvbbasebin->dvbsrc, "tuning-fail",
+       G_CALLBACK (tuning_fail_signal_cb), dvbbasebin);
+
+   /* Expose tsparse source pad */
+   if (dvbbasebin->tsparse != NULL) {
+     pad = gst_element_get_static_pad (dvbbasebin->tsparse, "src");
+     ghost = gst_ghost_pad_new ("src", pad);
+     gst_object_unref (pad);
+   } else {
+     /* keep the advertised always-pad even without tsparse */
+     ghost = gst_ghost_pad_new_no_target ("src", GST_PAD_SRC);
+   }
+   gst_element_add_pad (GST_ELEMENT (dvbbasebin), ghost);
+
+   dvbbasebin->programs = g_hash_table_new_full (g_direct_hash, g_direct_equal,
+       NULL, dvb_base_bin_program_destroy);
+   dvbbasebin->streams = g_hash_table_new_full (g_direct_hash, g_direct_equal,
+       NULL, g_free);
+
+   dvbbasebin->pmtlist = NULL;
+   dvbbasebin->pmtlist_changed = FALSE;
+
+   dvbbasebin->disposed = FALSE;
+   dvb_base_bin_reset (dvbbasebin);
+
+   /* add PAT, CAT, NIT, SDT, EIT, TDT to pids filter for dvbsrc */
+   i = 0;
+   while (initial_pids[i] >= 0) {
+     stream = dvb_base_bin_add_stream (dvbbasebin, (guint16) initial_pids[i]);
+     dvb_base_bin_ref_stream (stream);
+     i++;
+   }
+   dvb_base_bin_rebuild_filter (dvbbasebin);
+
+   g_rec_mutex_init (&dvbbasebin->lock);
+   dvbbasebin->task =
+       gst_task_new ((GstTaskFunction) dvb_base_bin_task, dvbbasebin, NULL);
+   gst_task_set_lock (dvbbasebin->task, &dvbbasebin->lock);
+   dvbbasebin->poll = gst_poll_new (TRUE);
+ }
+
+ /* Dispose: tear down the internal pipeline and task machinery exactly once
+  * (guarded by the disposed flag, since dispose may run more than once). */
+ static void
+ dvb_base_bin_dispose (GObject * object)
+ {
+   DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (object);
+
+   if (!dvbbasebin->disposed) {
+     /* remove mpegtsparse BEFORE dvbsrc, since the mpegtsparse::pad-removed
+      * signal handler uses dvbsrc */
+     dvb_base_bin_reset (dvbbasebin);
+     if (dvbbasebin->tsparse != NULL)
+       gst_bin_remove (GST_BIN (dvbbasebin), dvbbasebin->tsparse);
+     gst_bin_remove (GST_BIN (dvbbasebin), dvbbasebin->dvbsrc);
+     gst_bin_remove (GST_BIN (dvbbasebin), dvbbasebin->buffer_queue);
+     g_free (dvbbasebin->program_numbers);
+     gst_poll_free (dvbbasebin->poll);
+     gst_object_unref (dvbbasebin->task);
+     g_rec_mutex_clear (&dvbbasebin->lock);
+     dvbbasebin->disposed = TRUE;
+   }
+
+   if (G_OBJECT_CLASS (parent_class)->dispose)
+     G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ /* Finalize: free the bookkeeping containers (the tables own their values
+  * via their destroy functions; the pmtlist nodes only borrow the PMTs). */
+ static void
+ dvb_base_bin_finalize (GObject * object)
+ {
+   DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (object);
+
+   g_hash_table_destroy (dvbbasebin->streams);
+   g_hash_table_destroy (dvbbasebin->programs);
+   g_list_free (dvbbasebin->pmtlist);
+
+   if (G_OBJECT_CLASS (parent_class)->finalize)
+     G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* Parse the colon-separated "program-numbers" property value, creating an
+  * entry for each listed program and marking it selected, then remember the
+  * raw string for get_property.
+  *
+  * BUG FIX: `program->selected = TRUE` was only executed inside the
+  * `program == NULL` branch, so programs that already had an entry (e.g.
+  * created earlier by PAT handling) were never marked selected.  The flag
+  * must be set for every listed program, new or pre-existing. */
+ static void
+ dvb_base_bin_set_program_numbers (DvbBaseBin * dvbbasebin, const gchar * pn)
+ {
+   gchar **strv, **walk;
+   DvbBaseBinProgram *program;
+
+   /* Split up and update programs */
+   strv = g_strsplit (pn, ":", 0);
+
+   for (walk = strv; *walk; walk++) {
+     gint program_number = strtol (*walk, NULL, 0);
+
+     program = dvb_base_bin_get_program (dvbbasebin, program_number);
+     if (program == NULL)
+       program = dvb_base_bin_add_program (dvbbasebin, program_number);
+     /* select whether the program was just added or already known */
+     program->selected = TRUE;
+   }
+
+   g_strfreev (strv);
+
+   /* FIXME : Deactivate programs no longer selected */
+
+   g_free (dvbbasebin->program_numbers);
+   dvbbasebin->program_numbers = g_strdup (pn);
+
+   /* silence "defined but not used" until the FIXME above is implemented */
+   if (0)
+     dvb_base_bin_deactivate_program (dvbbasebin, NULL);
+ }
+
+ /* Property setter: every tuning property is proxied verbatim to the
+  * contained dvbsrc element (the pspecs were cloned from it in class_init);
+  * only "program-numbers" is handled by the bin itself. */
+ static void
+ dvb_base_bin_set_property (GObject * object, guint prop_id,
+     const GValue * value, GParamSpec * pspec)
+ {
+   DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (object);
+
+   switch (prop_id) {
+     case PROP_ADAPTER:
+     case PROP_DISEQC_SRC:
+     case PROP_FRONTEND:
+     case PROP_FREQUENCY:
+     case PROP_POLARITY:
+     case PROP_SYMBOL_RATE:
+     case PROP_BANDWIDTH:
+     case PROP_CODE_RATE_HP:
+     case PROP_CODE_RATE_LP:
+     case PROP_GUARD:
+     case PROP_MODULATION:
+     case PROP_TRANS_MODE:
+     case PROP_HIERARCHY:
+     case PROP_INVERSION:
+     case PROP_STATS_REPORTING_INTERVAL:
+     case PROP_TUNING_TIMEOUT:
+     case PROP_DELSYS:
+     case PROP_PILOT:
+     case PROP_ROLLOFF:
+     case PROP_STREAM_ID:
+     case PROP_BANDWIDTH_HZ:
+     case PROP_ISDBT_LAYER_ENABLED:
+     case PROP_ISDBT_PARTIAL_RECEPTION:
+     case PROP_ISDBT_SOUND_BROADCASTING:
+     case PROP_ISDBT_SB_SUBCHANNEL_ID:
+     case PROP_ISDBT_SB_SEGMENT_IDX:
+     case PROP_ISDBT_SB_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERA_FEC:
+     case PROP_ISDBT_LAYERA_MODULATION:
+     case PROP_ISDBT_LAYERA_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERA_TIME_INTERLEAVING:
+     case PROP_ISDBT_LAYERB_FEC:
+     case PROP_ISDBT_LAYERB_MODULATION:
+     case PROP_ISDBT_LAYERB_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERB_TIME_INTERLEAVING:
+     case PROP_ISDBT_LAYERC_FEC:
+     case PROP_ISDBT_LAYERC_MODULATION:
+     case PROP_ISDBT_LAYERC_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERC_TIME_INTERLEAVING:
+     case PROP_LNB_SLOF:
+     case PROP_LNB_LOF1:
+     case PROP_LNB_LOF2:
+     case PROP_INTERLEAVING:
+       /* FIXME: check if we can tune (state < PLAYING || program-numbers == "") */
+       g_object_set_property (G_OBJECT (dvbbasebin->dvbsrc), pspec->name, value);
+       break;
+     case PROP_PROGRAM_NUMBERS:
+       dvb_base_bin_set_program_numbers (dvbbasebin, g_value_get_string (value));
+       break;
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+   }
+ }
+
+ /* Property getter: mirror of set_property — tuning properties are read
+  * straight from dvbsrc, "program-numbers" from the bin's own copy. */
+ static void
+ dvb_base_bin_get_property (GObject * object, guint prop_id,
+     GValue * value, GParamSpec * pspec)
+ {
+   DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (object);
+
+   switch (prop_id) {
+     case PROP_ADAPTER:
+     case PROP_FRONTEND:
+     case PROP_DISEQC_SRC:
+     case PROP_FREQUENCY:
+     case PROP_POLARITY:
+     case PROP_SYMBOL_RATE:
+     case PROP_BANDWIDTH:
+     case PROP_CODE_RATE_HP:
+     case PROP_CODE_RATE_LP:
+     case PROP_GUARD:
+     case PROP_MODULATION:
+     case PROP_TRANS_MODE:
+     case PROP_HIERARCHY:
+     case PROP_INVERSION:
+     case PROP_STATS_REPORTING_INTERVAL:
+     case PROP_TUNING_TIMEOUT:
+     case PROP_DELSYS:
+     case PROP_PILOT:
+     case PROP_ROLLOFF:
+     case PROP_STREAM_ID:
+     case PROP_BANDWIDTH_HZ:
+     case PROP_ISDBT_LAYER_ENABLED:
+     case PROP_ISDBT_PARTIAL_RECEPTION:
+     case PROP_ISDBT_SOUND_BROADCASTING:
+     case PROP_ISDBT_SB_SUBCHANNEL_ID:
+     case PROP_ISDBT_SB_SEGMENT_IDX:
+     case PROP_ISDBT_SB_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERA_FEC:
+     case PROP_ISDBT_LAYERA_MODULATION:
+     case PROP_ISDBT_LAYERA_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERA_TIME_INTERLEAVING:
+     case PROP_ISDBT_LAYERB_FEC:
+     case PROP_ISDBT_LAYERB_MODULATION:
+     case PROP_ISDBT_LAYERB_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERB_TIME_INTERLEAVING:
+     case PROP_ISDBT_LAYERC_FEC:
+     case PROP_ISDBT_LAYERC_MODULATION:
+     case PROP_ISDBT_LAYERC_SEGMENT_COUNT:
+     case PROP_ISDBT_LAYERC_TIME_INTERLEAVING:
+     case PROP_LNB_SLOF:
+     case PROP_LNB_LOF1:
+     case PROP_LNB_LOF2:
+     case PROP_INTERLEAVING:
+       g_object_get_property (G_OBJECT (dvbbasebin->dvbsrc), pspec->name, value);
+       break;
+     case PROP_PROGRAM_NUMBERS:
+       g_value_set_string (value, dvbbasebin->program_numbers);
+       break;
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+   }
+ }
+
+ /* Request-pad handler: request the matching pad from tsparse and expose it
+  * on the bin via a ghost pad with the same name.  Returns NULL if tsparse
+  * is missing or refuses the request.
+  *
+  * BUG FIX: a merge artifact left a second `gst_object_unref (pad)` after
+  * gst_element_add_pad(), double-unreffing the target pad (the ghost pad
+  * holds its own reference; our request reference was already dropped right
+  * after gst_ghost_pad_new()).  The duplicate unref is removed. */
+ static GstPad *
+ dvb_base_bin_request_new_pad (GstElement * element,
+     GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+ {
+   DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (element);
+   GstPad *pad;
+   GstPad *ghost;
+   gchar *pad_name;
+
+   GST_DEBUG_OBJECT (dvbbasebin, "New pad requested %s", GST_STR_NULL (name));
+
+   if (dvbbasebin->tsparse == NULL)
+     return NULL;
+
+   if (name == NULL)
+     name = GST_PAD_TEMPLATE_NAME_TEMPLATE (templ);
+
+   pad = gst_element_request_pad_simple (dvbbasebin->tsparse, name);
+   if (pad == NULL)
+     return NULL;
+
+   pad_name = gst_pad_get_name (pad);
+   ghost = gst_ghost_pad_new (pad_name, pad);
+   /* the ghost pad references its target; drop our request reference */
+   gst_object_unref (pad);
+   g_free (pad_name);
+   gst_element_add_pad (element, ghost);
+
+   return ghost;
+ }
+
+ /* Release-pad handler: hand the ghost pad's target back to tsparse and
+  * remove the ghost pad from the bin. */
+ static void
+ dvb_base_bin_release_pad (GstElement * element, GstPad * pad)
+ {
+   GstGhostPad *ghost;
+   GstPad *target;
+
+   g_return_if_fail (GST_IS_DVB_BASE_BIN (element));
+
+   ghost = GST_GHOST_PAD (pad);
+   /* NOTE(review): assumes the ghost pad always has a target (set in
+    * request_new_pad); a target-less ghost pad would pass NULL below. */
+   target = gst_ghost_pad_get_target (ghost);
+   gst_element_release_request_pad (GST_ELEMENT (GST_DVB_BASE_BIN
+           (element)->tsparse), target);
+   gst_object_unref (target);
+
+   gst_element_remove_pad (element, pad);
+ }
+
+ /* Push the current PMT list to the hardware CAM, tagging each PMT with the
+  * appropriate FIRST/MORE/LAST flag (or ONLY when the list has one entry),
+  * then clear the pmtlist_changed flag. */
+ static void
+ dvb_base_bin_reset_pmtlist (DvbBaseBin * dvbbasebin)
+ {
+   CamConditionalAccessPmtFlag flag;
+   GList *walk;
+   GstMpegtsPMT *pmt;
+
+   walk = dvbbasebin->pmtlist;
+   while (walk) {
+     /* derive the list-position flag from prev/next links */
+     if (walk->prev == NULL) {
+       if (walk->next == NULL)
+         flag = CAM_CONDITIONAL_ACCESS_PMT_FLAG_ONLY;
+       else
+         flag = CAM_CONDITIONAL_ACCESS_PMT_FLAG_FIRST;
+     } else {
+       if (walk->next == NULL)
+         flag = CAM_CONDITIONAL_ACCESS_PMT_FLAG_LAST;
+       else
+         flag = CAM_CONDITIONAL_ACCESS_PMT_FLAG_MORE;
+     }
+
+     pmt = (GstMpegtsPMT *) walk->data;
+     cam_device_set_pmt (dvbbasebin->hwcam, pmt, flag);
+
+     walk = walk->next;
+   }
+
+   dvbbasebin->pmtlist_changed = FALSE;
+ }
+
+ /* Probe /dev/dvb/adapterN/ca0 for a hardware CAM and open it if present;
+  * on failure hwcam stays NULL.  Clears trycam so the probe runs only once
+  * per tuning session. */
+ static void
+ dvb_base_bin_init_cam (DvbBaseBin * dvbbasebin)
+ {
+   gint adapter;
+   gchar *ca_file;
+
+   g_object_get (dvbbasebin->dvbsrc, "adapter", &adapter, NULL);
+   /* TODO: handle multiple cams */
+   ca_file = g_strdup_printf ("/dev/dvb/adapter%d/ca0", adapter);
+   if (g_file_test (ca_file, G_FILE_TEST_EXISTS)) {
+     dvbbasebin->hwcam = cam_device_new ();
+     /* device_open() can block up to 5s ! */
+     if (!cam_device_open (dvbbasebin->hwcam, ca_file)) {
+       GST_ERROR_OBJECT (dvbbasebin, "could not open %s", ca_file);
+       cam_device_free (dvbbasebin->hwcam);
+       dvbbasebin->hwcam = NULL;
+     }
+   }
+
+   dvbbasebin->trycam = FALSE;
+
+   g_free (ca_file);
+ }
+
+ /* State-change handler: refuse NULL->READY without a tsparse element, and
+  * start/stop the internal CAM task (with its poll object) around the
+  * READY<->PAUSED transitions. */
+ static GstStateChangeReturn
+ dvb_base_bin_change_state (GstElement * element, GstStateChange transition)
+ {
+   DvbBaseBin *dvbbasebin;
+   GstStateChangeReturn ret;
+
+   dvbbasebin = GST_DVB_BASE_BIN (element);
+
+   switch (transition) {
+     case GST_STATE_CHANGE_NULL_TO_READY:
+       if (dvbbasebin->tsparse == NULL) {
+         GST_ELEMENT_ERROR (dvbbasebin, CORE, MISSING_PLUGIN, (NULL),
+             ("No 'tsparse' element, check your GStreamer installation."));
+         return GST_STATE_CHANGE_FAILURE;
+       }
+       break;
+     default:
+       break;
+   }
+
+   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+   switch (transition) {
+     case GST_STATE_CHANGE_READY_TO_PAUSED:
+       gst_poll_set_flushing (dvbbasebin->poll, FALSE);
+       g_rec_mutex_lock (&dvbbasebin->lock);
+       gst_task_start (dvbbasebin->task);
+       g_rec_mutex_unlock (&dvbbasebin->lock);
+       break;
+     case GST_STATE_CHANGE_PAUSED_TO_READY:
+       /* flush the poll first so a blocked task iteration wakes up */
+       gst_poll_set_flushing (dvbbasebin->poll, TRUE);
+       g_rec_mutex_lock (&dvbbasebin->lock);
+       gst_task_stop (dvbbasebin->task);
+       g_rec_mutex_unlock (&dvbbasebin->lock);
+       dvb_base_bin_reset (dvbbasebin);
+       break;
+     default:
+       break;
+   }
+
+   return ret;
+ }
+
+ /* GHFunc for rebuild_filter: prepend "<pid>:" to the accumulating filter
+  * string (dvbbasebin->filter) for every stream still in use. */
+ static void
+ foreach_stream_build_filter (gpointer key, gpointer value, gpointer user_data)
+ {
+   DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (user_data);
+   DvbBaseBinStream *stream = (DvbBaseBinStream *) value;
+   gchar *tmp, *pid;
+
+   GST_DEBUG ("stream %d usecount %d", stream->pid, stream->usecount);
+
+   if (stream->usecount > 0) {
+     /* TODO: use g_strjoinv FTW */
+     tmp = dvbbasebin->filter;       /* keep old string so it can be freed */
+     pid = g_strdup_printf ("%d", stream->pid);
+     dvbbasebin->filter = g_strjoin (":", pid, dvbbasebin->filter, NULL);
+
+     g_free (pid);
+     g_free (tmp);
+   }
+ }
+
+ /* Rebuild the colon-separated PID list from all in-use streams and push it
+  * to dvbsrc's "pids" property; the temporary string is freed afterwards. */
+ static void
+ dvb_base_bin_rebuild_filter (DvbBaseBin * dvbbasebin)
+ {
+   g_hash_table_foreach (dvbbasebin->streams,
+       foreach_stream_build_filter, dvbbasebin);
+
+   if (dvbbasebin->filter == NULL)
+     /* fix dvbsrc to handle NULL filter */
+     dvbbasebin->filter = g_strdup ("");
+
+   GST_INFO_OBJECT (dvbbasebin, "rebuilt filter %s", dvbbasebin->filter);
+
+   /* FIXME: find a way to not add unwanted pids controlled by app */
+   g_object_set (dvbbasebin->dvbsrc, "pids", dvbbasebin->filter, NULL);
+   g_free (dvbbasebin->filter);
+   dvbbasebin->filter = NULL;
+ }
+
+ /* Drop one use of every elementary-stream PID listed in @pmt; unknown PIDs
+  * are logged and skipped rather than underflowing a refcount. */
+ static void
+ dvb_base_bin_remove_pmt_streams (DvbBaseBin * dvbbasebin,
+     const GstMpegtsPMT * pmt)
+ {
+   gint i;
+   DvbBaseBinStream *stream;
+
+   for (i = 0; i < pmt->streams->len; i++) {
+     GstMpegtsPMTStream *pmtstream = g_ptr_array_index (pmt->streams, i);
+
+     stream = dvb_base_bin_get_stream (dvbbasebin, pmtstream->pid);
+     if (stream == NULL) {
+       GST_WARNING_OBJECT (dvbbasebin, "removing unknown stream %d ??",
+           pmtstream->pid);
+       continue;
+     }
+
+     dvb_base_bin_unref_stream (stream);
+   }
+ }
+
+ /* Take one use of every elementary-stream PID listed in @pmt, creating a
+  * stream entry on first sight of a PID. */
+ static void
+ dvb_base_bin_add_pmt_streams (DvbBaseBin * dvbbasebin, const GstMpegtsPMT * pmt)
+ {
+   DvbBaseBinStream *stream;
+   gint i;
+
+   for (i = 0; i < pmt->streams->len; i++) {
+     GstMpegtsPMTStream *pmtstream = g_ptr_array_index (pmt->streams, i);
+
+     GST_DEBUG ("filtering stream %d stream_type %d", pmtstream->pid,
+         pmtstream->stream_type);
+
+     stream = dvb_base_bin_get_stream (dvbbasebin, pmtstream->pid);
+     if (stream == NULL)
+       stream = dvb_base_bin_add_stream (dvbbasebin, pmtstream->pid);
+     dvb_base_bin_ref_stream (stream);
+   }
+ }
+
+ /* Activate @program: drop any streams from its previous PMT, make sure its
+  * PMT and PCR PIDs are refcounted, add all its current PMT streams, queue
+  * the PMT for the CAM and rebuild the dvbsrc PID filter. */
+ static void
+ dvb_base_bin_activate_program (DvbBaseBin * dvbbasebin,
+     DvbBaseBinProgram * program)
+ {
+   DvbBaseBinStream *stream;
+
+   if (program->old_pmt) {
+     dvb_base_bin_remove_pmt_streams (dvbbasebin, program->old_pmt);
+     dvbbasebin->pmtlist = g_list_remove (dvbbasebin->pmtlist, program->old_pmt);
+   }
+
+   /* activate the PMT and PCR streams. If the PCR stream is in the PMT its
+    * usecount will be incremented by 2 here and decremented by 2 when the
+    * program is deactivated */
+   if (!program->pmt_active) {
+     stream = dvb_base_bin_get_stream (dvbbasebin, program->pmt_pid);
+     if (stream == NULL)
+       stream = dvb_base_bin_add_stream (dvbbasebin, program->pmt_pid);
+     dvb_base_bin_ref_stream (stream);
+     program->pmt_active = TRUE;
+   }
+
+   if (program->pmt) {
+     guint16 old_pcr_pid;
+
+     /* PCR PID may move between PMT versions: unref the old one */
+     old_pcr_pid = program->pcr_pid;
+     program->pcr_pid = program->pmt->pcr_pid;
+     if (old_pcr_pid != G_MAXUINT16 && old_pcr_pid != program->pcr_pid) {
+       dvb_base_bin_unref_stream (dvb_base_bin_get_stream (dvbbasebin,
+               old_pcr_pid));
+     }
+
+     stream = dvb_base_bin_get_stream (dvbbasebin, program->pcr_pid);
+     if (stream == NULL)
+       stream = dvb_base_bin_add_stream (dvbbasebin, program->pcr_pid);
+     dvb_base_bin_ref_stream (stream);
+
+     dvb_base_bin_add_pmt_streams (dvbbasebin, program->pmt);
+     dvbbasebin->pmtlist =
+         g_list_append (dvbbasebin->pmtlist, (gpointer) program->pmt);
+     dvbbasebin->pmtlist_changed = TRUE;
+     program->active = TRUE;
+   }
+
+   dvb_base_bin_rebuild_filter (dvbbasebin);
+ }
+
/* Deactivate @program: release the references taken on its PMT PID,
 * PCR PID and all PMT elementary streams by
 * dvb_base_bin_activate_program(), then rebuild the PID filter.
 * NOTE(review): the PMT/PCR unrefs are unconditional (not gated on
 * pmt_active/active), so callers appear to only invoke this for programs
 * that were actually activated — confirm at call sites. */
static void
dvb_base_bin_deactivate_program (DvbBaseBin * dvbbasebin,
    DvbBaseBinProgram * program)
{
  DvbBaseBinStream *stream;

  /* release the PMT PID reference */
  stream = dvb_base_bin_get_stream (dvbbasebin, program->pmt_pid);
  if (stream != NULL) {
    dvb_base_bin_unref_stream (stream);
  }

  /* release the PCR PID reference (counted separately even when the PCR
   * PID is also listed in the PMT) */
  stream = dvb_base_bin_get_stream (dvbbasebin, program->pcr_pid);
  if (stream != NULL) {
    dvb_base_bin_unref_stream (stream);
  }

  /* release each elementary stream and withdraw the PMT from the CAM list */
  if (program->pmt) {
    dvb_base_bin_remove_pmt_streams (dvbbasebin, program->pmt);
    dvbbasebin->pmtlist = g_list_remove (dvbbasebin->pmtlist, program->pmt);
    dvbbasebin->pmtlist_changed = TRUE;
  }

  dvb_base_bin_rebuild_filter (dvbbasebin);
  program->pmt_active = FALSE;
  program->active = FALSE;
}
+
+ static void
+ dvb_base_bin_handle_message (GstBin * bin, GstMessage * message)
+ {
+ DvbBaseBin *dvbbasebin;
+
+ dvbbasebin = GST_DVB_BASE_BIN (bin);
+
+ /* note: message->src might be a GstPad, so use element cast w/o typecheck */
+ if (GST_ELEMENT_CAST (message->src) == dvbbasebin->tsparse) {
+ GstMpegtsSection *section = gst_message_parse_mpegts_section (message);
+
+ if (section) {
+ switch (GST_MPEGTS_SECTION_TYPE (section)) {
+ case GST_MPEGTS_SECTION_PAT:
+ dvb_base_bin_pat_info_cb (dvbbasebin, section);
+ break;
+ case GST_MPEGTS_SECTION_PMT:
+ dvb_base_bin_pmt_info_cb (dvbbasebin, section);
+ break;
+ default:
+ break;
+ }
+ gst_mpegts_section_unref (section);
+ }
+ }
+
+ /* chain up */
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ }
+
+
+
/* Handle a parsed PAT section: record/refresh the PMT PID of every program
 * it announces, and for programs the user selected, move the PID-filter
 * reference from the old PMT PID to the new one.  Rebuilds the filter once
 * at the end if anything changed. */
static void
dvb_base_bin_pat_info_cb (DvbBaseBin * dvbbasebin, GstMpegtsSection * section)
{
  GPtrArray *pat;
  DvbBaseBinProgram *program;
  DvbBaseBinStream *stream;
  guint old_pmt_pid;
  gint i;
  gboolean rebuild_filter = FALSE;

  if (!(pat = gst_mpegts_section_get_pat (section))) {
    GST_WARNING_OBJECT (dvbbasebin, "got invalid PAT");
    return;
  }

  for (i = 0; i < pat->len; i++) {
    GstMpegtsPatProgram *patp = g_ptr_array_index (pat, i);

    /* lazily create program bookkeeping for unseen program numbers */
    program = dvb_base_bin_get_program (dvbbasebin, patp->program_number);
    if (program == NULL)
      program = dvb_base_bin_add_program (dvbbasebin, patp->program_number);

    old_pmt_pid = program->pmt_pid;
    program->pmt_pid = patp->network_or_program_map_PID;

    if (program->selected) {
      /* PAT update */
      /* G_MAXUINT16 marks "no previous PMT PID"; otherwise drop the
       * reference held on the outdated PID */
      if (old_pmt_pid != G_MAXUINT16 && old_pmt_pid != program->pmt_pid) {
        dvb_base_bin_unref_stream (dvb_base_bin_get_stream (dvbbasebin,
                old_pmt_pid));
      }

      /* take a reference on the (possibly new) PMT PID so it is filtered */
      stream = dvb_base_bin_get_stream (dvbbasebin, program->pmt_pid);
      if (stream == NULL)
        stream = dvb_base_bin_add_stream (dvbbasebin, program->pmt_pid);

      dvb_base_bin_ref_stream (stream);

      rebuild_filter = TRUE;
    }
  }
  g_ptr_array_unref (pat);

  /* single rebuild after the loop instead of one per program */
  if (rebuild_filter)
    dvb_base_bin_rebuild_filter (dvbbasebin);
}
+
/* Handle a parsed PMT section: stash the previous PMT/section (so
 * activate_program can release the old streams), store the new one with a
 * section ref, and (re)activate the program when it is selected and either
 * inactive or its PMT changed.  The old section's ref is dropped here once
 * the update has been applied. */
static void
dvb_base_bin_pmt_info_cb (DvbBaseBin * dvbbasebin, GstMpegtsSection * section)
{
  const GstMpegtsPMT *pmt;
  DvbBaseBinProgram *program;
  guint program_number;

  pmt = gst_mpegts_section_get_pmt (section);
  if (G_UNLIKELY (pmt == NULL)) {
    GST_WARNING_OBJECT (dvbbasebin, "Received invalid PMT");
    return;
  }

  /* for a PMT, subtable_extension carries the program number */
  program_number = section->subtable_extension;

  program = dvb_base_bin_get_program (dvbbasebin, program_number);
  if (program == NULL) {
    GST_WARNING ("got PMT for program %d but program not in PAT",
        program_number);
    program = dvb_base_bin_add_program (dvbbasebin, program_number);
  }

  /* pmt points into section's data, so keep a ref on the section for as
   * long as we keep the pmt pointer; old_pmt/old_section are always set
   * as a pair */
  program->old_pmt = program->pmt;
  program->old_section = program->section;
  program->pmt = pmt;
  program->section = gst_mpegts_section_ref (section);

  /* activate the program if it's selected and either it's not active or its pmt
   * changed */
  if (program->selected && (!program->active || program->old_pmt != NULL))
    dvb_base_bin_activate_program (dvbbasebin, program);

  /* release the ref that was keeping the old PMT's backing section alive */
  if (program->old_pmt) {
    gst_mpegts_section_unref (program->old_section);
    program->old_pmt = NULL;
  }
}
+
/* GstURIHandler::get_type implementation: dvb:// URIs name a source. */
static guint
dvb_base_bin_uri_get_type (GType type)
{
  return GST_URI_SRC;
}
+
/* GstURIHandler::get_protocols implementation: we only handle "dvb". */
static const gchar *const *
dvb_base_bin_uri_get_protocols (GType type)
{
  /* static NULL-terminated list, shared across calls */
  static const gchar *protocols[] = { "dvb", NULL };

  return protocols;
}
+
/* GstURIHandler::get_uri implementation.  Always reports the bare scheme;
 * the channel details given via set_uri are not reconstructed.  Caller
 * owns the returned string. */
static gchar *
dvb_base_bin_uri_get_uri (GstURIHandler * handler)
{
  return g_strdup ("dvb://");
}
+
/* GstURIHandler::set_uri implementation.  Extracts the channel name from a
 * dvb://<channel> URI and configures the bin's properties from the channel
 * configuration.  On failure an ERROR message is posted on the bus and the
 * error is propagated to @error.  Uses the usual GStreamer goto-based error
 * ladder: labels below the success return build @err, then jump back to
 * post_error_and_exit. */
static gboolean
dvb_base_bin_uri_set_uri (GstURIHandler * handler, const gchar * uri,
    GError ** error)
{
  DvbBaseBin *dvbbasebin = GST_DVB_BASE_BIN (handler);
  GError *err = NULL;
  gchar *location;

  location = gst_uri_get_location (uri);

  if (location == NULL)
    goto no_location;

  /* FIXME: here is where we parse channels.conf */
  if (!set_properties_for_channel (GST_ELEMENT (dvbbasebin), location, &err))
    goto set_properties_failed;

  g_free (location);
  return TRUE;

post_error_and_exit:
  {
    /* gst_element_message_full takes ownership of the strdup'd text;
     * err itself is handed to the caller via g_propagate_error */
    gst_element_message_full (GST_ELEMENT (dvbbasebin), GST_MESSAGE_ERROR,
        err->domain, err->code, g_strdup (err->message), NULL, __FILE__,
        GST_FUNCTION, __LINE__);
    g_propagate_error (error, err);
    return FALSE;
  }
no_location:
  {
    g_set_error (&err, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
        "No details to DVB URI");
    goto post_error_and_exit;
  }
set_properties_failed:
  {
    g_free (location);
    /* set_properties_for_channel may fail without filling err */
    if (!err)
      g_set_error (&err, GST_URI_ERROR, GST_URI_ERROR_BAD_REFERENCE,
          "Could not find information for channel");
    goto post_error_and_exit;
  }
}
+
+ static void
+ dvb_base_bin_uri_handler_init (gpointer g_iface, gpointer iface_data)
+ {
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+ iface->get_type = dvb_base_bin_uri_get_type;
+ iface->get_protocols = dvb_base_bin_uri_get_protocols;
+ iface->get_uri = dvb_base_bin_uri_get_uri;
+ iface->set_uri = dvb_base_bin_uri_set_uri;
+ }
+
+
+ static void
+ dvb_base_bin_program_destroy (gpointer data)
+ {
+ DvbBaseBinProgram *program;
+
+ program = (DvbBaseBinProgram *) data;
+
+ if (program->pmt) {
+ program->pmt = NULL;
+ gst_mpegts_section_unref (program->section);
+ }
+
+ g_free (program);
+ }
+
+ static void
+ dvb_base_bin_task (DvbBaseBin * basebin)
+ {
+ gint pollres;
+
+ GST_DEBUG_OBJECT (basebin, "In task");
+
+ /* If we haven't tried to open the cam, try now */
+ if (G_UNLIKELY (basebin->trycam))
+ dvb_base_bin_init_cam (basebin);
+
+ /* poll with timeout */
+ pollres = gst_poll_wait (basebin->poll, GST_SECOND / 4);
+
+ if (G_UNLIKELY (pollres == -1)) {
+ gst_task_stop (basebin->task);
+ return;
+ }
+ if (basebin->hwcam) {
+ cam_device_poll (basebin->hwcam);
+
+ if (basebin->pmtlist_changed) {
+ if (cam_device_ready (basebin->hwcam)) {
+ GST_DEBUG_OBJECT (basebin, "pmt list changed");
+ dvb_base_bin_reset_pmtlist (basebin);
+ } else {
+ GST_DEBUG_OBJECT (basebin, "pmt list changed but CAM not ready");
+ }
+ }
+ }
+ }
--- /dev/null
+ /*
+ * GStreamer
+ *
+ * Copyright (C) 2012 Cisco Systems, Inc.
+ * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ /**
+ * SECTION:element-uvch264src
+ * @title: uvch264src
+ *
+ * A camera bin src element that wraps v4l2src and implements UVC H264
+ * Extension Units (XU) to control the H264 encoder in the camera
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include "gstuvch264_src.h"
+
/* GObject property IDs.  Order defines the numeric IDs used in
 * set/get_property, so entries must not be reordered. */
enum
{
  PROP_0,
  /* uvch264_src properties */
  PROP_COLORSPACE_NAME,
  PROP_JPEG_DECODER_NAME,
  PROP_NUM_CLOCK_SAMPLES,
  /* v4l2src properties (proxied to the internal v4l2src element) */
  PROP_NUM_BUFFERS,
  PROP_DEVICE,
  PROP_DEVICE_NAME,
  /* Static controls (only changeable in READY or below) */
  PROP_INITIAL_BITRATE,
  PROP_SLICE_UNITS,
  PROP_SLICE_MODE,
  PROP_IFRAME_PERIOD,
  PROP_USAGE_TYPE,
  PROP_ENTROPY,
  PROP_ENABLE_SEI,
  PROP_NUM_REORDER_FRAMES,
  PROP_PREVIEW_FLIPPED,
  PROP_LEAKY_BUCKET_SIZE,
  /* Dynamic controls (changeable while PLAYING) */
  PROP_RATE_CONTROL,
  PROP_FIXED_FRAMERATE,
  PROP_MAX_MBPS,                /* read-only */
  PROP_LEVEL_IDC,
  PROP_PEAK_BITRATE,
  PROP_AVERAGE_BITRATE,
  PROP_MIN_IFRAME_QP,
  PROP_MAX_IFRAME_QP,
  PROP_MIN_PFRAME_QP,
  PROP_MAX_PFRAME_QP,
  PROP_MIN_BFRAME_QP,
  PROP_MAX_BFRAME_QP,
  PROP_LTR_BUFFER_SIZE,
  PROP_LTR_ENCODER_CONTROL,
};
+ /* In caps : frame interval (fps), width, height, profile, mux */
+ /* Ignored: temporal, spatial, SNR, MVC views, version, reset */
+ /* Events: LTR, generate IDR */
+
/* Indices into the _signals array for the action signals registered in
 * class_init. */
enum
{
  /* action signals */
  SIGNAL_GET_ENUM_SETTING,
  SIGNAL_GET_BOOLEAN_SETTING,
  SIGNAL_GET_INT_SETTING,
  LAST_SIGNAL
};
+
/* Signal IDs filled in by gst_uvc_h264_src_class_init() */
static guint _signals[LAST_SIGNAL];

/* Default values */
#define DEFAULT_COLORSPACE_NAME "videoconvert"
#define DEFAULT_JPEG_DECODER_NAME "jpegdec"
#define DEFAULT_NUM_CLOCK_SAMPLES 0
#define DEFAULT_NUM_BUFFERS -1
#define DEFAULT_DEVICE "/dev/video0"
#define DEFAULT_DEVICE_NAME NULL
#define DEFAULT_INITIAL_BITRATE 3000000
#define DEFAULT_SLICE_UNITS 4
#define DEFAULT_SLICE_MODE UVC_H264_SLICEMODE_SLICEPERFRAME
#define DEFAULT_IFRAME_PERIOD 10000
#define DEFAULT_USAGE_TYPE UVC_H264_USAGETYPE_REALTIME
#define DEFAULT_ENTROPY UVC_H264_ENTROPY_CAVLC
#define DEFAULT_ENABLE_SEI FALSE
#define DEFAULT_NUM_REORDER_FRAMES 0
#define DEFAULT_PREVIEW_FLIPPED FALSE
#define DEFAULT_LEAKY_BUCKET_SIZE 1000
#define DEFAULT_RATE_CONTROL UVC_H264_RATECONTROL_CBR
#define DEFAULT_FIXED_FRAMERATE FALSE
#define DEFAULT_LEVEL_IDC 40
#define DEFAULT_PEAK_BITRATE DEFAULT_INITIAL_BITRATE
#define DEFAULT_AVERAGE_BITRATE DEFAULT_INITIAL_BITRATE
#define DEFAULT_MIN_QP 10
#define DEFAULT_MAX_QP 46
#define DEFAULT_LTR_BUFFER_SIZE 0
#define DEFAULT_LTR_ENCODER_CONTROL 0

/* nanoseconds per second (1e9) */
#define NSEC_PER_SEC (G_USEC_PER_SEC * 1000)
+
+
+ GST_DEBUG_CATEGORY (uvc_h264_src_debug);
+ #define GST_CAT_DEFAULT uvc_h264_src_debug
+
+ #define gst_uvc_h264_src_parent_class parent_class
+ G_DEFINE_TYPE (GstUvcH264Src, gst_uvc_h264_src, GST_TYPE_BASE_CAMERA_SRC);
+ GST_ELEMENT_REGISTER_DEFINE (uvch264src, "uvch264src", GST_RANK_NONE,
+ GST_TYPE_UVC_H264_SRC);
+
+ #define GST_UVC_H264_SRC_VF_CAPS_STR \
+ GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ";" \
+ "image/jpeg," \
+ "width = " GST_VIDEO_SIZE_RANGE "," \
+ "height = " GST_VIDEO_SIZE_RANGE "," \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+ #define GST_UVC_H264_SRC_VID_CAPS_STR \
+ GST_UVC_H264_SRC_VF_CAPS_STR ";" \
+ "video/x-h264, " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE ", " \
+ "stream-format = (string) { byte-stream, avc }, " \
+ "alignment = (string) au, " \
+ "profile = (string) { high, main, baseline, constrained-baseline }"
+
/**
 * GstUvcH264Src!vfsrc:
 *
 * The video src pad template
 */
static GstStaticPadTemplate vfsrc_template =
GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIEWFINDER_PAD_NAME,
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_UVC_H264_SRC_VF_CAPS_STR));

/* Image capture pad: no caps advertised (still-image capture unsupported) */
static GstStaticPadTemplate imgsrc_template =
GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME,
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS_NONE);

/* Video recording pad: raw/jpeg viewfinder caps plus H264 */
static GstStaticPadTemplate vidsrc_template =
GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME,
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_UVC_H264_SRC_VID_CAPS_STR));
+
+
+ static void gst_uvc_h264_src_dispose (GObject * object);
+ static void gst_uvc_h264_src_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_uvc_h264_src_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+ static gboolean gst_uvc_h264_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_uvc_h264_src_send_event (GstElement * element,
+ GstEvent * event);
+ static gboolean gst_uvc_h264_src_construct_pipeline (GstBaseCameraSrc *
+ bcamsrc);
+ static gboolean gst_uvc_h264_src_set_mode (GstBaseCameraSrc * bcamsrc,
+ GstCameraBinMode mode);
+ static gboolean gst_uvc_h264_src_start_capture (GstBaseCameraSrc * camerasrc);
+ static void gst_uvc_h264_src_stop_capture (GstBaseCameraSrc * camerasrc);
+ static GstStateChangeReturn gst_uvc_h264_src_change_state (GstElement * element,
+ GstStateChange trans);
+ static GstPadProbeReturn gst_uvc_h264_src_buffer_probe (GstPad * pad,
+ GstPadProbeInfo * info, gpointer user_data);
+ static GstPadProbeReturn gst_uvc_h264_src_event_probe (GstPad * pad,
+ GstPadProbeInfo * info, gpointer user_data);
+ static void gst_uvc_h264_src_pad_linking_cb (GstPad * pad,
+ GstPad * peer, gpointer user_data);
+ static gboolean gst_uvc_h264_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+
+ static void v4l2src_prepare_format (GstElement * v4l2src, gint fd,
+ GstCaps * caps, gpointer user_data);
+ static void fill_probe_commit (GstUvcH264Src * self,
+ uvcx_video_config_probe_commit_t * probe, guint32 frame_interval,
+ guint32 width, guint32 height, guint32 profile,
+ UvcH264StreamFormat stream_format);
+ static gboolean xu_query (GstUvcH264Src * self, guint selector, guint query,
+ guchar * data);
+
+ static void set_rate_control (GstUvcH264Src * self);
+ static void set_level_idc (GstUvcH264Src * self);
+ static void set_bitrate (GstUvcH264Src * self);
+ static void set_qp (GstUvcH264Src * self, gint type);
+ static void set_ltr (GstUvcH264Src * self);
+ static void update_rate_control (GstUvcH264Src * self);
+ static guint32 update_level_idc_and_get_max_mbps (GstUvcH264Src * self);
+ static void update_bitrate (GstUvcH264Src * self);
+ static gboolean update_qp (GstUvcH264Src * self, gint type);
+ static void update_ltr (GstUvcH264Src * self);
+
+ static gboolean gst_uvc_h264_src_get_enum_setting (GstUvcH264Src * self,
+ gchar * property, gint * mask, gint * default_value);
+ static gboolean gst_uvc_h264_src_get_boolean_setting (GstUvcH264Src * self,
+ gchar * property, gboolean * changeable, gboolean * def);
+ static gboolean gst_uvc_h264_src_get_int_setting (GstUvcH264Src * self,
+ gchar * property, gint * min, gint * def, gint * max);
+
+ static void
+ gst_uvc_h264_src_class_init (GstUvcH264SrcClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseCameraSrcClass *gstbasecamerasrc_class;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+ gstbasecamerasrc_class = GST_BASE_CAMERA_SRC_CLASS (klass);
+
+ gobject_class->dispose = gst_uvc_h264_src_dispose;
+ gobject_class->set_property = gst_uvc_h264_src_set_property;
+ gobject_class->get_property = gst_uvc_h264_src_get_property;
+
+ gstelement_class->change_state = gst_uvc_h264_src_change_state;
+ gstelement_class->send_event = gst_uvc_h264_src_send_event;
+
+ gstbasecamerasrc_class->construct_pipeline =
+ gst_uvc_h264_src_construct_pipeline;
+ gstbasecamerasrc_class->set_mode = gst_uvc_h264_src_set_mode;
+ gstbasecamerasrc_class->start_capture = gst_uvc_h264_src_start_capture;
+ gstbasecamerasrc_class->stop_capture = gst_uvc_h264_src_stop_capture;
+
+ GST_DEBUG_CATEGORY_INIT (uvc_h264_src_debug, "uvch264src",
+ 0, "UVC H264 Compliant camera bin source");
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "UVC H264 Source",
+ "Source/Video",
+ "UVC H264 Encoding camera source",
+ "Youness Alaoui <youness.alaoui@collabora.co.uk>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &vidsrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &imgsrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &vfsrc_template);
+
+ /* Properties */
+ g_object_class_install_property (gobject_class, PROP_COLORSPACE_NAME,
+ g_param_spec_string ("colorspace-name", "colorspace element name",
+ "The name of the colorspace element",
+ DEFAULT_COLORSPACE_NAME, G_PARAM_CONSTRUCT | G_PARAM_READWRITE |
+ GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_JPEG_DECODER_NAME,
+ g_param_spec_string ("jpeg-decoder-name", "jpeg decoder element name",
+ "The name of the jpeg decoder element",
+ DEFAULT_JPEG_DECODER_NAME, G_PARAM_CONSTRUCT | G_PARAM_READWRITE |
+ GST_PARAM_MUTABLE_READY | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES,
+ g_param_spec_int ("num-clock-samples", "num-clock-samples",
+ "Number of clock samples to gather for the PTS synchronization"
+ " (-1 = unlimited)",
+ 0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | GST_PARAM_MUTABLE_PLAYING |
+ G_PARAM_STATIC_STRINGS));
+
+ /* v4l2src proxied properties */
+ g_object_class_install_property (gobject_class, PROP_NUM_BUFFERS,
+ g_param_spec_int ("num-buffers", "num-buffers",
+ "Number of buffers to output before sending EOS (-1 = unlimited)",
+ -1, G_MAXINT, DEFAULT_NUM_BUFFERS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "device",
+ "Device location",
+ DEFAULT_DEVICE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device name",
+ "Name of the device", DEFAULT_DEVICE_NAME,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ /* Static controls */
+ g_object_class_install_property (gobject_class, PROP_INITIAL_BITRATE,
+ g_param_spec_uint ("initial-bitrate", "Initial bitrate",
+ "Initial bitrate in bits/second (static control)",
+ 0, G_MAXUINT, DEFAULT_INITIAL_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_SLICE_UNITS,
+ g_param_spec_uint ("slice-units", "Slice units",
+ "Slice units (static control)",
+ 0, G_MAXUINT16, DEFAULT_SLICE_UNITS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_SLICE_MODE,
+ g_param_spec_enum ("slice-mode", "Slice mode",
+ "Defines the unit of the slice-units property (static control)",
+ UVC_H264_SLICEMODE_TYPE,
+ DEFAULT_SLICE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_IFRAME_PERIOD,
+ g_param_spec_uint ("iframe-period", "I Frame Period",
+ "Time between IDR frames in milliseconds (static control)",
+ 0, G_MAXUINT16, DEFAULT_IFRAME_PERIOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_USAGE_TYPE,
+ g_param_spec_enum ("usage-type", "Usage type",
+ "The usage type (static control)",
+ UVC_H264_USAGETYPE_TYPE, DEFAULT_USAGE_TYPE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_ENTROPY,
+ g_param_spec_enum ("entropy", "Entropy",
+ "Entropy (static control)",
+ UVC_H264_ENTROPY_TYPE, DEFAULT_ENTROPY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_ENABLE_SEI,
+ g_param_spec_boolean ("enable-sei", "Enable SEI",
+ "Enable SEI picture timing (static control)",
+ DEFAULT_ENABLE_SEI, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_NUM_REORDER_FRAMES,
+ g_param_spec_uint ("num-reorder-frames", "Number of Reorder frames",
+ "Number of B frames between the references frames (static control)",
+ 0, G_MAXUINT8, DEFAULT_NUM_REORDER_FRAMES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_PREVIEW_FLIPPED,
+ g_param_spec_boolean ("preview-flipped", "Flip preview",
+ "Horizontal flipped image for non H.264 streams (static control)",
+ DEFAULT_PREVIEW_FLIPPED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_object_class_install_property (gobject_class, PROP_LEAKY_BUCKET_SIZE,
+ g_param_spec_uint ("leaky-bucket-size", "Size of the leaky bucket size",
+ "Size of the leaky bucket size in milliseconds (static control)",
+ 0, G_MAXUINT16, DEFAULT_LEAKY_BUCKET_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ /* Dynamic controls */
+ g_object_class_install_property (gobject_class, PROP_RATE_CONTROL,
+ g_param_spec_enum ("rate-control", "Rate control",
+ "Rate control mode (static & dynamic control)",
+ UVC_H264_RATECONTROL_TYPE, DEFAULT_RATE_CONTROL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_FIXED_FRAMERATE,
+ g_param_spec_boolean ("fixed-framerate", "Fixed framerate",
+ "Fixed framerate (static & dynamic control)",
+ DEFAULT_FIXED_FRAMERATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_MBPS,
+ g_param_spec_uint ("max-mbps", "Max macroblocks/second",
+ "The number of macroblocks per second for the maximum processing rate",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_LEVEL_IDC,
+ g_param_spec_uint ("level-idc", "Level IDC",
+ "Level IDC (dynamic control)",
+ 0, G_MAXUINT8, DEFAULT_LEVEL_IDC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_PEAK_BITRATE,
+ g_param_spec_uint ("peak-bitrate", "Peak bitrate",
+ "The peak bitrate in bits/second (dynamic control)",
+ 0, G_MAXUINT, DEFAULT_PEAK_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_AVERAGE_BITRATE,
+ g_param_spec_uint ("average-bitrate", "Average bitrate",
+ "The average bitrate in bits/second (dynamic control)",
+ 0, G_MAXUINT, DEFAULT_AVERAGE_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MIN_IFRAME_QP,
+ g_param_spec_int ("min-iframe-qp", "Minimum I frame QP",
+ "The minimum Quantization step size for I frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_IFRAME_QP,
+ g_param_spec_int ("max-iframe-qp", "Minimum I frame QP",
+ "The minimum Quantization step size for I frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MIN_PFRAME_QP,
+ g_param_spec_int ("min-pframe-qp", "Minimum P frame QP",
+ "The minimum Quantization step size for P frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_PFRAME_QP,
+ g_param_spec_int ("max-pframe-qp", "Minimum P frame QP",
+ "The minimum Quantization step size for P frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MIN_BFRAME_QP,
+ g_param_spec_int ("min-bframe-qp", "Minimum B frame QP",
+ "The minimum Quantization step size for B frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MIN_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_MAX_BFRAME_QP,
+ g_param_spec_int ("max-bframe-qp", "Minimum B frame QP",
+ "The minimum Quantization step size for B frames (dynamic control)",
+ -G_MAXINT8, G_MAXINT8, DEFAULT_MAX_QP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_LTR_BUFFER_SIZE,
+ g_param_spec_int ("ltr-buffer-size", "LTR Buffer size",
+ "Total number of Long-Term Reference frames (dynamic control)",
+ 0, G_MAXUINT8, DEFAULT_LTR_BUFFER_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+ g_object_class_install_property (gobject_class, PROP_LTR_ENCODER_CONTROL,
+ g_param_spec_int ("ltr-encoder-control",
+ "LTR frames controlled by device",
+ "Number of LTR frames the device can control (dynamic control)", 0,
+ G_MAXUINT8, DEFAULT_LTR_ENCODER_CONTROL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+
+ _signals[SIGNAL_GET_ENUM_SETTING] =
+ g_signal_new_class_handler ("get-enum-setting",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_uvc_h264_src_get_enum_setting),
+ NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, 0);
+ _signals[SIGNAL_GET_BOOLEAN_SETTING] =
+ g_signal_new_class_handler ("get-boolean-setting",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_uvc_h264_src_get_boolean_setting), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER, 0);
+ _signals[SIGNAL_GET_INT_SETTING] =
+ g_signal_new_class_handler ("get-int-setting",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_CALLBACK (gst_uvc_h264_src_get_int_setting), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 4, G_TYPE_STRING, G_TYPE_POINTER, G_TYPE_POINTER,
+ G_TYPE_POINTER, 0);
+
+ gst_type_mark_as_plugin_api (UVC_H264_ENTROPY_TYPE, 0);
+ gst_type_mark_as_plugin_api (UVC_H264_RATECONTROL_TYPE, 0);
+ gst_type_mark_as_plugin_api (UVC_H264_SLICEMODE_TYPE, 0);
+ gst_type_mark_as_plugin_api (UVC_H264_USAGETYPE_TYPE, 0);
+ }
+
/* Instance initialisation: create the three targetless ghost pads
 * (viewfinder, image, video), install buffer/event probes on them, route
 * their event/query functions through this element, and set every field
 * to its DEFAULT_* value.  The actual child elements are created later in
 * gst_uvc_h264_src_construct_pipeline(). */
static void
gst_uvc_h264_src_init (GstUvcH264Src * self)
{
  self->vfsrc =
      gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_VIEWFINDER_PAD_NAME,
      GST_PAD_SRC);
  gst_pad_set_query_function (self->vfsrc,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_src_query));
  gst_element_add_pad (GST_ELEMENT (self), self->vfsrc);

  self->imgsrc =
      gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME,
      GST_PAD_SRC);
  gst_element_add_pad (GST_ELEMENT (self), self->imgsrc);

  self->vidsrc =
      gst_ghost_pad_new_no_target (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME,
      GST_PAD_SRC);
  gst_pad_set_query_function (self->vidsrc,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_src_query));
  gst_element_add_pad (GST_ELEMENT (self), self->vidsrc);
  /* probes: buffers on vidsrc, upstream events on vfsrc, both directions
   * of events on vidsrc */
  gst_pad_add_probe (self->vidsrc, GST_PAD_PROBE_TYPE_BUFFER,
      gst_uvc_h264_src_buffer_probe, self, NULL);
  gst_pad_add_probe (self->vfsrc, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
      gst_uvc_h264_src_event_probe, self, NULL);
  gst_pad_add_probe (self->vidsrc,
      GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      gst_uvc_h264_src_event_probe, self, NULL);

  /* save the default ghost-pad event function before overriding it below —
   * presumably invoked from gst_uvc_h264_src_event for fallback handling;
   * confirm in that function (not visible here) */
  self->srcpad_event_func = GST_PAD_EVENTFUNC (self->vfsrc);

  gst_pad_set_event_function (self->imgsrc, gst_uvc_h264_src_event);
  gst_pad_set_event_function (self->vidsrc, gst_uvc_h264_src_event);
  gst_pad_set_event_function (self->vfsrc, gst_uvc_h264_src_event);

  /* track pad (un)linking so the internal pipeline can be reconfigured */
  g_signal_connect (self->vidsrc, "linked",
      (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
  g_signal_connect (self->vidsrc, "unlinked",
      (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
  g_signal_connect (self->vfsrc, "linked",
      (GCallback) gst_uvc_h264_src_pad_linking_cb, self);
  g_signal_connect (self->vfsrc, "unlinked",
      (GCallback) gst_uvc_h264_src_pad_linking_cb, self);

  /* -1 marks "device not opened yet" */
  self->v4l2_fd = -1;
  gst_base_camera_src_set_mode (GST_BASE_CAMERA_SRC (self), MODE_VIDEO);

  self->main_format = UVC_H264_SRC_FORMAT_NONE;
  self->main_width = 0;
  self->main_height = 0;
  self->main_frame_interval = 0;
  self->main_stream_format = UVC_H264_STREAMFORMAT_ANNEXB;
  self->main_profile = UVC_H264_PROFILE_CONSTRAINED_BASELINE;
  self->secondary_format = UVC_H264_SRC_FORMAT_NONE;
  self->secondary_width = 0;
  self->secondary_height = 0;
  self->secondary_frame_interval = 0;

  /* v4l2src properties */
  self->num_buffers = DEFAULT_NUM_BUFFERS;
  self->device = g_strdup (DEFAULT_DEVICE);

  /* Static controls */
  self->initial_bitrate = DEFAULT_INITIAL_BITRATE;
  self->slice_units = DEFAULT_SLICE_UNITS;
  self->slice_mode = DEFAULT_SLICE_MODE;
  self->iframe_period = DEFAULT_IFRAME_PERIOD;
  self->usage_type = DEFAULT_USAGE_TYPE;
  self->entropy = DEFAULT_ENTROPY;
  self->enable_sei = DEFAULT_ENABLE_SEI;
  self->num_reorder_frames = DEFAULT_NUM_REORDER_FRAMES;
  self->preview_flipped = DEFAULT_PREVIEW_FLIPPED;
  self->leaky_bucket_size = DEFAULT_LEAKY_BUCKET_SIZE;

  /* Dynamic controls */
  self->rate_control = DEFAULT_RATE_CONTROL;
  self->fixed_framerate = DEFAULT_FIXED_FRAMERATE;
  self->level_idc = DEFAULT_LEVEL_IDC;
  self->peak_bitrate = DEFAULT_PEAK_BITRATE;
  self->average_bitrate = DEFAULT_AVERAGE_BITRATE;
  self->min_qp[QP_I_FRAME] = DEFAULT_MIN_QP;
  self->max_qp[QP_I_FRAME] = DEFAULT_MAX_QP;
  self->min_qp[QP_P_FRAME] = DEFAULT_MIN_QP;
  self->max_qp[QP_P_FRAME] = DEFAULT_MAX_QP;
  self->min_qp[QP_B_FRAME] = DEFAULT_MIN_QP;
  self->max_qp[QP_B_FRAME] = DEFAULT_MAX_QP;
  self->ltr_buffer_size = DEFAULT_LTR_BUFFER_SIZE;
  self->ltr_encoder_control = DEFAULT_LTR_ENCODER_CONTROL;
}
+
+ static void
+ gst_uvc_h264_src_dispose (GObject * object)
+ {
+ GstUvcH264Src *self = GST_UVC_H264_SRC (object);
+
+ if (self->usb_ctx)
+ libusb_exit (self->usb_ctx);
+ self->usb_ctx = NULL;
+ g_free (self->jpeg_decoder_name);
+ self->jpeg_decoder_name = NULL;
+ g_free (self->colorspace_name);
+ self->colorspace_name = NULL;
+ g_free (self->device);
+ self->device = NULL;
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ /* GObject::set_property implementation for uvch264src.
+  *
+  * Three families of properties are handled:
+  *   - plain element settings (decoder/colorspace element names, clock
+  *     samples, and the v4l2src passthrough properties), which are cached
+  *     and, where a child element already exists, forwarded to it;
+  *   - "static" H.264 controls, which are only cached here and committed
+  *     to the camera later through the probe/commit negotiation
+  *     (see fill_probe_commit);
+  *   - "dynamic" H.264 controls, which are pushed to the camera right away
+  *     with the matching set_*() helper and then read back with update_*()
+  *     so the cached value reflects what the hardware actually accepted.
+  */
+ static void
+ gst_uvc_h264_src_set_property (GObject * object,
+     guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (object);
+
+   switch (prop_id) {
+     case PROP_COLORSPACE_NAME:
+       g_free (self->colorspace_name);
+       self->colorspace_name = g_value_dup_string (value);
+       break;
+     case PROP_JPEG_DECODER_NAME:
+       g_free (self->jpeg_decoder_name);
+       self->jpeg_decoder_name = g_value_dup_string (value);
+       break;
+     case PROP_NUM_CLOCK_SAMPLES:
+       self->num_clock_samples = g_value_get_int (value);
+       /* Forward immediately if the MJPG demuxer already exists */
+       if (self->mjpg_demux)
+         g_object_set (self->mjpg_demux,
+             "num-clock-samples", self->num_clock_samples, NULL);
+       break;
+     /* v4l2 properties */
+     case PROP_NUM_BUFFERS:
+       self->num_buffers = g_value_get_int (value);
+       if (self->v4l2_src)
+         g_object_set_property (G_OBJECT (self->v4l2_src), "num-buffers", value);
+       break;
+     case PROP_DEVICE:
+       g_free (self->device);
+       self->device = g_value_dup_string (value);
+       if (self->v4l2_src)
+         g_object_set_property (G_OBJECT (self->v4l2_src), "device", value);
+       break;
+     /* Static controls: cached only, applied at caps negotiation time */
+     case PROP_INITIAL_BITRATE:
+       self->initial_bitrate = g_value_get_uint (value);
+       break;
+     case PROP_SLICE_UNITS:
+       self->slice_units = g_value_get_uint (value);
+       break;
+     case PROP_SLICE_MODE:
+       self->slice_mode = g_value_get_enum (value);
+       break;
+     case PROP_IFRAME_PERIOD:
+       self->iframe_period = g_value_get_uint (value);
+       break;
+     case PROP_USAGE_TYPE:
+       self->usage_type = g_value_get_enum (value);
+       break;
+     case PROP_ENTROPY:
+       self->entropy = g_value_get_enum (value);
+       break;
+     case PROP_ENABLE_SEI:
+       self->enable_sei = g_value_get_boolean (value);
+       break;
+     case PROP_NUM_REORDER_FRAMES:
+       self->num_reorder_frames = g_value_get_uint (value);
+       break;
+     case PROP_PREVIEW_FLIPPED:
+       self->preview_flipped = g_value_get_boolean (value);
+       break;
+     case PROP_LEAKY_BUCKET_SIZE:
+       self->leaky_bucket_size = g_value_get_uint (value);
+       break;
+
+     /* Dynamic controls: pushed to the camera immediately, then read back
+      * so the cached value matches what the device accepted */
+     case PROP_RATE_CONTROL:
+       self->rate_control = g_value_get_enum (value);
+       set_rate_control (self);
+       update_rate_control (self);
+       break;
+     case PROP_FIXED_FRAMERATE:
+       self->fixed_framerate = g_value_get_boolean (value);
+       set_rate_control (self);
+       update_rate_control (self);
+       break;
+     case PROP_LEVEL_IDC:
+       self->level_idc = g_value_get_uint (value);
+       set_level_idc (self);
+       update_level_idc_and_get_max_mbps (self);
+       break;
+     case PROP_PEAK_BITRATE:
+       self->peak_bitrate = g_value_get_uint (value);
+       set_bitrate (self);
+       update_bitrate (self);
+       break;
+     case PROP_AVERAGE_BITRATE:
+       self->average_bitrate = g_value_get_uint (value);
+       set_bitrate (self);
+       update_bitrate (self);
+       break;
+     case PROP_MIN_IFRAME_QP:
+       self->min_qp[QP_I_FRAME] = g_value_get_int (value);
+       set_qp (self, QP_I_FRAME);
+       update_qp (self, QP_I_FRAME);
+       break;
+     case PROP_MAX_IFRAME_QP:
+       self->max_qp[QP_I_FRAME] = g_value_get_int (value);
+       set_qp (self, QP_I_FRAME);
+       update_qp (self, QP_I_FRAME);
+       break;
+     case PROP_MIN_PFRAME_QP:
+       self->min_qp[QP_P_FRAME] = g_value_get_int (value);
+       set_qp (self, QP_P_FRAME);
+       update_qp (self, QP_P_FRAME);
+       break;
+     case PROP_MAX_PFRAME_QP:
+       self->max_qp[QP_P_FRAME] = g_value_get_int (value);
+       set_qp (self, QP_P_FRAME);
+       update_qp (self, QP_P_FRAME);
+       break;
+     case PROP_MIN_BFRAME_QP:
+       self->min_qp[QP_B_FRAME] = g_value_get_int (value);
+       set_qp (self, QP_B_FRAME);
+       update_qp (self, QP_B_FRAME);
+       break;
+     case PROP_MAX_BFRAME_QP:
+       self->max_qp[QP_B_FRAME] = g_value_get_int (value);
+       set_qp (self, QP_B_FRAME);
+       update_qp (self, QP_B_FRAME);
+       break;
+     case PROP_LTR_BUFFER_SIZE:
+       self->ltr_buffer_size = g_value_get_int (value);
+       set_ltr (self);
+       update_ltr (self);
+       break;
+     case PROP_LTR_ENCODER_CONTROL:
+       self->ltr_encoder_control = g_value_get_int (value);
+       set_ltr (self);
+       update_ltr (self);
+       break;
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+       break;
+   }
+ }
+
+ /* GObject::get_property implementation for uvch264src.
+  *
+  * Uses two switches on purpose: the first one pre-fetches the camera's
+  * probe/commit structure (GET_CUR over the XU) for the "static" controls
+  * only; the second switch then reads the requested field.  For property
+  * ids outside the static-control set the `probe` variable is never read,
+  * so leaving it unfetched there is safe.  Below PAUSED the values come
+  * from fill_probe_commit(), i.e. the element's cached property values.
+  */
+ static void
+ gst_uvc_h264_src_get_property (GObject * object,
+     guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (object);
+   uvcx_video_config_probe_commit_t probe;
+
+   switch (prop_id) {
+     case PROP_INITIAL_BITRATE:
+     case PROP_SLICE_UNITS:
+     case PROP_SLICE_MODE:
+     case PROP_IFRAME_PERIOD:
+     case PROP_USAGE_TYPE:
+     case PROP_ENTROPY:
+     case PROP_ENABLE_SEI:
+     case PROP_NUM_REORDER_FRAMES:
+     case PROP_PREVIEW_FLIPPED:
+     case PROP_LEAKY_BUCKET_SIZE:
+       /* Seed with the cached values, then overwrite from the hardware when
+        * the device is open (>= PAUSED) */
+       fill_probe_commit (self, &probe, 0, 0, 0, 0, 0);
+       if (GST_STATE (self) >= GST_STATE_PAUSED) {
+         if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+                 (guchar *) & probe))
+           GST_WARNING_OBJECT (self, "probe_setting GET_CUR error");
+       }
+       break;
+     default:
+       break;
+   }
+
+   switch (prop_id) {
+     case PROP_COLORSPACE_NAME:
+       g_value_set_string (value, self->colorspace_name);
+       break;
+     case PROP_JPEG_DECODER_NAME:
+       g_value_set_string (value, self->jpeg_decoder_name);
+       break;
+     case PROP_NUM_CLOCK_SAMPLES:
+       g_value_set_int (value, self->num_clock_samples);
+       break;
+     /* v4l2src properties */
+     case PROP_NUM_BUFFERS:
+       g_value_set_int (value, self->num_buffers);
+       break;
+     case PROP_DEVICE:
+       g_value_set_string (value, self->device);
+       break;
+     case PROP_DEVICE_NAME:
+       /* Only meaningful once the v4l2src child exists */
+       if (self->v4l2_src)
+         g_object_get_property (G_OBJECT (self->v4l2_src), "device-name", value);
+       else
+         g_value_set_static_string (value, "");
+       break;
+     /* Static controls: answered from the probe struct fetched above */
+     case PROP_INITIAL_BITRATE:
+       g_value_set_uint (value, probe.dwBitRate);
+       break;
+     case PROP_SLICE_UNITS:
+       g_value_set_uint (value, probe.wSliceUnits);
+       break;
+     case PROP_SLICE_MODE:
+       g_value_set_enum (value, probe.wSliceMode);
+       break;
+     case PROP_IFRAME_PERIOD:
+       g_value_set_uint (value, probe.wIFramePeriod);
+       break;
+     case PROP_USAGE_TYPE:
+       g_value_set_enum (value, probe.bUsageType);
+       break;
+     case PROP_ENTROPY:
+       g_value_set_enum (value, probe.bEntropyCABAC);
+       break;
+     case PROP_ENABLE_SEI:
+       g_value_set_boolean (value,
+           (probe.bTimestamp == UVC_H264_TIMESTAMP_SEI_ENABLE));
+       break;
+     case PROP_NUM_REORDER_FRAMES:
+       g_value_set_uint (value, probe.bNumOfReorderFrames);
+       break;
+     case PROP_PREVIEW_FLIPPED:
+       g_value_set_boolean (value,
+           (probe.bPreviewFlipped == UVC_H264_PREFLIPPED_HORIZONTAL));
+       break;
+     case PROP_LEAKY_BUCKET_SIZE:
+       g_value_set_uint (value, probe.wLeakyBucketSize);
+       break;
+
+     /* Dynamic controls: refresh the cached value from the camera first */
+     case PROP_RATE_CONTROL:
+       update_rate_control (self);
+       g_value_set_enum (value, self->rate_control);
+       break;
+     case PROP_FIXED_FRAMERATE:
+       update_rate_control (self);
+       g_value_set_boolean (value, self->fixed_framerate);
+       break;
+     case PROP_MAX_MBPS:
+       g_value_set_uint (value, update_level_idc_and_get_max_mbps (self));
+       break;
+     case PROP_LEVEL_IDC:
+       update_level_idc_and_get_max_mbps (self);
+       g_value_set_uint (value, self->level_idc);
+       break;
+     case PROP_PEAK_BITRATE:
+       update_bitrate (self);
+       g_value_set_uint (value, self->peak_bitrate);
+       break;
+     case PROP_AVERAGE_BITRATE:
+       update_bitrate (self);
+       g_value_set_uint (value, self->average_bitrate);
+       break;
+     case PROP_MIN_IFRAME_QP:
+       update_qp (self, QP_I_FRAME);
+       g_value_set_int (value, self->min_qp[QP_I_FRAME]);
+       break;
+     case PROP_MAX_IFRAME_QP:
+       update_qp (self, QP_I_FRAME);
+       g_value_set_int (value, self->max_qp[QP_I_FRAME]);
+       break;
+     case PROP_MIN_PFRAME_QP:
+       update_qp (self, QP_P_FRAME);
+       g_value_set_int (value, self->min_qp[QP_P_FRAME]);
+       break;
+     case PROP_MAX_PFRAME_QP:
+       update_qp (self, QP_P_FRAME);
+       g_value_set_int (value, self->max_qp[QP_P_FRAME]);
+       break;
+     case PROP_MIN_BFRAME_QP:
+       update_qp (self, QP_B_FRAME);
+       g_value_set_int (value, self->min_qp[QP_B_FRAME]);
+       break;
+     case PROP_MAX_BFRAME_QP:
+       update_qp (self, QP_B_FRAME);
+       g_value_set_int (value, self->max_qp[QP_B_FRAME]);
+       break;
+     case PROP_LTR_BUFFER_SIZE:
+       update_ltr (self);
+       g_value_set_int (value, self->ltr_buffer_size);
+       break;
+     case PROP_LTR_ENCODER_CONTROL:
+       update_ltr (self);
+       g_value_set_int (value, self->ltr_encoder_control);
+       break;
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+       break;
+   }
+ }
+
+ /* Set dynamic controls */
+ /* Pushes the cached rate-control mode (plus the fixed-framerate flag) to
+  * the camera, using a read-modify-write of the rate-control XU control so
+  * any other bits are preserved. */
+ static void
+ set_rate_control (GstUvcH264Src * self)
+ {
+   uvcx_rate_control_mode_t mode;
+
+   if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & mode)) {
+     GST_WARNING_OBJECT (self, " RATE_CONTROL GET_CUR error");
+     return;
+   }
+
+   mode.bRateControlMode = self->fixed_framerate ?
+       (self->rate_control | UVC_H264_RATECONTROL_FIXED_FRM_FLG) :
+       self->rate_control;
+
+   if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR, (guchar *) & mode))
+     GST_WARNING_OBJECT (self, " RATE_CONTROL SET_CUR error");
+ }
+
+ /* Pushes the cached H.264 level_idc to the camera via a read-modify-write
+  * of the advance-config XU control. */
+ static void
+ set_level_idc (GstUvcH264Src * self)
+ {
+   uvcx_video_advance_config_t conf;
+
+   if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_GET_CUR,
+           (guchar *) & conf)) {
+     GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG GET_CUR error");
+     return;
+   }
+
+   conf.blevel_idc = self->level_idc;
+
+   if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_SET_CUR,
+           (guchar *) & conf))
+     GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG SET_CUR error");
+ }
+
+ /* Pushes the cached peak/average bitrates to the camera, preserving the
+  * other fields of the bitrate-layers XU control via read-modify-write. */
+ static void
+ set_bitrate (GstUvcH264Src * self)
+ {
+   uvcx_bitrate_layers_t layers;
+
+   if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_GET_CUR, (guchar *) & layers)) {
+     GST_WARNING_OBJECT (self, " BITRATE_LAYERS GET_CUR error");
+     return;
+   }
+
+   layers.dwPeakBitrate = self->peak_bitrate;
+   layers.dwAverageBitrate = self->average_bitrate;
+
+   if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_SET_CUR, (guchar *) & layers))
+     GST_WARNING_OBJECT (self, " BITRATE_LAYERS SET_CUR error");
+ }
+
+ /* Pushes the cached min/max QP for one frame type (I/P/B) to the camera.
+  *
+  * The QP-steps XU control is multiplexed by frame type, so the sequence
+  * below is order-critical:
+  *   1. SET_CUR with zero QPs to select the frame type,
+  *   2. GET_CUR to fetch the control contents for that frame type,
+  *   3. SET_CUR again with the real min/max values.
+  */
+ static void
+ set_qp (GstUvcH264Src * self, gint type)
+ {
+   uvcx_qp_steps_layers_t req;
+
+   req.wLayerID = 0;
+   switch (type) {
+     case QP_I_FRAME:
+       req.bFrameType = UVC_H264_QP_STEPS_I_FRAME_TYPE;
+       break;
+     case QP_P_FRAME:
+       req.bFrameType = UVC_H264_QP_STEPS_P_FRAME_TYPE;
+       break;
+     case QP_B_FRAME:
+       req.bFrameType = UVC_H264_QP_STEPS_B_FRAME_TYPE;
+       break;
+     default:
+       /* Unknown frame type: nothing to do */
+       return;
+   }
+   /* Step 1: zero QPs select the frame type without changing values */
+   req.bMinQp = 0;
+   req.bMaxQp = 0;
+   if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+     GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error");
+     return;
+   }
+
+   /* Step 2: read back the selected frame type's current settings */
+   if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_GET_CUR, (guchar *) & req)) {
+     GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS GET_CUR error");
+     return;
+   }
+
+   /* Step 3: write the real min/max QP values */
+   req.bMinQp = self->min_qp[type];
+   req.bMaxQp = self->max_qp[type];
+   if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+     GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error");
+     return;
+   }
+ }
+
+ /* Pushes the cached LTR (long-term reference) buffer size and encoder
+  * control to the camera via read-modify-write of the LTR XU control. */
+ static void
+ set_ltr (GstUvcH264Src * self)
+ {
+   uvcx_ltr_buffer_size_control_t ctrl;
+
+   if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_GET_CUR,
+           (guchar *) & ctrl)) {
+     GST_WARNING_OBJECT (self, " LTR_BUFFER_SIZE GET_CUR error");
+     return;
+   }
+
+   ctrl.bLTRBufferSize = self->ltr_buffer_size;
+   ctrl.bLTREncoderControl = self->ltr_encoder_control;
+
+   if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_SET_CUR,
+           (guchar *) & ctrl))
+     GST_WARNING_OBJECT (self, "LTR_BUFFER_SIZE SET_CUR error");
+ }
+
+ /* Get Dynamic controls */
+
+ /* Refreshes self->rate_control and self->fixed_framerate from the camera,
+  * emitting GObject "notify" signals for any value that changed. */
+ static void
+ update_rate_control (GstUvcH264Src * self)
+ {
+   uvcx_rate_control_mode_t mode;
+   guint8 rc;
+   gboolean fixed;
+
+   if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR, (guchar *) & mode)) {
+     GST_WARNING_OBJECT (self, " RATE_CONTROL GET_CUR error");
+     return;
+   }
+
+   /* The fixed-framerate flag is packed into the same byte as the mode */
+   rc = mode.bRateControlMode & ~UVC_H264_RATECONTROL_FIXED_FRM_FLG;
+   fixed = (mode.bRateControlMode & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0;
+
+   if (self->rate_control != rc) {
+     self->rate_control = rc;
+     g_object_notify (G_OBJECT (self), "rate-control");
+   }
+   if (self->fixed_framerate != fixed) {
+     self->fixed_framerate = fixed;
+     g_object_notify (G_OBJECT (self), "fixed-framerate");
+   }
+ }
+
+
+ /* Refreshes self->level_idc from the camera (notifying on change) and
+  * returns the device-reported maximum macroblocks-per-second, or 0 when
+  * the query fails. */
+ static guint32
+ update_level_idc_and_get_max_mbps (GstUvcH264Src * self)
+ {
+   uvcx_video_advance_config_t conf;
+
+   if (!xu_query (self, UVCX_VIDEO_ADVANCE_CONFIG, UVC_GET_CUR,
+           (guchar *) & conf)) {
+     GST_WARNING_OBJECT (self, " VIDEO_ADVANCE_CONFIG GET_CUR error");
+     return 0;
+   }
+
+   if (conf.blevel_idc != self->level_idc) {
+     self->level_idc = conf.blevel_idc;
+     g_object_notify (G_OBJECT (self), "level-idc");
+   }
+
+   return conf.dwMb_max;
+ }
+
+ /* Refreshes self->peak_bitrate and self->average_bitrate from the camera,
+  * emitting "notify" signals for any value that changed. */
+ static void
+ update_bitrate (GstUvcH264Src * self)
+ {
+   uvcx_bitrate_layers_t layers;
+
+   if (!xu_query (self, UVCX_BITRATE_LAYERS, UVC_GET_CUR, (guchar *) & layers)) {
+     GST_WARNING_OBJECT (self, " BITRATE_LAYERS GET_CUR error");
+     return;
+   }
+
+   if (layers.dwPeakBitrate != self->peak_bitrate) {
+     self->peak_bitrate = layers.dwPeakBitrate;
+     g_object_notify (G_OBJECT (self), "peak-bitrate");
+   }
+   if (layers.dwAverageBitrate != self->average_bitrate) {
+     self->average_bitrate = layers.dwAverageBitrate;
+     g_object_notify (G_OBJECT (self), "average-bitrate");
+   }
+ }
+
+ /* Refreshes self->min_qp[type]/self->max_qp[type] from the camera for one
+  * frame type (I/P/B), emitting "notify" for each value that changed.
+  *
+  * Same select-then-read protocol as set_qp(): a SET_CUR with zero QPs
+  * selects the frame type, then GET_CUR reads the values back.  Returns
+  * TRUE when the camera answered for the requested frame type; otherwise
+  * the cached QPs are set to the 0xFF sentinel and FALSE is returned
+  * (presumably meaning the camera does not support that frame type —
+  * NOTE(review): confirm against the UVC H.264 XU spec).
+  */
+ static gboolean
+ update_qp (GstUvcH264Src * self, gint type)
+ {
+   uvcx_qp_steps_layers_t req;
+   guint8 frame_type;
+
+   req.wLayerID = 0;
+   switch (type) {
+     case QP_I_FRAME:
+       frame_type = UVC_H264_QP_STEPS_I_FRAME_TYPE;
+       break;
+     case QP_P_FRAME:
+       frame_type = UVC_H264_QP_STEPS_P_FRAME_TYPE;
+       break;
+     case QP_B_FRAME:
+       frame_type = UVC_H264_QP_STEPS_B_FRAME_TYPE;
+       break;
+     default:
+       return FALSE;
+   }
+   /* Select the frame type without changing the stored QP values */
+   req.bFrameType = frame_type;
+   req.bMinQp = 0;
+   req.bMaxQp = 0;
+   if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_SET_CUR, (guchar *) & req)) {
+     GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS SET_CUR error");
+     return FALSE;
+   }
+
+   if (!xu_query (self, UVCX_QP_STEPS_LAYERS, UVC_GET_CUR, (guchar *) & req)) {
+     GST_WARNING_OBJECT (self, " QP_STEPS_LAYERS GET_CUR error");
+     return FALSE;
+   }
+
+   if (req.bFrameType == frame_type) {
+     if (self->min_qp[type] != req.bMinQp) {
+       self->min_qp[type] = req.bMinQp;
+       switch (type) {
+         case QP_I_FRAME:
+           g_object_notify (G_OBJECT (self), "min-iframe-qp");
+           break;
+         case QP_P_FRAME:
+           g_object_notify (G_OBJECT (self), "min-pframe-qp");
+           break;
+         case QP_B_FRAME:
+           g_object_notify (G_OBJECT (self), "min-bframe-qp");
+           break;
+         default:
+           break;
+       }
+     }
+     if (self->max_qp[type] != req.bMaxQp) {
+       self->max_qp[type] = req.bMaxQp;
+       switch (type) {
+         case QP_I_FRAME:
+           g_object_notify (G_OBJECT (self), "max-iframe-qp");
+           break;
+         case QP_P_FRAME:
+           g_object_notify (G_OBJECT (self), "max-pframe-qp");
+           break;
+         case QP_B_FRAME:
+           g_object_notify (G_OBJECT (self), "max-bframe-qp");
+           break;
+         default:
+           break;
+       }
+     }
+     return TRUE;
+   } else {
+     /* Camera answered for a different frame type: mark values unknown */
+     self->min_qp[type] = 0xFF;
+     self->max_qp[type] = 0xFF;
+     return FALSE;
+   }
+ }
+
+ /* Refreshes self->ltr_buffer_size and self->ltr_encoder_control from the
+  * camera, emitting "notify" signals for any value that changed. */
+ static void
+ update_ltr (GstUvcH264Src * self)
+ {
+   uvcx_ltr_buffer_size_control_t ctrl;
+
+   if (!xu_query (self, UVCX_LTR_BUFFER_SIZE_CONTROL, UVC_GET_CUR,
+           (guchar *) & ctrl)) {
+     GST_WARNING_OBJECT (self, " LTR_BUFFER_SIZE GET_CUR error");
+     return;
+   }
+
+   if (ctrl.bLTRBufferSize != self->ltr_buffer_size) {
+     self->ltr_buffer_size = ctrl.bLTRBufferSize;
+     g_object_notify (G_OBJECT (self), "ltr-buffer-size");
+   }
+   if (ctrl.bLTREncoderControl != self->ltr_encoder_control) {
+     self->ltr_encoder_control = ctrl.bLTREncoderControl;
+     g_object_notify (G_OBJECT (self), "ltr-encoder-control");
+   }
+ }
+
+ /* Copies the value found at byte `offset` inside the raw min/def/max
+  * buffers (min_p/def_p/max_p, in scope at the expansion site) into the
+  * caller's out parameters, reinterpreted as the given C type. */
+ #define STORE_MIN_DEF_MAX(type) \
+   *(type *)min = *((type *) (min_p + offset)); \
+   *(type *)def = *((type *) (def_p + offset)); \
+   *(type *)max = *((type *) (max_p + offset));
+
+ /* Queries the MIN/DEF/MAX values of one field of an XU control.
+  *
+  * `offset` is the field's byte offset inside the control structure and
+  * `size` encodes its width and signedness (negative = signed, magnitude =
+  * byte count; only 1, 2 and 4 are supported).  Returns TRUE on success
+  * with *min/*def/*max filled in.  Uses goto-based cleanup so the three
+  * scratch buffers are freed on every path.
+  *
+  * NOTE(review): the length reported by GET_LEN is not validated against
+  * offset + |size|, so this assumes the device reports a length covering
+  * the requested field — verify against misbehaving hardware.
+  */
+ static gboolean
+ probe_setting (GstUvcH264Src * self, uvcx_control_selector_t selector,
+     guint offset, gint size, gpointer min, gpointer def, gpointer max)
+ {
+   guchar *min_p, *def_p, *max_p;
+   gboolean ret = FALSE;
+   __u16 len;
+
+   if (!xu_query (self, selector, UVC_GET_LEN, (guchar *) & len)) {
+     GST_WARNING_OBJECT (self, "probe_setting GET_LEN error");
+     return FALSE;
+   }
+   min_p = g_malloc0 (len);
+   def_p = g_malloc0 (len);
+   max_p = g_malloc0 (len);
+
+   if (!xu_query (self, selector, UVC_GET_MIN, min_p)) {
+     GST_WARNING_OBJECT (self, "probe_setting GET_MIN error");
+     goto end;
+   }
+   if (!xu_query (self, selector, UVC_GET_DEF, def_p)) {
+     GST_WARNING_OBJECT (self, "probe_setting GET_DEF error");
+     goto end;
+   }
+   if (!xu_query (self, selector, UVC_GET_MAX, max_p)) {
+     GST_WARNING_OBJECT (self, "probe_setting GET_MAX error");
+     goto end;
+   }
+
+   /* Dispatch on the signed/width encoding of `size` */
+   switch (size) {
+     case -1:
+       STORE_MIN_DEF_MAX (gint8);
+       ret = TRUE;
+       break;
+     case 1:
+       STORE_MIN_DEF_MAX (guint8);
+       ret = TRUE;
+       break;
+     case -2:
+       STORE_MIN_DEF_MAX (gint16);
+       ret = TRUE;
+       break;
+     case 2:
+       STORE_MIN_DEF_MAX (guint16);
+       ret = TRUE;
+       break;
+     case -4:
+       STORE_MIN_DEF_MAX (gint32);
+       ret = TRUE;
+       break;
+     case 4:
+       STORE_MIN_DEF_MAX (guint32);
+       ret = TRUE;
+       break;
+     default:
+       break;
+   }
+
+ end:
+   g_free (min_p);
+   g_free (def_p);
+   g_free (max_p);
+
+   return ret;
+ }
+
+ /* Tests whether the camera accepts `value` for one field of the
+  * probe/commit control.
+  *
+  * Order-critical try-and-restore sequence: save the current settings,
+  * write the candidate value, read back what the device actually kept,
+  * then restore the saved settings before reporting whether the read-back
+  * matched.  `size` is 1 or 2 (field width in bytes).
+  */
+ static gboolean
+ test_enum_setting (GstUvcH264Src * self, guint offset, guint size,
+     guint16 value)
+ {
+   uvcx_video_config_probe_commit_t cur;
+   uvcx_video_config_probe_commit_t req;
+   guchar *req_p = (guchar *) & req;
+
+   /* Save current configuration so it can be restored afterwards */
+   if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, (guchar *) & cur)) {
+     GST_WARNING_OBJECT (self, " GET_CUR error");
+     return FALSE;
+   }
+
+   req = cur;
+
+   if (size == 1)
+     *((guint8 *) (req_p + offset)) = (guint8) value;
+   else
+     *((guint16 *) (req_p + offset)) = value;
+
+   /* Try the candidate value */
+   if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, req_p)) {
+     GST_WARNING_OBJECT (self, " SET_CUR error");
+     return FALSE;
+   }
+
+   /* Read back what the device actually accepted */
+   if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR, req_p)) {
+     GST_WARNING_OBJECT (self, " GET_CUR error");
+     return FALSE;
+   }
+
+   /* Restore the saved configuration */
+   if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR, (guchar *) & cur)) {
+     GST_WARNING_OBJECT (self, " SET_CUR error");
+     return FALSE;
+   }
+
+   if (size == 1)
+     return *((guint8 *) (req_p + offset)) == (guint8) value;
+   else
+     return *((guint16 *) (req_p + offset)) == value;
+ }
+
+ /* Probes which values of an enum-like H.264 control the camera supports.
+  *
+  * On success, *mask holds a bitmask with bit (1 << value) set for each
+  * supported value and *default_value holds the device default.  For
+  * "slice-mode" and "usage-type" every value in [min, max] is tried with
+  * test_enum_setting(); "rate-control" uses its dedicated XU control with
+  * a save/try/restore loop; "entropy" assumes only min and max are valid.
+  * Returns FALSE for unknown property names or query failures.
+  */
+ static gboolean
+ gst_uvc_h264_src_get_enum_setting (GstUvcH264Src * self, gchar * property,
+     gint * mask, gint * default_value)
+ {
+   guint8 min, def, max;
+   guint8 en;
+   gboolean ret = FALSE;
+
+   if (g_strcmp0 (property, "slice-mode") == 0) {
+     guint16 min16, def16, max16;
+     guint16 en16;
+
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, wSliceMode), 2,
+         &min16, &def16, &max16);
+     if (ret) {
+       *default_value = def16;
+       *mask = 0;
+       for (en16 = min16; en16 <= max16; en16++) {
+         if (test_enum_setting (self, offsetof (uvcx_video_config_probe_commit_t,
+                     wSliceMode), 2, en16))
+           *mask |= (1 << en16);
+       }
+     }
+   } else if (g_strcmp0 (property, "usage-type") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bUsageType), 1,
+         &min, &def, &max);
+     if (ret) {
+       *default_value = def;
+       *mask = 0;
+       for (en = min; en <= max; en++) {
+         if (test_enum_setting (self, offsetof (uvcx_video_config_probe_commit_t,
+                     bUsageType), 1, en))
+           *mask |= (1 << en);
+       }
+     }
+   } else if (g_strcmp0 (property, "entropy") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bEntropyCABAC), 1,
+         &min, &def, &max);
+     if (ret) {
+       /* Entropy is boolean-ish (CAVLC/CABAC): only the endpoints count */
+       *mask = (1 << min) | (1 << max);
+       *default_value = def;
+     }
+   } else if (g_strcmp0 (property, "rate-control") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bRateControlMode), 1,
+         &min, &def, &max);
+     if (ret) {
+       uvcx_rate_control_mode_t cur;
+
+       *default_value = def;
+       *mask = 0;
+
+       /* Save the current mode so it can be restored after probing */
+       if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR,
+               (guchar *) & cur)) {
+         GST_WARNING_OBJECT (self, " CONTROL_MODE GET_CUR error");
+         return FALSE;
+       }
+
+       /* Try every candidate mode and keep those the device accepts */
+       for (en = min; en <= max; en++) {
+         uvcx_rate_control_mode_t req = { 0, en };
+
+         if (xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR,
+                 (guchar *) & req) &&
+             xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_GET_CUR,
+                 (guchar *) & req) && req.bRateControlMode == en)
+           *mask |= (1 << en);
+       }
+       /* Restore the saved mode */
+       if (!xu_query (self, UVCX_RATE_CONTROL_MODE, UVC_SET_CUR,
+               (guchar *) & cur)) {
+         GST_WARNING_OBJECT (self, " CONTROL_MODE SET_CUR error");
+         return FALSE;
+       }
+     }
+   }
+
+   return ret;
+ }
+
+ /* Probes a boolean-like H.264 control: reports whether the camera allows
+  * changing it (*changeable, TRUE when min != max or the relevant flag can
+  * be set) and what its default is.  Returns FALSE for unknown property
+  * names or when the probe itself fails. */
+ static gboolean
+ gst_uvc_h264_src_get_boolean_setting (GstUvcH264Src * self, gchar * property,
+     gboolean * changeable, gboolean * default_value)
+ {
+   guint8 min, def, max;
+   gboolean ret = FALSE;
+
+   if (g_strcmp0 (property, "enable-sei") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bTimestamp), 1,
+         &min, &def, &max);
+     if (ret) {
+       *changeable = (min != max);
+       *default_value = (def != 0);
+     }
+   } else if (g_strcmp0 (property, "preview-flipped") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bPreviewFlipped), 1,
+         &min, &def, &max);
+     if (ret) {
+       *changeable = (min != max);
+       *default_value = (def != 0);
+     }
+   } else if (g_strcmp0 (property, "fixed-framerate") == 0) {
+     /* Encoded as a flag bit inside the rate-control mode byte */
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bRateControlMode),
+         1, &min, &def, &max);
+     if (ret) {
+       *changeable = ((max & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0);
+       *default_value = ((def & UVC_H264_RATECONTROL_FIXED_FRM_FLG) != 0);
+     }
+   }
+
+   return ret;
+ }
+
+ /* Probes the min/default/max of an integer H.264 control by property name.
+  *
+  * Each branch maps the property to the owning XU control, the field's
+  * byte offset and its width/signedness, and widens the result into the
+  * caller's gint out parameters.  The QP properties first call update_qp()
+  * so the QP-steps control is selected for the right frame type before
+  * probing (probe_setting() reads whatever frame type is selected).
+  * Returns FALSE when a probe fails; g_return_val_if_reached() fires for
+  * unknown property names.
+  */
+ static gboolean
+ gst_uvc_h264_src_get_int_setting (GstUvcH264Src * self, gchar * property,
+     gint * min, gint * def, gint * max)
+ {
+   guint32 min32, def32, max32;
+   guint16 min16, def16, max16;
+   guint8 min8, def8, max8;
+   gint8 smin8, sdef8, smax8;
+   gboolean ret = FALSE;
+
+   GST_DEBUG_OBJECT (self, "Probing int property %s", property);
+   if (g_strcmp0 (property, "initial-bitrate") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, dwBitRate), 4,
+         &min32, &def32, &max32);
+     if (ret) {
+       *min = min32;
+       *def = def32;
+       *max = max32;
+     }
+   } else if (g_strcmp0 (property, "slice-units") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, wSliceUnits), 2,
+         &min16, &def16, &max16);
+     if (ret) {
+       *min = min16;
+       *def = def16;
+       *max = max16;
+     }
+   } else if (g_strcmp0 (property, "iframe-period") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, wIFramePeriod), 2,
+         &min16, &def16, &max16);
+     if (ret) {
+       *min = min16;
+       *def = def16;
+       *max = max16;
+     }
+   } else if (g_strcmp0 (property, "num-reorder-frames") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, bNumOfReorderFrames), 1,
+         &min8, &def8, &max8);
+     if (ret) {
+       *min = min8;
+       *def = def8;
+       *max = max8;
+     }
+   } else if (g_strcmp0 (property, "leaky-bucket-size") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_CONFIG_PROBE,
+         offsetof (uvcx_video_config_probe_commit_t, wLeakyBucketSize), 2,
+         &min16, &def16, &max16);
+     if (ret) {
+       *min = min16;
+       *def = def16;
+       *max = max16;
+     }
+   } else if (g_strcmp0 (property, "level-idc") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_ADVANCE_CONFIG,
+         offsetof (uvcx_video_advance_config_t, blevel_idc), 1,
+         &min8, &def8, &max8);
+     if (ret) {
+       *min = min8;
+       *def = def8;
+       *max = max8;
+     }
+   } else if (g_strcmp0 (property, "max-mbps") == 0) {
+     ret = probe_setting (self, UVCX_VIDEO_ADVANCE_CONFIG,
+         offsetof (uvcx_video_advance_config_t, dwMb_max), 4,
+         &min32, &def32, &max32);
+     if (ret) {
+       *min = min32;
+       *def = def32;
+       *max = max32;
+     }
+   } else if (g_strcmp0 (property, "peak-bitrate") == 0) {
+     ret = probe_setting (self, UVCX_BITRATE_LAYERS,
+         offsetof (uvcx_bitrate_layers_t, dwPeakBitrate), 4,
+         &min32, &def32, &max32);
+     if (ret) {
+       *min = min32;
+       *def = def32;
+       *max = max32;
+     }
+   } else if (g_strcmp0 (property, "average-bitrate") == 0) {
+     ret = probe_setting (self, UVCX_BITRATE_LAYERS,
+         offsetof (uvcx_bitrate_layers_t, dwAverageBitrate), 4,
+         &min32, &def32, &max32);
+     if (ret) {
+       *min = min32;
+       *def = def32;
+       *max = max32;
+     }
+   } else if (g_strcmp0 (property, "min-iframe-qp") == 0) {
+     /* update_qp() selects the I-frame type before the probe */
+     if (update_qp (self, QP_I_FRAME))
+       ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+           offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8);
+     if (ret) {
+       *min = smin8;
+       *def = sdef8;
+       *max = smax8;
+     }
+   } else if (g_strcmp0 (property, "max-iframe-qp") == 0) {
+     if (update_qp (self, QP_I_FRAME))
+       ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+           offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8);
+     if (ret) {
+       *min = smin8;
+       *def = sdef8;
+       *max = smax8;
+     }
+   } else if (g_strcmp0 (property, "min-pframe-qp") == 0) {
+     if (update_qp (self, QP_P_FRAME))
+       ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+           offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8);
+     if (ret) {
+       *min = smin8;
+       *def = sdef8;
+       *max = smax8;
+     }
+   } else if (g_strcmp0 (property, "max-pframe-qp") == 0) {
+     if (update_qp (self, QP_P_FRAME))
+       ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+           offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8);
+     if (ret) {
+       *min = smin8;
+       *def = sdef8;
+       *max = smax8;
+     }
+   } else if (g_strcmp0 (property, "min-bframe-qp") == 0) {
+     if (update_qp (self, QP_B_FRAME))
+       ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+           offsetof (uvcx_qp_steps_layers_t, bMinQp), 1, &smin8, &sdef8, &smax8);
+     if (ret) {
+       *min = smin8;
+       *def = sdef8;
+       *max = smax8;
+     }
+   } else if (g_strcmp0 (property, "max-bframe-qp") == 0) {
+     if (update_qp (self, QP_B_FRAME))
+       ret = probe_setting (self, UVCX_QP_STEPS_LAYERS,
+           offsetof (uvcx_qp_steps_layers_t, bMaxQp), 1, &smin8, &sdef8, &smax8);
+     if (ret) {
+       *min = smin8;
+       *def = sdef8;
+       *max = smax8;
+     }
+   } else if (g_strcmp0 (property, "ltr-buffer-size") == 0) {
+     ret = probe_setting (self, UVCX_LTR_BUFFER_SIZE_CONTROL,
+         offsetof (uvcx_ltr_buffer_size_control_t, bLTRBufferSize), 1,
+         &min8, &def8, &max8);
+     if (ret) {
+       *min = min8;
+       *def = def8;
+       *max = max8;
+     }
+   } else if (g_strcmp0 (property, "ltr-encoder-control") == 0) {
+     ret = probe_setting (self, UVCX_LTR_BUFFER_SIZE_CONTROL,
+         offsetof (uvcx_ltr_buffer_size_control_t, bLTREncoderControl), 1,
+         &min8, &def8, &max8);
+     if (ret) {
+       *min = min8;
+       *def = def8;
+       *max = max8;
+     }
+   } else {
+     g_return_val_if_reached (FALSE);
+   }
+
+   return ret;
+ }
+
+ /* Pad probe for events on the source pads: tracks the segment from the
+  * video pad and drops EOS while the element is being reconfigured. */
+ static GstPadProbeReturn
+ gst_uvc_h264_src_event_probe (GstPad * pad, GstPadProbeInfo * info,
+     gpointer user_data)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+   GstEvent *event = info->data;
+
+   switch (GST_EVENT_TYPE (event)) {
+     case GST_EVENT_SEGMENT:
+       if (pad == self->vidsrc) {
+         const GstSegment *segment;
+
+         gst_event_parse_segment (event, &segment);
+         gst_segment_copy_into (segment, &self->segment);
+       }
+       return GST_PAD_PROBE_OK;
+     case GST_EVENT_EOS:
+       /* Swallow EOS caused by our own internal reconfiguration */
+       return self->reconfiguring ? GST_PAD_PROBE_DROP : GST_PAD_PROBE_OK;
+     default:
+       return GST_PAD_PROBE_OK;
+   }
+ }
+
+ /* Buffer probe on the H.264 video pad.
+  *
+  * When a pending upstream force-key-unit event is stored, converts it to
+  * the corresponding downstream force-key-unit event (with running/stream
+  * times computed from the tracked segment) and pushes it before the
+  * buffer, then clears the pending event.  Always lets the buffer pass.
+  */
+ static GstPadProbeReturn
+ gst_uvc_h264_src_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
+     gpointer user_data)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+   GstBuffer *buffer = info->data;
+
+   /* TODO: Check the NALU type and make sure it is a keyframe */
+   if (self->key_unit_event) {
+     GstClockTime ts, running_time, stream_time;
+     gboolean all_headers;
+     guint count;
+     GstEvent *downstream;
+
+     if (gst_video_event_parse_upstream_force_key_unit (self->key_unit_event,
+             &ts, &all_headers, &count)) {
+       /* Fall back to the buffer timestamp when the event carried none */
+       if (!GST_CLOCK_TIME_IS_VALID (ts)) {
+         ts = GST_BUFFER_TIMESTAMP (buffer);
+       }
+       running_time = gst_segment_to_running_time (&self->segment,
+           GST_FORMAT_TIME, ts);
+
+       stream_time = gst_segment_to_stream_time (&self->segment,
+           GST_FORMAT_TIME, ts);
+
+       GST_DEBUG_OBJECT (self, "Sending downstream force-key-unit : %d - %d ts=%"
+           GST_TIME_FORMAT " running time =%" GST_TIME_FORMAT " stream=%"
+           GST_TIME_FORMAT, all_headers, count, GST_TIME_ARGS (ts),
+           GST_TIME_ARGS (running_time), GST_TIME_ARGS (stream_time));
+       downstream = gst_video_event_new_downstream_force_key_unit (ts,
+           stream_time, running_time, all_headers, count);
+       gst_pad_push_event (self->vidsrc, downstream);
+       gst_event_replace (&self->key_unit_event, NULL);
+     }
+   }
+   /* Fix: this callback returns GstPadProbeReturn, not gboolean.  The
+    * original "return TRUE" only worked because TRUE happens to equal
+    * GST_PAD_PROBE_OK; return the proper enum value. */
+   return GST_PAD_PROBE_OK;
+ }
+
+ /* Handles the custom upstream events understood by uvch264src (and the
+  * force-key-unit event) on the video pad when streaming H.264.
+  *
+  * Returns TRUE when the event was consumed (ownership of `event` taken
+  * and released), FALSE when the caller should keep forwarding it.
+  */
+ static gboolean
+ gst_uvc_h264_src_parse_event (GstUvcH264Src * self, GstPad * pad,
+     GstEvent * event)
+ {
+   const GstStructure *s = gst_event_get_structure (event);
+
+   switch (GST_EVENT_TYPE (event)) {
+     case GST_EVENT_CUSTOM_UPSTREAM:
+       if (pad == self->vidsrc && self->main_format == UVC_H264_SRC_FORMAT_H264) {
+         if (gst_video_event_is_force_key_unit (event)) {
+           uvcx_picture_type_control_t req = { 0, 0 };
+           GstClockTime ts;
+           gboolean all_headers;
+
+           if (gst_video_event_parse_upstream_force_key_unit (event,
+                   &ts, &all_headers, NULL)) {
+             GST_INFO_OBJECT (self, "Received upstream force-key-unit : %d %"
+                 GST_TIME_FORMAT, all_headers, GST_TIME_ARGS (ts));
+             /* TODO: wait until 'ts' time is reached */
+             if (all_headers)
+               req.wPicType = UVC_H264_PICTYPE_IDR_WITH_PPS_SPS;
+             else
+               req.wPicType = UVC_H264_PICTYPE_IDR;
+
+             if (!xu_query (self, UVCX_PICTURE_TYPE_CONTROL, UVC_SET_CUR,
+                     (guchar *) & req)) {
+               GST_WARNING_OBJECT (self, " PICTURE_TYPE_CONTROL SET_CUR error");
+             } else {
+               /* Keep a ref for the buffer probe, then drop the caller's */
+               gst_event_replace (&self->key_unit_event, event);
+               gst_event_unref (event);
+
+               return TRUE;
+             }
+           }
+         } else if (s &&
+             gst_structure_has_name (s, "uvc-h264-ltr-picture-control")) {
+           guint put_at, encode_using;
+
+           if (gst_structure_get_uint (s, "put-at", &put_at) &&
+               gst_structure_get_uint (s, "encode-using", &encode_using)) {
+             uvcx_ltr_picture_control req = { 0, put_at, encode_using };
+
+             if (!xu_query (self, UVCX_LTR_PICTURE_CONTROL, UVC_SET_CUR,
+                     (guchar *) & req)) {
+               GST_WARNING_OBJECT (self, " LTR PICTURE_CONTROL SET_CUR error");
+             }
+           }
+           /* Fix: this branch always consumes the event, but the original
+            * leaked it on the missing-field and SET_CUR-error paths by
+            * returning TRUE without unreffing. */
+           gst_event_unref (event);
+           return TRUE;
+         } else if (s && gst_structure_has_name (s, "uvc-h264-bitrate-control")) {
+           guint average, peak;
+
+           if (gst_structure_get_uint (s, "average-bitrate", &average) &&
+               gst_structure_get_uint (s, "peak-bitrate", &peak)) {
+             self->average_bitrate = average;
+             self->peak_bitrate = peak;
+             set_bitrate (self);
+             update_bitrate (self);
+
+             gst_event_unref (event);
+
+             return TRUE;
+           }
+         } else if (s && gst_structure_has_name (s, "uvc-h264-qp-control")) {
+           gint min_qp, max_qp;
+           gboolean valid_event = FALSE;
+
+           if (gst_structure_get_int (s, "min-iframe-qp", &min_qp) &&
+               gst_structure_get_int (s, "max-iframe-qp", &max_qp)) {
+             self->min_qp[QP_I_FRAME] = min_qp;
+             self->max_qp[QP_I_FRAME] = max_qp;
+             set_qp (self, QP_I_FRAME);
+             update_qp (self, QP_I_FRAME);
+             valid_event = TRUE;
+           }
+           if (gst_structure_get_int (s, "min-pframe-qp", &min_qp) &&
+               gst_structure_get_int (s, "max-pframe-qp", &max_qp)) {
+             self->min_qp[QP_P_FRAME] = min_qp;
+             self->max_qp[QP_P_FRAME] = max_qp;
+             set_qp (self, QP_P_FRAME);
+             update_qp (self, QP_P_FRAME);
+             valid_event = TRUE;
+           }
+           if (gst_structure_get_int (s, "min-bframe-qp", &min_qp) &&
+               gst_structure_get_int (s, "max-bframe-qp", &max_qp)) {
+             self->min_qp[QP_B_FRAME] = min_qp;
+             self->max_qp[QP_B_FRAME] = max_qp;
+             set_qp (self, QP_B_FRAME);
+             update_qp (self, QP_B_FRAME);
+             valid_event = TRUE;
+           }
+
+           if (valid_event) {
+             gst_event_unref (event);
+
+             return TRUE;
+           }
+         } else if (s && gst_structure_has_name (s, "uvc-h264-rate-control")) {
+           UvcH264RateControl rate;
+           gboolean fixed_framerate;
+
+           if (gst_structure_get_enum (s, "rate-control",
+                   UVC_H264_RATECONTROL_TYPE, (gint *) & rate) &&
+               gst_structure_get_boolean (s, "fixed-framerate",
+                   &fixed_framerate)) {
+             self->rate_control = rate;
+             self->fixed_framerate = fixed_framerate;
+             set_rate_control (self);
+             update_rate_control (self);
+
+             gst_event_unref (event);
+
+             return TRUE;
+           }
+         } else if (s && gst_structure_has_name (s, "uvc-h264-level-idc")) {
+           guint level_idc;
+
+           if (gst_structure_get_uint (s, "level-idc", &level_idc)) {
+             self->level_idc = level_idc;
+             set_level_idc (self);
+             update_level_idc_and_get_max_mbps (self);
+
+             gst_event_unref (event);
+             /* Fix: the original fell through to "return FALSE" after
+              * unreffing, so the caller went on forwarding a freed event
+              * (use-after-free).  The event is consumed here. */
+             return TRUE;
+           }
+         }
+       }
+       break;
+     default:
+       break;
+   }
+
+   return FALSE;
+ }
+
+ /* GstElement::send_event vfunc: gives the custom uvch264 event handler a
+  * first look (aimed at the video pad) before falling back to the parent
+  * class implementation. */
+ static gboolean
+ gst_uvc_h264_src_send_event (GstElement * element, GstEvent * event)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (element);
+
+   return gst_uvc_h264_src_parse_event (self, self->vidsrc, event) ||
+       GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+ }
+
+ /* Source pad event function: keeps the video pad's segment tracking in
+  * sync, resets it on flush-stop, hands custom events to the uvch264
+  * parser, and chains up to the original pad event function otherwise. */
+ static gboolean
+ gst_uvc_h264_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (GST_PAD_PARENT (pad));
+   GstEventType etype = GST_EVENT_TYPE (event);
+
+   if (etype == GST_EVENT_SEGMENT) {
+     if (pad == self->vidsrc) {
+       const GstSegment *segment;
+
+       gst_event_parse_segment (event, &segment);
+       gst_segment_copy_into (segment, &self->segment);
+     }
+   } else if (etype == GST_EVENT_FLUSH_STOP) {
+     if (pad == self->vidsrc)
+       gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
+   } else if (gst_uvc_h264_src_parse_event (self, pad, event)) {
+     return TRUE;
+   }
+
+   return self->srcpad_event_func (pad, parent, event);
+ }
+
+ /* Issues a UVC extension-unit request against the opened v4l2 device.
+  *
+  * Always queries GET_LEN first to learn the control's size.  For a
+  * UVC_GET_LEN request, the length itself is returned through `data`
+  * (cast to __u16 *); for any other request, `data` must point to a
+  * buffer of at least the reported length.  Returns FALSE when the fd is
+  * not open or the UVCIOC_CTRL_QUERY ioctl fails.
+  */
+ static gboolean
+ xu_query (GstUvcH264Src * self, guint selector, guint query, guchar * data)
+ {
+   struct uvc_xu_control_query xu;
+   __u16 len;
+
+   if (self->v4l2_fd == -1) {
+     GST_WARNING_OBJECT (self, "Can't query XU with fd = -1");
+     return FALSE;
+   }
+
+   xu.unit = self->h264_unit_id;
+   xu.selector = selector;
+
+   /* First learn the control's size */
+   xu.query = UVC_GET_LEN;
+   xu.size = sizeof (len);
+   xu.data = (unsigned char *) &len;
+   if (-1 == ioctl (self->v4l2_fd, UVCIOC_CTRL_QUERY, &xu)) {
+     GST_WARNING_OBJECT (self, "PROBE GET_LEN error");
+     return FALSE;
+   }
+
+   if (query == UVC_GET_LEN) {
+     *((__u16 *) data) = len;
+   } else {
+     /* Then perform the actual request with the reported size */
+     xu.query = query;
+     xu.size = len;
+     xu.data = data;
+     if (-1 == ioctl (self->v4l2_fd, UVCIOC_CTRL_QUERY, &xu)) {
+       return FALSE;
+     }
+   }
+
+   return TRUE;
+ }
+
+ /* Fill a PROBE/COMMIT structure from the element's property values and
+  * the negotiated stream parameters. Only the fields listed here are
+  * written; the caller is responsible for initializing the rest of the
+  * structure (e.g. bmHints, bStreamMuxOption). */
+ static void
+ fill_probe_commit (GstUvcH264Src * self,
+ uvcx_video_config_probe_commit_t * probe, guint32 frame_interval,
+ guint32 width, guint32 height, guint32 profile,
+ UvcH264StreamFormat stream_format)
+ {
+ probe->dwFrameInterval = frame_interval;
+ probe->dwBitRate = self->initial_bitrate;
+ probe->wWidth = width;
+ probe->wHeight = height;
+ probe->wSliceUnits = self->slice_units;
+ probe->wSliceMode = self->slice_mode;
+ probe->wProfile = profile;
+ probe->wIFramePeriod = self->iframe_period;
+ probe->bUsageType = self->usage_type;
+ probe->bRateControlMode = self->rate_control;
+ /* The fixed-framerate flag is OR-ed into the rate control mode byte */
+ if (self->fixed_framerate)
+ probe->bRateControlMode |= UVC_H264_RATECONTROL_FIXED_FRM_FLG;
+ probe->bStreamFormat = stream_format;
+ probe->bEntropyCABAC = self->entropy;
+ probe->bTimestamp = self->enable_sei ?
+ UVC_H264_TIMESTAMP_SEI_ENABLE : UVC_H264_TIMESTAMP_SEI_DISABLE;
+ probe->bNumOfReorderFrames = self->num_reorder_frames;
+ probe->bPreviewFlipped = self->preview_flipped ?
+ UVC_H264_PREFLIPPED_HORIZONTAL : UVC_H264_PREFLIPPED_DISABLE;
+ probe->wLeakyBucketSize = self->leaky_bucket_size;
+ }
+
+ /* Dump every field of a PROBE/COMMIT structure at DEBUG level, for
+  * tracing the PROBE/COMMIT negotiation with the camera. */
+ static void
+ print_probe_commit (GstUvcH264Src * self,
+ uvcx_video_config_probe_commit_t * probe)
+ {
+ GST_DEBUG_OBJECT (self, " Frame interval : %d *100ns",
+ probe->dwFrameInterval);
+ GST_DEBUG_OBJECT (self, " Bit rate : %d", probe->dwBitRate);
+ GST_DEBUG_OBJECT (self, " Hints : %X", probe->bmHints);
+ GST_DEBUG_OBJECT (self, " Configuration index : %d",
+ probe->wConfigurationIndex);
+ GST_DEBUG_OBJECT (self, " Width : %d", probe->wWidth);
+ GST_DEBUG_OBJECT (self, " Height : %d", probe->wHeight);
+ GST_DEBUG_OBJECT (self, " Slice units : %d", probe->wSliceUnits);
+ GST_DEBUG_OBJECT (self, " Slice mode : %X", probe->wSliceMode);
+ GST_DEBUG_OBJECT (self, " Profile : %X", probe->wProfile);
+ GST_DEBUG_OBJECT (self, " IFrame Period : %d ms", probe->wIFramePeriod);
+ GST_DEBUG_OBJECT (self, " Estimated video delay : %d ms",
+ probe->wEstimatedVideoDelay);
+ GST_DEBUG_OBJECT (self, " Estimated max config delay : %d ms",
+ probe->wEstimatedMaxConfigDelay);
+ GST_DEBUG_OBJECT (self, " Usage type : %X", probe->bUsageType);
+ GST_DEBUG_OBJECT (self, " Rate control mode : %X", probe->bRateControlMode);
+ GST_DEBUG_OBJECT (self, " Temporal scale mode : %X",
+ probe->bTemporalScaleMode);
+ GST_DEBUG_OBJECT (self, " Spatial scale mode : %X",
+ probe->bSpatialScaleMode);
+ GST_DEBUG_OBJECT (self, " SNR scale mode : %X", probe->bSNRScaleMode);
+ GST_DEBUG_OBJECT (self, " Stream mux option : %X", probe->bStreamMuxOption);
+ GST_DEBUG_OBJECT (self, " Stream Format : %X", probe->bStreamFormat);
+ GST_DEBUG_OBJECT (self, " Entropy CABAC : %X", probe->bEntropyCABAC);
+ GST_DEBUG_OBJECT (self, " Timestamp : %X", probe->bTimestamp);
+ GST_DEBUG_OBJECT (self, " Num of reorder frames : %d",
+ probe->bNumOfReorderFrames);
+ GST_DEBUG_OBJECT (self, " Preview flipped : %X", probe->bPreviewFlipped);
+ GST_DEBUG_OBJECT (self, " View : %d", probe->bView);
+ GST_DEBUG_OBJECT (self, " Stream ID : %X", probe->bStreamID);
+ /* bSpatialLayerRatio is 4.4 fixed point: integer part in the high
+ * nibble, fractional sixteenths in the low nibble */
+ GST_DEBUG_OBJECT (self, " Spatial layer ratio : %f",
+ ((probe->bSpatialLayerRatio & 0xF0) >> 4) +
+ ((float) (probe->bSpatialLayerRatio & 0x0F)) / 16);
+ GST_DEBUG_OBJECT (self, " Leaky bucket size : %d ms",
+ probe->wLeakyBucketSize);
+ }
+
+ /* Negotiate and commit the H264 encoder configuration through the UVC
+  * PROBE/COMMIT extension-unit controls.
+  *
+  * The secondary (raw) format is committed first so the final SET_CUR is
+  * for the H264 format (see the comment below). The MIN/MAX/DEF probe
+  * values are then dumped for debugging, and finally the main H264
+  * configuration is probed and committed.
+  *
+  * The fd argument is unused here: xu_query() uses the fd cached on
+  * self. All failures are logged and abort the configuration early. */
+ static void
+ configure_h264 (GstUvcH264Src * self, gint fd)
+ {
+ uvcx_video_config_probe_commit_t probe;
+ 
+ /* Set the secondary format first, so the last SET_CUR will be for the
+ * H264 format. This way, we can still get the static control values with
+ * a GET_CUR. Otherwise all static properties will return 0 because that's
+ * what the GET_CUR of the raw format returns.
+ */
+ if (self->secondary_format == UVC_H264_SRC_FORMAT_RAW) {
+ memset (&probe, 0, sizeof (probe));
+ probe.dwFrameInterval = self->secondary_frame_interval;
+ probe.wWidth = self->secondary_width;
+ probe.wHeight = self->secondary_height;
+ probe.bStreamMuxOption = 5;
+ 
+ GST_DEBUG_OBJECT (self, "RAW PROBE SET_CUR : ");
+ print_probe_commit (self, &probe);
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return;
+ }
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "RAW PROBE GET_CUR : ");
+ print_probe_commit (self, &probe);
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_COMMIT, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "COMMIT SET_CUR error");
+ return;
+ }
+ }
+ /* Print MIN/MAX/DEF probe values for debugging purposes */
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_MIN,
+ (guchar *) & probe)) {
+ /* Fixed: these three warnings used to say "PROBE GET_CUR error" */
+ GST_WARNING_OBJECT (self, "PROBE GET_MIN error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_MIN : ");
+ print_probe_commit (self, &probe);
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_MAX,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_MAX error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_MAX : ");
+ print_probe_commit (self, &probe);
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_DEF,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_DEF error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_DEF : ");
+ print_probe_commit (self, &probe);
+ 
+ /* Build the H264 probe from the negotiated main stream settings */
+ fill_probe_commit (self, &probe, self->main_frame_interval,
+ self->main_width, self->main_height, self->main_profile,
+ self->main_stream_format);
+ if (self->secondary_format != UVC_H264_SRC_FORMAT_NONE)
+ probe.bStreamMuxOption = 3;
+ else
+ probe.bStreamMuxOption = 0;
+ probe.bmHints = UVC_H264_BMHINTS_RESOLUTION | UVC_H264_BMHINTS_PROFILE |
+ UVC_H264_BMHINTS_FRAME_INTERVAL;
+ 
+ GST_DEBUG_OBJECT (self, "PROBE SET_CUR : ");
+ print_probe_commit (self, &probe);
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return;
+ }
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return;
+ }
+ GST_DEBUG_OBJECT (self, "PROBE GET_CUR : ");
+ print_probe_commit (self, &probe);
+ 
+ /* Must validate the settings accepted by the encoder */
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_COMMIT, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "COMMIT SET_CUR error");
+ return;
+ }
+ }
+
+ /* "prepare-format" signal callback of v4l2src, invoked with the device
+  * fd and the chosen caps just before the format is set on the device.
+  * When the main stream is H264, commit the encoder configuration via
+  * the extension unit and push all current dynamic control values to
+  * the hardware. */
+ static void
+ v4l2src_prepare_format (GstElement * v4l2src, gint fd, GstCaps * caps,
+ gpointer user_data)
+ {
+ GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+ 
+ if (self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ /* TODO: update static controls and g_object_notify those that changed */
+ configure_h264 (self, fd);
+ 
+ /* TODO: update dynamic controls on READY state */
+ /* Configure dynamic controls */
+ set_rate_control (self);
+ update_rate_control (self);
+ set_level_idc (self);
+ update_level_idc_and_get_max_mbps (self);
+ set_bitrate (self);
+ update_bitrate (self);
+ set_qp (self, QP_I_FRAME);
+ update_qp (self, QP_I_FRAME);
+ set_qp (self, QP_P_FRAME);
+ update_qp (self, QP_P_FRAME);
+ set_qp (self, QP_B_FRAME);
+ update_qp (self, QP_B_FRAME);
+ set_ltr (self);
+ update_ltr (self);
+ }
+ }
+
+ /* Extract width, height and frame interval (in 100ns units, as UVC
+  * expects) from a fixated caps structure.
+  *
+  * Returns FALSE when any of the three fields is missing or when the
+  * framerate numerator is not positive: a 0/1 "variable framerate"
+  * would otherwise cause an integer division by zero below. */
+ static gboolean
+ _extract_caps_info (GstStructure * structure, guint16 * width, guint16 * height,
+ guint32 * frame_interval)
+ {
+ gint w, h, fps_n, fps_d;
+ gboolean ret = TRUE;
+ 
+ ret &= gst_structure_get_int (structure, "width", &w);
+ ret &= gst_structure_get_int (structure, "height", &h);
+ ret &= gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
+ 
+ /* Fixed: guard against a zero (or negative) numerator before dividing */
+ if (ret && fps_n <= 0)
+ ret = FALSE;
+ 
+ if (ret) {
+ *width = w;
+ *height = h;
+ /* Interval is in 100ns */
+ *frame_interval = GST_TIME_AS_NSECONDS ((fps_d * GST_SECOND) / fps_n) / 100;
+ }
+ 
+ return ret;
+ }
+
+ /* Map the "profile" string of an h264 caps structure to its
+  * UVC_H264_PROFILE_* constant. Defaults to high profile when the field
+  * is absent or not one of the recognized names. */
+ static guint16
+ _extract_profile (GstStructure * structure)
+ {
+ static const struct
+ {
+ const gchar *name;
+ guint16 value;
+ } profile_map[] = {
+ {"constrained-baseline", UVC_H264_PROFILE_CONSTRAINED_BASELINE},
+ {"baseline", UVC_H264_PROFILE_BASELINE},
+ {"main", UVC_H264_PROFILE_MAIN},
+ {"high", UVC_H264_PROFILE_HIGH},
+ };
+ const gchar *profile_str;
+ guint i;
+ 
+ profile_str = gst_structure_get_string (structure, "profile");
+ if (profile_str != NULL) {
+ for (i = 0; i < G_N_ELEMENTS (profile_map); i++) {
+ if (strcmp (profile_str, profile_map[i].name) == 0)
+ return profile_map[i].value;
+ }
+ }
+ 
+ return UVC_H264_PROFILE_HIGH;
+ }
+
+ /* Map the "stream-format" string of an h264 caps structure to a
+  * UvcH264StreamFormat. byte-stream (Annex-B) is the default when the
+  * field is absent or unrecognized. */
+ static UvcH264StreamFormat
+ _extract_stream_format (GstStructure * structure)
+ {
+ const gchar *fmt;
+ 
+ fmt = gst_structure_get_string (structure, "stream-format");
+ if (fmt != NULL && strcmp (fmt, "avc") == 0)
+ return UVC_H264_STREAMFORMAT_NAL;
+ 
+ return UVC_H264_STREAMFORMAT_ANNEXB;
+ }
+
+ /* Compute the caps that element @name (e.g. a colorspace converter)
+  * would accept on its sink pad when its source side is constrained to
+  * @caps. The element, a capsfilter and a fakesink are temporarily
+  * added to the bin (state-locked so they stay in NULL), linked, and
+  * the element's sink pad is queried; everything is removed again
+  * before returning.
+  *
+  * Returns a new caps ref; on any failure a copy of @caps is returned.
+  * The input @caps is never consumed. */
+ static GstCaps *
+ _transform_caps (GstUvcH264Src * self, GstCaps * caps, const gchar * name)
+ {
+ GstElement *el = gst_element_factory_make (name, NULL);
+ GstElement *cf = gst_element_factory_make ("capsfilter", NULL);
+ GstElement *fs = gst_element_factory_make ("fakesink", NULL);
+ GstPad *sink;
+ GstCaps *out_caps = NULL;
+ 
+ if (!el || !cf || !fs) {
+ if (el)
+ gst_object_unref (el);
+ if (cf)
+ gst_object_unref (cf);
+ if (fs)
+ gst_object_unref (fs);
+ goto done;
+ }
+ 
+ /* Lock the states so the temporary elements never follow the bin */
+ gst_element_set_locked_state (el, TRUE);
+ gst_element_set_locked_state (cf, TRUE);
+ gst_element_set_locked_state (fs, TRUE);
+ 
+ if (!gst_bin_add (GST_BIN (self), el)) {
+ gst_object_unref (el);
+ gst_object_unref (cf);
+ gst_object_unref (fs);
+ goto done;
+ }
+ if (!gst_bin_add (GST_BIN (self), cf)) {
+ gst_object_unref (cf);
+ gst_object_unref (fs);
+ gst_bin_remove (GST_BIN (self), el);
+ goto done;
+ }
+ if (!gst_bin_add (GST_BIN (self), fs)) {
+ gst_object_unref (fs);
+ gst_bin_remove (GST_BIN (self), el);
+ gst_bin_remove (GST_BIN (self), cf);
+ goto done;
+ }
+ 
+ g_object_set (cf, "caps", caps, NULL);
+ 
+ if (!gst_element_link (cf, fs))
+ goto error_remove;
+ if (!gst_element_link (el, cf))
+ goto error_remove;
+ 
+ sink = gst_element_get_static_pad (el, "sink");
+ if (!sink)
+ goto error_remove;
+ GST_DEBUG_OBJECT (self, "Transforming: %" GST_PTR_FORMAT, caps);
+ 
+ /* Fixed: the query result must be stored in out_caps. It was assigned
+ * to the caps parameter, which clobbered the input, left out_caps NULL
+ * (so the "Result" debug always printed NULL) and made the done: path
+ * below return a copy of the queried caps while leaking the original
+ * query result. */
+ out_caps = gst_pad_query_caps (sink, NULL);
+ gst_object_unref (sink);
+ 
+ GST_DEBUG_OBJECT (self, "Result: %" GST_PTR_FORMAT, out_caps);
+ 
+ error_remove:
+ gst_bin_remove (GST_BIN (self), cf);
+ gst_bin_remove (GST_BIN (self), el);
+ gst_bin_remove (GST_BIN (self), fs);
+ 
+ done:
+ /* Fall back to an unmodified copy of the input on failure */
+ if (out_caps == NULL)
+ out_caps = gst_caps_copy (caps);
+ 
+ return out_caps;
+ }
+
+ /* Transform downstream caps into what v4l2src should be asked to
+  * produce: run them through the colorspace converter's caps transform
+  * and re-append any H264 / jpeg subsets of the original caps (which
+  * the converter cannot pass through). The input @caps is not consumed;
+  * a new caps ref is returned. */
+ static GstCaps *
+ gst_uvc_h264_src_transform_caps (GstUvcH264Src * self, GstCaps * caps)
+ {
+ GstCaps *h264 = gst_caps_new_empty_simple ("video/x-h264");
+ GstCaps *jpg = gst_caps_new_empty_simple ("image/jpeg");
+ GstCaps *h264_caps = gst_caps_intersect (h264, caps);
+ GstCaps *jpg_caps = gst_caps_intersect (jpg, caps);
+ 
+ /* TODO: Keep caps order after transformation */
+ /* _transform_caps() returns a new ref; the local pointer is simply
+ * re-targeted, the caller still owns the original caps */
+ caps = _transform_caps (self, caps, self->colorspace_name);
+ caps = gst_caps_make_writable (caps);
+ 
+ /* gst_caps_append() takes ownership of the appended caps, so only
+ * unref the intersections we do not append */
+ if (!gst_caps_is_empty (h264_caps)) {
+ gst_caps_append (caps, h264_caps);
+ } else {
+ gst_caps_unref (h264_caps);
+ }
+ 
+ if (!gst_caps_is_empty (jpg_caps)) {
+ gst_caps_append (caps, jpg_caps);
+ } else {
+ gst_caps_unref (jpg_caps);
+ }
+ 
+ gst_caps_unref (h264);
+ gst_caps_unref (jpg);
+ 
+ return caps;
+ }
+
+ /* Pick, among the caps the peer proposes, the first candidate that
+  * both v4l2src can produce and the camera's H264 extension unit
+  * accepts (verified with a PROBE SET_CUR / GET_CUR round-trip), then
+  * fixate it. @primary selects the vidsrc (H264) rules versus the
+  * vfsrc (raw/jpeg secondary stream) rules.
+  *
+  * Returns a new fixated caps ref, or NULL when the v4l caps are
+  * unusable or no candidate matches.
+  *
+  * NOTE(review): icaps does not appear to be unreffed on any path
+  * (including the early "return NULL" branches below, which also leak
+  * the current ipcaps) — possible caps leaks, confirm against the
+  * GstCaps ownership rules. */
+ static GstCaps *
+ gst_uvc_h264_src_fixate_caps (GstUvcH264Src * self, GstPad * v4l_pad,
+ GstCaps * v4l_caps, GstCaps * peer_caps, gboolean primary)
+ {
+ GstCaps *caps = NULL;
+ GstCaps *icaps = NULL;
+ GstCaps *tcaps = NULL;
+ int i;
+ 
+ if (v4l_caps == NULL || gst_caps_is_any (v4l_caps)) {
+ GST_DEBUG_OBJECT (self, "v4l caps are invalid. not fixating");
+ return NULL;
+ }
+ 
+ tcaps = gst_caps_intersect_full (peer_caps, v4l_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ GST_DEBUG_OBJECT (self, "intersect: %" GST_PTR_FORMAT, tcaps);
+ /* gst_caps_normalize() takes ownership of tcaps */
+ icaps = gst_caps_normalize (tcaps);
+ 
+ /* Prefer the first caps we are compatible with that the peer proposed */
+ for (i = 0; i < gst_caps_get_size (icaps); i++) {
+ /* get intersection */
+ GstCaps *ipcaps = gst_caps_copy_nth (icaps, i);
+ GstStructure *s = gst_caps_get_structure (ipcaps, 0);
+ 
+ GST_DEBUG_OBJECT (self, "Testing %s: %" GST_PTR_FORMAT,
+ primary ? "primary" : "secondary", ipcaps);
+ if (primary && gst_structure_has_name (s, "video/x-h264")) {
+ uvcx_video_config_probe_commit_t probe;
+ guint16 width;
+ guint16 height;
+ guint32 interval;
+ guint16 profile;
+ UvcH264StreamFormat stream_format;
+ 
+ /* Ask the camera whether it accepts this H264 configuration */
+ if (_extract_caps_info (s, &width, &height, &interval)) {
+ profile = _extract_profile (s);
+ stream_format = _extract_stream_format (s);
+ fill_probe_commit (self, &probe, interval, width, height,
+ profile, stream_format);
+ probe.bmHints = UVC_H264_BMHINTS_RESOLUTION |
+ UVC_H264_BMHINTS_PROFILE | UVC_H264_BMHINTS_FRAME_INTERVAL;
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return NULL;
+ }
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return NULL;
+ }
+ GST_DEBUG_OBJECT (self, "Probe gives us %d==%d, %d==%d, %d==%d",
+ probe.wWidth, width, probe.wHeight, height,
+ probe.bStreamFormat, stream_format);
+ /* The camera accepted the configuration unchanged */
+ if (probe.wWidth == width && probe.wHeight == height &&
+ probe.bStreamFormat == stream_format) {
+ caps = ipcaps;
+ break;
+ }
+ }
+ } else if (!primary && self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ uvcx_video_config_probe_commit_t probe;
+ guint16 width;
+ guint16 height;
+ guint32 interval;
+ 
+ if (_extract_caps_info (s, &width, &height, &interval)) {
+ if (gst_structure_has_name (s, "video/x-raw")) {
+ guint8 mux = 0;
+ /* NOTE(review): "format" is fetched twice here; the first
+ * lookup is redundant */
+ const gchar *format = gst_structure_get_string (s, "format");
+ 
+ if ((format = gst_structure_get_string (s, "format"))) {
+ if (g_strcmp0 (format, "YUY2") == 0)
+ mux = 4;
+ else if (g_strcmp0 (format, "NV12") == 0)
+ mux = 8;
+ }
+ if (mux != 0) {
+ memset (&probe, 0, sizeof (probe));
+ probe.dwFrameInterval = interval;
+ probe.wWidth = width;
+ probe.wHeight = height;
+ probe.bStreamMuxOption = mux | 1;
+ probe.bmHints = UVC_H264_BMHINTS_RESOLUTION |
+ UVC_H264_BMHINTS_PROFILE | UVC_H264_BMHINTS_FRAME_INTERVAL;
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_SET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE SET_CUR error");
+ return NULL;
+ }
+ 
+ if (!xu_query (self, UVCX_VIDEO_CONFIG_PROBE, UVC_GET_CUR,
+ (guchar *) & probe)) {
+ GST_WARNING_OBJECT (self, "PROBE GET_CUR error");
+ return NULL;
+ }
+ GST_DEBUG_OBJECT (self, "Probe gives us %d==%d, %d==%d, %d~=%d",
+ probe.wWidth, width, probe.wHeight, height,
+ probe.bStreamMuxOption, mux);
+ if (probe.wWidth == width && probe.wHeight == height &&
+ (probe.bStreamMuxOption & mux) != 0) {
+ caps = ipcaps;
+ break;
+ }
+ }
+ } else if (gst_structure_has_name (s, "image/jpeg")) {
+ /* HACK ALERT: No way of figuring this one out but it seems the
+ * camera doesn't allow for h264 muxing and jpeg resolution higher
+ * than 640x480 so we shouldn't allow it */
+ if (width <= 640 && height <= 480) {
+ caps = ipcaps;
+ break;
+ }
+ }
+ }
+ } else {
+ /* No hardware constraint applies: take the first candidate */
+ caps = ipcaps;
+ break;
+ }
+ /* ownership of ipcaps moved to caps on break; otherwise drop it */
+ gst_caps_unref (ipcaps);
+ }
+ 
+ if (caps) {
+ caps = gst_caps_make_writable (caps);
+ 
+ /* now fixate */
+ if (!gst_caps_is_empty (caps)) {
+ caps = gst_caps_fixate (caps);
+ GST_DEBUG_OBJECT (self, "fixated to: %" GST_PTR_FORMAT, caps);
+ }
+ 
+ if (gst_caps_is_empty (caps) || gst_caps_is_any (caps)) {
+ gst_caps_unref (caps);
+ caps = NULL;
+ }
+ }
+ 
+ return caps;
+ }
+
+ /* Tear down the internal pipeline: drop the helper elements (mjpg
+  * demuxer, jpeg decoder, colorspace converters) and, when @v4l2src is
+  * TRUE, the v4l2src element itself. Each element is removed from the
+  * bin, set to NULL state and unreffed — an extra ref on each was taken
+  * when it was created, so the element outlives gst_bin_remove().
+  * Finally any remaining children (e.g. tee elements) are removed by
+  * iterating the bin. */
+ static void
+ gst_uvc_h264_src_destroy_pipeline (GstUvcH264Src * self, gboolean v4l2src)
+ {
+ GstIterator *iter = NULL;
+ gboolean done;
+ 
+ if (v4l2src && self->v4l2_src) {
+ gst_bin_remove (GST_BIN (self), self->v4l2_src);
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ gst_object_unref (self->v4l2_src);
+ self->v4l2_src = NULL;
+ /* fd and unit id belong to the dropped v4l2src */
+ self->v4l2_fd = -1;
+ self->h264_unit_id = 0;
+ }
+ if (self->mjpg_demux) {
+ gst_bin_remove (GST_BIN (self), self->mjpg_demux);
+ gst_element_set_state (self->mjpg_demux, GST_STATE_NULL);
+ gst_object_unref (self->mjpg_demux);
+ self->mjpg_demux = NULL;
+ }
+ if (self->jpeg_dec) {
+ gst_bin_remove (GST_BIN (self), self->jpeg_dec);
+ gst_element_set_state (self->jpeg_dec, GST_STATE_NULL);
+ gst_object_unref (self->jpeg_dec);
+ self->jpeg_dec = NULL;
+ }
+ if (self->vid_colorspace) {
+ gst_bin_remove (GST_BIN (self), self->vid_colorspace);
+ gst_element_set_state (self->vid_colorspace, GST_STATE_NULL);
+ gst_object_unref (self->vid_colorspace);
+ self->vid_colorspace = NULL;
+ }
+ if (self->vf_colorspace) {
+ gst_bin_remove (GST_BIN (self), self->vf_colorspace);
+ gst_element_set_state (self->vf_colorspace, GST_STATE_NULL);
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ }
+ /* Remove every remaining child except v4l2_src (kept when the caller
+ * passed v4l2src == FALSE) */
+ iter = gst_bin_iterate_elements (GST_BIN (self));
+ done = FALSE;
+ while (!done) {
+ GValue data = { 0, };
+ 
+ switch (gst_iterator_next (iter, &data)) {
+ case GST_ITERATOR_OK:
+ {
+ GstElement *child = g_value_get_object (&data);
+ if (child != self->v4l2_src) {
+ gst_bin_remove (GST_BIN (self), child);
+ gst_element_set_state (child, GST_STATE_NULL);
+ }
+ g_value_reset (&data);
+ break;
+ }
+ case GST_ITERATOR_RESYNC:
+ /* the bin changed under us (we are removing elements): restart */
+ gst_iterator_resync (iter);
+ break;
+ case GST_ITERATOR_ERROR:
+ done = TRUE;
+ break;
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ }
+ }
+ gst_iterator_free (iter);
+ }
+
+ /* Create the v4l2src child on first use, (re)apply the device and
+  * num-buffers properties, bring it to READY, and refresh the cached
+  * device fd and H264 extension unit id. Going through READY makes
+  * v4l2src lose its clock, so the previous clock and base time are
+  * restored afterwards.
+  * Returns FALSE (with v4l2_src fully torn down) on any failure. */
+ static gboolean
+ ensure_v4l2src (GstUvcH264Src * self)
+ {
+ gchar *device = NULL;
+ GstClock *v4l2_clock = NULL;
+ 
+ if (self->v4l2_src == NULL) {
+ /* Create v4l2 source and set it up */
+ self->v4l2_src = gst_element_factory_make ("v4l2src", NULL);
+ if (!self->v4l2_src || !gst_bin_add (GST_BIN (self), self->v4l2_src))
+ goto error;
+ /* Keep an extra ref so the element survives gst_bin_remove() */
+ gst_object_ref (self->v4l2_src);
+ g_signal_connect (self->v4l2_src, "prepare-format",
+ (GCallback) v4l2src_prepare_format, self);
+ }
+ 
+ g_object_get (self->v4l2_src, "device", &device, NULL);
+ g_object_set (self->v4l2_src,
+ "device", self->device, "num-buffers", self->num_buffers, NULL);
+ 
+ v4l2_clock = gst_element_get_clock (self->v4l2_src);
+ 
+ /* Set to NULL if the device changed */
+ if (g_strcmp0 (device, self->device))
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ g_free (device);
+ 
+ if (gst_element_set_state (self->v4l2_src, GST_STATE_READY) !=
+ GST_STATE_CHANGE_SUCCESS) {
+ GST_DEBUG_OBJECT (self, "Unable to set v4l2src to READY state");
+ goto error_remove;
+ }
+ 
+ /* Set/Update the fd and unit id after we go to READY */
+ g_object_get (self->v4l2_src, "device-fd", &self->v4l2_fd, NULL);
+ self->h264_unit_id =
+ xu_get_id (GST_OBJECT (self), self->device, &self->usb_ctx);
+ 
+ if (self->h264_unit_id == 0) {
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ ("Device is not a valid UVC H264 camera"), (NULL));
+ goto error_remove;
+ }
+ 
+ /* going to state READY makes v4l2src lose its reference to the clock */
+ if (v4l2_clock) {
+ gst_element_set_clock (self->v4l2_src, v4l2_clock);
+ gst_element_set_base_time (self->v4l2_src,
+ gst_element_get_base_time (GST_ELEMENT (self)));
+ gst_object_unref (v4l2_clock);
+ }
+ 
+ return TRUE;
+ 
+ error_remove:
++ if (v4l2_clock) {
++ gst_object_unref (v4l2_clock);
++ /* Fixed: clear the pointer so the fall-through into error: below
++ * cannot unref the clock a second time */
++ v4l2_clock = NULL;
++ }
++
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN (self), self->v4l2_src);
+ 
+ error:
+ if (v4l2_clock)
+ gst_object_unref (v4l2_clock);
+ if (self->v4l2_src)
+ gst_object_unref (self->v4l2_src);
+ self->v4l2_src = NULL;
+ self->v4l2_fd = -1;
+ self->h264_unit_id = 0;
+ 
+ return FALSE;
+ }
+
+ static gboolean
+ gst_uvc_h264_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
+ {
+ GstUvcH264Src *self = GST_UVC_H264_SRC (bcamsrc);
+ GstIterator *iter = NULL;
+ gboolean iter_done = FALSE;
+ GstPad *vf_pad = NULL;
+ GstCaps *vf_caps = NULL;
+ GstStructure *vf_struct = NULL;
+ GstPad *vid_pad = NULL;
+ GstCaps *vid_caps = NULL;
+ GstStructure *vid_struct = NULL;
+ GstCaps *src_caps = NULL;
+ GstPad *v4l_pad = NULL;
+ GstCaps *v4l_caps = NULL;
+ gboolean jpg2raw = FALSE;
+
+ enum
+ {
+ RAW_NONE, ENCODED_NONE, NONE_RAW, NONE_ENCODED,
+ H264_JPG, H264_RAW, H264_JPG2RAW, NONE_NONE,
+ RAW_RAW, ENCODED_ENCODED,
+ } type;
+
+ GST_DEBUG_OBJECT (self, "Construct pipeline");
+ self->reconfiguring = TRUE;
+
+ if (self->v4l2_src) {
+ uvcx_encoder_reset req = { 0 };
+
+ if (!xu_query (self, UVCX_ENCODER_RESET, UVC_SET_CUR, (guchar *) & req))
+ GST_WARNING_OBJECT (self, " UVCX_ENCODER_RESET SET_CUR error");
+ }
+
+ if (!ensure_v4l2src (self))
+ goto error;
+
+ gst_uvc_h264_src_destroy_pipeline (self, FALSE);
+
+ /* Potentially unlink v4l2src to the ghost pads */
+ gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), NULL);
+ gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), NULL);
+
+ if (gst_pad_is_linked (self->vfsrc))
+ vf_caps = gst_pad_peer_query_caps (self->vfsrc, NULL);
+ if (gst_pad_is_linked (self->vidsrc))
+ vid_caps = gst_pad_peer_query_caps (self->vidsrc, NULL);
+
+ GST_DEBUG_OBJECT (self, "vfsrc caps : %" GST_PTR_FORMAT, vf_caps);
+ GST_DEBUG_OBJECT (self, "vidsrc caps : %" GST_PTR_FORMAT, vid_caps);
+ if (!self->started) {
+ GST_DEBUG_OBJECT (self, "video not started. Ignoring vidsrc caps");
+ if (vid_caps)
+ gst_caps_unref (vid_caps);
+ vid_caps = NULL;
+ }
+
+ v4l_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ v4l_caps = gst_pad_query_caps (v4l_pad, NULL);
+ GST_DEBUG_OBJECT (self, "v4l2src caps : %" GST_PTR_FORMAT, v4l_caps);
+ if (vid_caps) {
+ GstCaps *trans_caps = gst_uvc_h264_src_transform_caps (self, vid_caps);
+
+ gst_caps_unref (vid_caps);
+ vid_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps,
+ trans_caps, TRUE);
+ gst_caps_unref (trans_caps);
+
+ if (vid_caps) {
+ vid_struct = gst_caps_get_structure (vid_caps, 0);
+ } else {
+ GST_WARNING_OBJECT (self, "Could not negotiate vidsrc caps format");
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ goto error_remove;
+ }
+ }
+ GST_DEBUG_OBJECT (self, "Fixated vidsrc caps : %" GST_PTR_FORMAT, vid_caps);
+
+ if (vid_caps && gst_structure_has_name (vid_struct, "video/x-h264")) {
+ self->main_format = UVC_H264_SRC_FORMAT_H264;
+ if (!_extract_caps_info (vid_struct, &self->main_width,
+ &self->main_height, &self->main_frame_interval)) {
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ goto error_remove;
+ }
+
+ self->main_stream_format = _extract_stream_format (vid_struct);
+ self->main_profile = _extract_profile (vid_struct);
+ } else {
+ self->main_format = UVC_H264_SRC_FORMAT_NONE;
+ }
+
+ if (vf_caps) {
+ GstCaps *trans_caps = gst_uvc_h264_src_transform_caps (self, vf_caps);
+
+ gst_caps_unref (vf_caps);
+ vf_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps,
+ trans_caps, FALSE);
+
+ /* If we couldn't find a suitable vf cap, try the jpeg2raw pipeline */
+ if (!vf_caps && self->main_format == UVC_H264_SRC_FORMAT_H264) {
+ GstCaps *jpg_caps;
+
+ jpg2raw = TRUE;
+ jpg_caps = _transform_caps (self, trans_caps, self->jpeg_decoder_name);
+
+ vf_caps = gst_uvc_h264_src_fixate_caps (self, v4l_pad, v4l_caps,
+ jpg_caps, FALSE);
+ gst_caps_unref (jpg_caps);
+ }
+ gst_caps_unref (trans_caps);
+ if (vf_caps) {
+ vf_struct = gst_caps_get_structure (vf_caps, 0);
+ } else {
+ GST_WARNING_OBJECT (self, "Could not negotiate vfsrc caps format");
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+ goto error_remove;
+ }
+ }
+ GST_DEBUG_OBJECT (self, "Fixated vfsrc caps : %" GST_PTR_FORMAT, vf_caps);
+ gst_object_unref (v4l_pad);
+ gst_caps_unref (v4l_caps);
+
+ if (vf_caps && vid_caps &&
+ !gst_structure_has_name (vid_struct, "video/x-h264")) {
+ /* Allow for vfsrc+vidsrc to both be raw or jpeg */
+ if (gst_structure_has_name (vid_struct, "image/jpeg") &&
+ gst_structure_has_name (vf_struct, "image/jpeg")) {
+ self->main_format = UVC_H264_SRC_FORMAT_JPG;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ type = ENCODED_ENCODED;
+ } else if (!gst_structure_has_name (vid_struct, "image/jpeg") &&
+ !gst_structure_has_name (vf_struct, "image/jpeg")) {
+ self->main_format = UVC_H264_SRC_FORMAT_RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_RAW;
+ type = RAW_RAW;
+ } else {
+ goto error_remove;
+ }
+ } else if (vf_caps && vid_caps) {
+ guint32 smallest_frame_interval;
+
+ if (!_extract_caps_info (vf_struct, &self->secondary_width,
+ &self->secondary_height, &self->secondary_frame_interval))
+ goto error_remove;
+
+ if (jpg2raw == FALSE && gst_structure_has_name (vf_struct, "image/jpeg")) {
+ type = H264_JPG;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ } else {
+ if (jpg2raw) {
+ type = H264_JPG2RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ } else {
+ type = H264_RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_RAW;
+ }
+ }
+ smallest_frame_interval = MIN (self->main_frame_interval,
+ self->secondary_frame_interval);
+ /* Just to avoid a potential division by zero, set interval to 30 fps */
+ if (smallest_frame_interval == 0)
+ smallest_frame_interval = 333333;
+
+ /* Frame interval is in 100ns units */
+ src_caps = gst_caps_new_simple ("image/jpeg",
+ "width", G_TYPE_INT, self->secondary_width,
+ "height", G_TYPE_INT, self->secondary_height,
+ "framerate", GST_TYPE_FRACTION,
+ NSEC_PER_SEC / smallest_frame_interval, 100, NULL);
+ } else if (vf_caps || vid_caps) {
+ self->secondary_format = UVC_H264_SRC_FORMAT_NONE;
+ if (vid_struct && gst_structure_has_name (vid_struct, "video/x-h264")) {
+ type = ENCODED_NONE;
+ } else if (vid_struct && gst_structure_has_name (vid_struct, "image/jpeg")) {
+ type = ENCODED_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_JPG;
+ } else if (vf_struct && gst_structure_has_name (vf_struct, "image/jpeg")) {
+ type = NONE_ENCODED;
+ self->secondary_format = UVC_H264_SRC_FORMAT_JPG;
+ } else if (vid_struct) {
+ type = RAW_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_RAW;
+ } else if (vf_struct) {
+ type = NONE_RAW;
+ self->secondary_format = UVC_H264_SRC_FORMAT_RAW;
+ } else {
+ g_assert_not_reached ();
+ type = NONE_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_NONE;
+ }
+ } else {
+ type = NONE_NONE;
+ self->main_format = UVC_H264_SRC_FORMAT_NONE;
+ self->secondary_format = UVC_H264_SRC_FORMAT_NONE;
+ }
+
+ switch (type) {
+ case NONE_NONE:
+ GST_DEBUG_OBJECT (self, "None+None");
+ vf_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ break;
+ case RAW_NONE:
+ GST_DEBUG_OBJECT (self, "Raw+None");
+ self->vid_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->vid_colorspace ||
+ !gst_bin_add (GST_BIN (self), self->vid_colorspace))
+ goto error_remove;
+ gst_object_ref (self->vid_colorspace);
+ if (!gst_element_link (self->v4l2_src, self->vid_colorspace))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->vid_colorspace, "src");
+ break;
+ case NONE_RAW:
+ GST_DEBUG_OBJECT (self, "None+Raw");
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->vf_colorspace ||
+ !gst_bin_add (GST_BIN (self), self->vf_colorspace))
+ goto error_remove;
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_element_link (self->v4l2_src, self->vf_colorspace))
+ goto error_remove_all;
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ break;
+ case ENCODED_NONE:
+ GST_DEBUG_OBJECT (self, "Encoded+None");
+ vid_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ break;
+ case NONE_ENCODED:
+ GST_DEBUG_OBJECT (self, "None+Encoded");
+ vf_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+ break;
+ case H264_JPG:
+ GST_DEBUG_OBJECT (self, "H264+JPG");
+ self->mjpg_demux = gst_element_factory_make ("uvch264mjpgdemux", NULL);
+ if (!self->mjpg_demux || !gst_bin_add (GST_BIN (self), self->mjpg_demux))
+ goto error_remove;
+ gst_object_ref (self->mjpg_demux);
+ g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux,
+ src_caps))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264");
+ vf_pad = gst_element_get_static_pad (self->mjpg_demux, "jpeg");
+ break;
+ case H264_RAW:
+ GST_DEBUG_OBJECT (self, "H264+Raw");
+ self->mjpg_demux = gst_element_factory_make ("uvch264mjpgdemux", NULL);
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->mjpg_demux || !self->vf_colorspace)
+ goto error_remove;
+ if (!gst_bin_add (GST_BIN (self), self->mjpg_demux))
+ goto error_remove;
+ gst_object_ref (self->mjpg_demux);
+ g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) {
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux,
+ src_caps))
+ goto error_remove_all;
+ if (!gst_element_link_pads (self->mjpg_demux, "yuy2",
+ self->vf_colorspace, "sink"))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264");
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ break;
+ case H264_JPG2RAW:
+ GST_DEBUG_OBJECT (self, "H264+Raw(jpegdec)");
+ self->mjpg_demux = gst_element_factory_make ("uvch264mjpgdemux", NULL);
+ self->jpeg_dec = gst_element_factory_make (self->jpeg_decoder_name, NULL);
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->mjpg_demux || !self->jpeg_dec || !self->vf_colorspace)
+ goto error_remove;
+ if (!gst_bin_add (GST_BIN (self), self->mjpg_demux))
+ goto error_remove;
+ gst_object_ref (self->mjpg_demux);
+ g_object_set (self->mjpg_demux, "device-fd", self->v4l2_fd,
+ "num-clock-samples", self->num_clock_samples, NULL);
+ if (!gst_bin_add (GST_BIN (self), self->jpeg_dec)) {
+ gst_object_unref (self->jpeg_dec);
+ self->jpeg_dec = NULL;
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->jpeg_dec);
+ if (!gst_bin_add (GST_BIN (self), self->vf_colorspace)) {
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_element_link_filtered (self->v4l2_src, self->mjpg_demux,
+ src_caps))
+ goto error_remove_all;
+ if (!gst_element_link_pads (self->mjpg_demux, "jpeg", self->jpeg_dec,
+ "sink"))
+ goto error_remove_all;
+ if (!gst_element_link (self->jpeg_dec, self->vf_colorspace))
+ goto error_remove_all;
+ vid_pad = gst_element_get_static_pad (self->mjpg_demux, "h264");
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ break;
+ case RAW_RAW:
+ {
+ GstElement *tee = NULL;
+
+ GST_DEBUG_OBJECT (self, "Raw+Raw");
+ tee = gst_element_factory_make ("tee", NULL);
+ if (!tee || !gst_bin_add (GST_BIN (self), tee)) {
+ if (tee)
+ gst_object_unref (tee);
+ goto error_remove;
+ }
+ self->vf_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ self->vid_colorspace = gst_element_factory_make (self->colorspace_name,
+ NULL);
+ if (!self->vf_colorspace || !self->vid_colorspace)
+ goto error_remove;
+ if (!gst_bin_add (GST_BIN (self), self->vf_colorspace))
+ goto error_remove;
+ gst_object_ref (self->vf_colorspace);
+ if (!gst_bin_add (GST_BIN (self), self->vid_colorspace)) {
+ gst_object_unref (self->vid_colorspace);
+ self->vid_colorspace = NULL;
+ goto error_remove_all;
+ }
+ gst_object_ref (self->vid_colorspace);
+ if (!gst_element_link (self->v4l2_src, tee))
+ goto error_remove_all;
+ if (!gst_element_link (tee, self->vf_colorspace))
+ goto error_remove_all;
+ if (!gst_element_link (tee, self->vid_colorspace))
+ goto error_remove_all;
+ vf_pad = gst_element_get_static_pad (self->vf_colorspace, "src");
+ vid_pad = gst_element_get_static_pad (self->vid_colorspace, "src");
+ }
+ break;
+ case ENCODED_ENCODED:
+ {
+ GstElement *tee = NULL;
+
+ GST_DEBUG_OBJECT (self, "Encoded+Encoded");
+ tee = gst_element_factory_make ("tee", NULL);
+ if (!tee || !gst_bin_add (GST_BIN (self), tee)) {
+ if (tee)
+ gst_object_unref (tee);
+ goto error_remove;
+ }
+ if (!gst_element_link (self->v4l2_src, tee))
+ goto error_remove_all;
+ vf_pad = gst_element_request_pad_simple (tee, "src_%u");
+ vid_pad = gst_element_request_pad_simple (tee, "src_%u");
+ }
+ break;
+ }
+
+ if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc), vid_pad) ||
+ !gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad))
+ goto error_remove_all;
+ if (vid_pad)
+ gst_object_unref (vid_pad);
+ if (vf_pad)
+ gst_object_unref (vf_pad);
+ vid_pad = vf_pad = NULL;
+
+ if (vf_caps)
+ gst_caps_unref (vf_caps);
+ if (vid_caps)
+ gst_caps_unref (vid_caps);
+ if (src_caps)
+ gst_caps_unref (src_caps);
+ vf_caps = vid_caps = src_caps = NULL;
+
+ /* Sync children states, in sink to source order */
+ if (self->vid_colorspace &&
+ !gst_element_sync_state_with_parent (self->vid_colorspace))
+ goto error_remove_all;
+ if (self->vf_colorspace &&
+ !gst_element_sync_state_with_parent (self->vf_colorspace))
+ goto error_remove_all;
+ if (self->jpeg_dec && !gst_element_sync_state_with_parent (self->jpeg_dec))
+ goto error_remove_all;
+ if (self->mjpg_demux &&
+ !gst_element_sync_state_with_parent (self->mjpg_demux))
+ goto error_remove_all;
+ if (self->v4l2_src && !gst_element_sync_state_with_parent (self->v4l2_src))
+ goto error_remove_all;
+
+ /* Sync any remaining children states with bin's state */
+ iter = gst_bin_iterate_elements (GST_BIN (self));
+ iter_done = FALSE;
+ while (!iter_done) {
+ GstElement *child = NULL;
+ GValue data = { 0, };
+
+ switch (gst_iterator_next (iter, &data)) {
+ case GST_ITERATOR_OK:
+ child = g_value_get_object (&data);
+ if (!gst_element_sync_state_with_parent (child)) {
+ g_value_reset (&data);
+ gst_iterator_free (iter);
+ goto error_remove_all;
+ }
+ g_value_reset (&data);
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync (iter);
+ break;
+ case GST_ITERATOR_ERROR:
+ iter_done = TRUE;
+ break;
+ case GST_ITERATOR_DONE:
+ iter_done = TRUE;
+ break;
+ }
+ }
+ gst_iterator_free (iter);
+
+ self->reconfiguring = FALSE;
+ return TRUE;
+
+ error_remove_all:
+ gst_uvc_h264_src_destroy_pipeline (self, FALSE);
+ error_remove:
+ gst_element_set_state (self->v4l2_src, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN (self), self->v4l2_src);
+
+ error:
+ if (self->v4l2_src)
+ gst_object_unref (self->v4l2_src);
+ self->v4l2_src = NULL;
+ self->v4l2_fd = -1;
+ self->h264_unit_id = 0;
+
+ if (self->mjpg_demux)
+ gst_object_unref (self->mjpg_demux);
+ self->mjpg_demux = NULL;
+ if (self->jpeg_dec)
+ gst_object_unref (self->jpeg_dec);
+ self->jpeg_dec = NULL;
+ if (self->vid_colorspace)
+ gst_object_unref (self->vid_colorspace);
+ self->vid_colorspace = NULL;
+ if (self->vf_colorspace)
+ gst_object_unref (self->vf_colorspace);
+ self->vf_colorspace = NULL;
+
+ if (src_caps)
+ gst_caps_unref (src_caps);
+
+ if (vf_caps)
+ gst_caps_unref (vf_caps);
+ if (vid_caps)
+ gst_caps_unref (vid_caps);
+
+ if (vid_pad)
+ gst_object_unref (vid_pad);
+ if (vf_pad)
+ gst_object_unref (vf_pad);
+
+ self->reconfiguring = FALSE;
+ return FALSE;
+ }
+
+ /* CAPS query handler for the vfsrc/vidsrc ghost pads.
+  * Returns the pad's template caps intersected with what the wrapped
+  * v4l2 source can produce (after transformation), or just the template
+  * caps while no v4l2 source exists yet.  Caller owns the result. */
+ static GstCaps *
+ gst_uvc_h264_src_getcaps (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (parent);
+   GstCaps *template = NULL;
+   GstCaps *result = NULL;
+
+   /* Pick the template matching the queried ghost pad; an unknown pad
+    * gets empty caps so the intersection below stays empty. */
+   if (pad == self->vfsrc)
+     template = gst_static_pad_template_get_caps (&vfsrc_template);
+   else if (pad == self->vidsrc)
+     template = gst_static_pad_template_get_caps (&vidsrc_template);
+   else
+     template = gst_caps_new_empty ();
+
+   if (self->v4l2_src) {
+     GstCaps *filter;
+     GstPad *v4l_pad = gst_element_get_static_pad (self->v4l2_src, "src");
+     GstCaps *v4l_caps = NULL;
+     GstCaps *new_caps = NULL;
+
+     /* Forward any filter from the query to the v4l2 src pad, then map
+      * the device caps into the formats this element exposes. */
+     gst_query_parse_caps (query, &filter);
+     v4l_caps = gst_pad_query_caps (v4l_pad, filter);
+     new_caps = gst_uvc_h264_src_transform_caps (self, v4l_caps);
+
+     result = gst_caps_intersect (new_caps, template);
+     gst_object_unref (v4l_pad);
+     gst_caps_unref (v4l_caps);
+     gst_caps_unref (new_caps);
+     gst_caps_unref (template);
+   } else {
+     /* No device yet: hand the full template caps to the caller. */
+     result = template;
+   }
+
+   return result;
+ }
+
+ /* Generic pad query handler: answers CAPS queries through
+  * gst_uvc_h264_src_getcaps() and defers everything else to the default
+  * pad query handling. */
+ static gboolean
+ gst_uvc_h264_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+   gboolean handled;
+
+   if (GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
+     GstCaps *caps = gst_uvc_h264_src_getcaps (pad, parent, query);
+
+     gst_query_set_caps_result (query, caps);
+     gst_caps_unref (caps);
+     handled = TRUE;
+   } else {
+     handled = gst_pad_query_default (pad, parent, query);
+   }
+
+   return handled;
+ }
+
+
+ /* GstBaseCameraSrc vmethod: this source only supports video capture,
+  * so MODE_VIDEO is accepted and every other mode rejected. */
+ static gboolean
+ gst_uvc_h264_src_set_mode (GstBaseCameraSrc * bcamsrc, GstCameraBinMode mode)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (bcamsrc);
+
+   GST_DEBUG_OBJECT (self, "set mode to %d", mode);
+
+   if (mode != MODE_VIDEO)
+     return FALSE;
+
+   return TRUE;
+ }
+
+ /* GstBaseCameraSrc vmethod: begin a video capture.
+  * Marks the capture as started and, when at least READY, rebuilds the
+  * internal pipeline in capture configuration.  On failure the started
+  * flag is cleared and the pipeline is reconstructed once more so the
+  * element falls back to its non-capturing topology. */
+ static gboolean
+ gst_uvc_h264_src_start_capture (GstBaseCameraSrc * camerasrc)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (camerasrc);
+   gboolean ret = TRUE;
+
+   GST_DEBUG_OBJECT (self, "start capture");
+
+   if (!self->started) {
+     self->started = TRUE;
+     if (GST_STATE (self) >= GST_STATE_READY) {
+       ret = gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+       if (!ret) {
+         GST_DEBUG_OBJECT (self, "Could not start capture");
+         self->started = FALSE;
+         /* Rebuild the previous (non-capture) pipeline so the element
+          * stays usable after the failed attempt. */
+         gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+       }
+     }
+   }
+
+   return ret;
+ }
+
+ /* GstBaseCameraSrc vmethod: end an ongoing video capture.
+  * Clears the started flag, reconstructs the pipeline back into the
+  * non-capture configuration (when at least READY) and tells the base
+  * class the capture is finished. */
+ static void
+ gst_uvc_h264_src_stop_capture (GstBaseCameraSrc * camerasrc)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (camerasrc);
+
+   GST_DEBUG_OBJECT (self, "stop capture");
+
+   if (self->started) {
+     self->started = FALSE;
+     if (GST_STATE (self) >= GST_STATE_READY)
+       gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+     gst_base_camera_src_finish_capture (camerasrc);
+   }
+ }
+
+ /* Called when one of our ghost pads gets linked or unlinked; triggers a
+  * pipeline reconstruction so the internal topology matches the set of
+  * pads actually in use. */
+ static void
+ gst_uvc_h264_src_pad_linking_cb (GstPad * pad,
+     GstPad * peer, gpointer user_data)
+ {
+   GstUvcH264Src *self = GST_UVC_H264_SRC (user_data);
+   gchar *pad_name = gst_pad_get_name (pad);
+
+   GST_DEBUG_OBJECT (self, "Pad %s was (un)linked. Renegotiating", pad_name);
+   g_free (pad_name);
+   /* Only rebuild when a pipeline can actually exist (>= READY). */
+   if (GST_STATE (self) >= GST_STATE_READY)
+     gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+ }
+
+
+ /* GstElement state-change vmethod.
+  * NULL->READY opens the v4l2 device (ensure_v4l2src) and resets the
+  * segment; upward READY/PAUSED transitions lazily (re)build the
+  * internal pipeline; READY->NULL tears it down after the parent class
+  * has handled the transition. */
+ static GstStateChangeReturn
+ gst_uvc_h264_src_change_state (GstElement * element, GstStateChange trans)
+ {
+   GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+   GstUvcH264Src *self = GST_UVC_H264_SRC (element);
+
+   switch (trans) {
+     case GST_STATE_CHANGE_NULL_TO_READY:
+       if (!ensure_v4l2src (self)) {
+         ret = GST_STATE_CHANGE_FAILURE;
+         goto end;
+       }
+       gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
+       break;
+     case GST_STATE_CHANGE_READY_TO_PAUSED:
+     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+       /* Build the pipeline only if it does not exist yet. */
+       if (!self->v4l2_src)
+         gst_uvc_h264_src_construct_pipeline (GST_BASE_CAMERA_SRC (self));
+       break;
+     default:
+       break;
+   }
+
+   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, trans);
+
+   if (ret == GST_STATE_CHANGE_FAILURE)
+     goto end;
+
+   switch (trans) {
+     case GST_STATE_CHANGE_READY_TO_NULL:
+       gst_uvc_h264_src_destroy_pipeline (self, TRUE);
+       break;
+     default:
+       break;
+   }
+
+ end:
+   return ret;
+ }
--- /dev/null
+ /*
+ * GStreamer
+ * Copyright (C) 2010 Nokia Corporation <multimedia@maemo.org>
+ * Copyright (C) 2011 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /*
+ TODO review
+ Examples:
+ ./gst-camerabin2-test --image-width=2048 --image-height=1536
+ ./gst-camerabin2-test --mode=2 --capture-time=10 --image-width=848 --image-height=480 --view-framerate-num=2825 \
+ --view-framerate-den=100
+
+ gst-camerabin2-test --help
+ Usage:
+ gst-camerabin2-test [OPTION...]
+
+ camerabin command line test application.
+
+ Help Options:
+ -h, --help Show help options
+ --help-all Show all help options
+ --help-gst Show GStreamer Options
+
+ Application Options:
+ --ev-compensation EV compensation (-2.5..2.5, default = 0)
+ --aperture Aperture (size of lens opening, default = 0 (auto))
+ --flash-mode Flash mode (default = 0 (auto))
+ --scene-mode Scene mode (default = 6 (auto))
+ --exposure Exposure (default = 0 (auto))
+ --iso-speed ISO speed (default = 0 (auto))
+ --white-balance-mode White balance mode (default = 0 (auto))
+ --colour-tone-mode Colour tone mode (default = 0 (auto))
+ --directory Directory for capture file(s) (default is current directory)
+ --mode Capture mode (default = 0 (image), 1 = video)
+ --capture-time Time to capture video in seconds (default = 10)
+ --capture-total Total number of captures to be done (default = 1)
+ --zoom Zoom (100 = 1x (default), 200 = 2x etc.)
+ --wrapper-source Camera source wrapper used for setting the video source
+ --video-source Video source used in still capture and video recording
+ --video-device Video device to be set on the video source (e.g. /dev/video0)
+ --audio-source Audio source used in video recording
+ --image-pp List of image post-processing elements separated with comma
+ --viewfinder-sink Viewfinder sink (default = fakesink)
+ --image-width Width for capture (only used if the caps
+ arguments aren't set)
+ --image-height Height for capture (only used if the caps
+ arguments aren't set)
+ --view-framerate-num Framerate numerator for viewfinder
+ --view-framerate-den Framerate denominator for viewfinder
+ --preview-caps Preview caps (e.g. video/x-raw-rgb,width=320,height=240)
+ --viewfinder-filter Filter to process all frames going to viewfinder sink
+ --x-width X window width (default = 320)
+ --x-height X window height (default = 240)
+ --no-xwindow Do not create XWindow
+ --encoding-target Video encoding target name
+ --encoding-profile Video encoding profile name
+ --encoding-profile-filename Video encoding profile filename
+ --image-capture-caps Image capture caps (e.g. video/x-raw-rgb,width=640,height=480)
+ --viewfinder-caps Viewfinder caps (e.g. video/x-raw-rgb,width=640,height=480)
+ --video-capture-caps Video capture caps (e.g. video/x-raw-rgb,width=640,height=480)
+ --performance-measure Collect timing information about the
+ captures and provides performance statistics at the end
+ --performance-targets A list of doubles that are the performance target
+ times for each of the measured timestamps. The order is
+ startup time, change mode time, shot to save, shot to snapshot,
+ shot to shot, preview to precapture, shot to buffer.
+ e.g. 3.5,1.0,5.0,2.5,5.0,1.5,1.0
+ * Startup time -> time it takes for camerabin to reach playing
+ * Change mode time -> time it takes for camerabin to change to the selected
+ mode in playing
+ * Shot to save -> time it takes from start-capture to having the image saved
+ to disk
+ * Shot to snapshot -> time it takes from start-capture to getting a snapshot
+ * Shot to shot -> time from one start-capture to the next one
+ * Preview to precapture -> time it takes from getting the snapshot to the
+ next buffer that reaches the viewfinder
+ * Shot to buffer -> time it takes from start-capture to the moment a buffer
+ is pushed out of the camera source
+
+ */
+
+ /*
+ * Includes
+ */
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #define GST_USE_UNSTABLE_API 1
+
+ #include <gst/gst.h>
+ #include <gst/video/videooverlay.h>
+ #include <gst/interfaces/photography.h>
+ #include <string.h>
+ #include <sys/time.h>
+ #include <time.h>
+ #include <unistd.h>
+ #include <stdlib.h>
+ #include <glib.h>
+ #include <glib/gstdio.h>
+ #include <gst/pbutils/encoding-profile.h>
+ #include <gst/pbutils/encoding-target.h>
+ #include <X11/Xlib.h>
+ #include <X11/Xatom.h>
+ /*
+  * debug logging
+  */
+ GST_DEBUG_CATEGORY_STATIC (camerabin_test);
+ #define GST_CAT_DEFAULT camerabin_test
+
+ /* Difference between two GstClockTimes as floating-point seconds. */
+ #define TIME_DIFF(a,b) ((((gint64)(a)) - ((gint64)(b))) / (gdouble) GST_SECOND)
+
+ /* printf fragments, always used right after a "%" in the format string:
+  * "%" TIME_FORMAT -> "%02d.%09u" (seconds.nanoseconds),
+  * "%" TIMEDIFF_FORMAT -> "%0.6lf". */
+ #define TIME_FORMAT "02d.%09u"
+ #define TIMEDIFF_FORMAT "0.6lf"
+
+ /* The two arguments consumed by TIME_FORMAT: seconds and nanoseconds,
+  * clamped to 99/999999999 for invalid or >= 99 s values. */
+ #define TIME_ARGS(t) \
+     (GST_CLOCK_TIME_IS_VALID (t) && (t) < 99 * GST_SECOND) ? \
+     (gint) ((((GstClockTime)(t)) / GST_SECOND) % 60) : 99, \
+     (GST_CLOCK_TIME_IS_VALID (t) && ((t) < 99 * GST_SECOND)) ? \
+     (guint) (((GstClockTime)(t)) % GST_SECOND) : 999999999
+
+ #define TIMEDIFF_ARGS(t) (t)
+
+ /* Monotonic timestamps (gst_util_get_timestamp) collected for a single
+  * capture; the bus handlers and pad probes fill in the head entry of
+  * the capture_times list. */
+ typedef struct _CaptureTiming
+ {
+   GstClockTime start_capture;   /* when the capture was started */
+   GstClockTime got_preview;     /* "preview-image" message received */
+   GstClockTime capture_done;    /* "image-done" message received */
+   GstClockTime precapture;      /* first viewfinder buffer after preview */
+   GstClockTime camera_capture;  /* first buffer out of the camera source */
+ } CaptureTiming;
+
+ /* Per-capture intervals derived from CaptureTiming (see the SHOT_TO_*
+  * macros); also used as an accumulator for sums and averages over
+  * several captures. */
+ typedef struct _CaptureTimingStats
+ {
+   GstClockTime shot_to_shot;
+   GstClockTime shot_to_save;
+   GstClockTime shot_to_snapshot;
+   GstClockTime preview_to_precapture;
+   GstClockTime shot_to_buffer;
+ } CaptureTimingStats;
+
+ /* Accumulate the timing figures of @b into @a, field by field. */
+ static void
+ capture_timing_stats_add (CaptureTimingStats * a, CaptureTimingStats * b)
+ {
+   a->shot_to_buffer += b->shot_to_buffer;
+   a->preview_to_precapture += b->preview_to_precapture;
+   a->shot_to_save += b->shot_to_save;
+   a->shot_to_snapshot += b->shot_to_snapshot;
+   a->shot_to_shot += b->shot_to_shot;
+ }
+
+ /* Divide every accumulated figure in @stats by @div (turns a sum over
+  * several captures into an average). */
+ static void
+ capture_timing_stats_div (CaptureTimingStats * stats, gint div)
+ {
+   stats->shot_to_buffer /= div;
+   stats->preview_to_precapture /= div;
+   stats->shot_to_save /= div;
+   stats->shot_to_snapshot /= div;
+   stats->shot_to_shot /= div;
+ }
+
+ /* Print one row of the statistics table: capture number followed by
+  * shot-to-save, shot-to-snapshot, shot-to-shot, preview-to-precapture
+  * and shot-to-buffer, each rendered by TIME_FORMAT/TIME_ARGS. */
+ #define PRINT_STATS(d,s) g_print ("%02d | %" TIME_FORMAT " | %" \
+     TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT \
+     " | %" TIME_FORMAT "\n", d, \
+     TIME_ARGS ((s)->shot_to_save), TIME_ARGS ((s)->shot_to_snapshot), \
+     TIME_ARGS ((s)->shot_to_shot), \
+     TIME_ARGS ((s)->preview_to_precapture), \
+     TIME_ARGS ((s)->shot_to_buffer))
+
+ /* Intervals derived from a single CaptureTiming record. */
+ #define SHOT_TO_SAVE(t) ((t)->capture_done - (t)->start_capture)
+ #define SHOT_TO_SNAPSHOT(t) ((t)->got_preview - (t)->start_capture)
+ #define PREVIEW_TO_PRECAPTURE(t) ((t)->precapture - (t)->got_preview)
+ #define SHOT_TO_BUFFER(t) ((t)->camera_capture - (t)->start_capture)
+
+ /*
+  * Global vars
+  */
+ static GstElement *camerabin = NULL;
+ static GstElement *viewfinder_sink = NULL;
+ static gulong camera_probe_id = 0;
+ static gulong viewfinder_probe_id = 0;
+ static GMainLoop *loop = NULL;
+
+ /* commandline options */
+ static gchar *videosrc_name = NULL;
+ static gchar *videodevice_name = NULL;
+ static gchar *audiosrc_name = NULL;
+ static gchar *wrappersrc_name = NULL;
+ static gchar *imagepp_name = NULL;
+ static gchar *vfsink_name = NULL;
+ static gint image_width = 0;
+ static gint image_height = 0;
+ static gint view_framerate_num = 0;
+ static gint view_framerate_den = 0;
+ static gboolean no_xwindow = FALSE;
+ static gchar *gep_targetname = NULL;
+ static gchar *gep_profilename = NULL;
+ static gchar *gep_filename = NULL;
+ static gchar *image_capture_caps_str = NULL;
+ static gchar *viewfinder_caps_str = NULL;
+ static gchar *video_capture_caps_str = NULL;
+ static gchar *audio_capture_caps_str = NULL;
+ static gboolean performance_measure = FALSE;
+ static gchar *performance_targets_str = NULL;
+ static gchar *camerabin_flags = NULL;
+
+
+ /* values for camerabin's "mode" property (1 = image, 2 = video) */
+ #define MODE_VIDEO 2
+ #define MODE_IMAGE 1
+ static gint mode = MODE_IMAGE;
+ static gint zoom = 100;
+
+ static gint capture_time = 10;  /* seconds of video per capture */
+ static gint capture_count = 0;  /* compared against capture_total to stop */
+ static gint capture_total = 1;
+ static gulong stop_capture_cb_id = 0;
+
+ /* photography interface command line options; the *_NONE sentinels mean
+  * "not given on the command line" */
+ #define EV_COMPENSATION_NONE -G_MAXFLOAT
+ #define APERTURE_NONE -G_MAXINT
+ #define FLASH_MODE_NONE -G_MAXINT
+ #define SCENE_MODE_NONE -G_MAXINT
+ #define EXPOSURE_NONE -G_MAXINT64
+ #define ISO_SPEED_NONE -G_MAXINT
+ #define WHITE_BALANCE_MODE_NONE -G_MAXINT
+ #define COLOR_TONE_MODE_NONE -G_MAXINT
+ static gfloat ev_compensation = EV_COMPENSATION_NONE;
+ static gint aperture = APERTURE_NONE;
+ static gint flash_mode = FLASH_MODE_NONE;
+ static gint scene_mode = SCENE_MODE_NONE;
+ static gint64 exposure = EXPOSURE_NONE;
+ static gint iso_speed = ISO_SPEED_NONE;
+ static gint wb_mode = WHITE_BALANCE_MODE_NONE;
+ static gint color_mode = COLOR_TONE_MODE_NONE;
+
+ static gchar *viewfinder_filter = NULL;
+
+ static int x_width = 320;
+ static int x_height = 240;
+
+ /* test configuration for common callbacks */
+ static GString *filename = NULL;
+
+ static gchar *preview_caps_name = NULL;
+
+ /* X window variables */
+ static Display *display = NULL;
+ static Window window = 0;
+
+ /* timing data, all taken with gst_util_get_timestamp () */
+ static GstClockTime initial_time = 0;
+ static GstClockTime startup_time = 0;
+ static GstClockTime change_mode_before = 0;
+ static GstClockTime change_mode_after = 0;
+ static GList *capture_times = NULL;  /* CaptureTiming*; handlers read head */
+
+ /* performance targets (--performance-targets) */
+ static GstClockTime target_startup;
+ static GstClockTime target_change_mode;
+ static GstClockTime target_shot_to_shot;
+ static GstClockTime target_shot_to_save;
+ static GstClockTime target_shot_to_snapshot;
+ static GstClockTime target_preview_to_precapture;
+ static GstClockTime target_shot_to_buffer;
+
+
+ /*
+ * Prototypes
+ */
+ static gboolean run_pipeline (gpointer user_data);
+ static void set_metadata (GstElement * camera);
+
+ /* Open the X display and create a simple override-redirect window of
+  * x_width by x_height pixels to embed the viewfinder into.  On failure
+  * only a debug message is emitted; display/window keep their NULL/0
+  * values, so the overlay is simply never bound. */
+ static void
+ create_host_window (void)
+ {
+   unsigned long valuemask;
+   XSetWindowAttributes attributes;
+
+   display = XOpenDisplay (NULL);
+   if (display) {
+     window =
+         XCreateSimpleWindow (display, DefaultRootWindow (display), 0, 0,
+         x_width, x_height, 0, 0, 0);
+     if (window) {
+       /* Keep the window manager from decorating/managing the window. */
+       valuemask = CWOverrideRedirect;
+       attributes.override_redirect = True;
+       XChangeWindowAttributes (display, window, valuemask, &attributes);
+       XSetWindowBackgroundPixmap (display, window, None);
+       XMapRaised (display, window);
+       XSync (display, FALSE);
+     } else {
+       GST_DEBUG ("could not create X window!");
+     }
+   } else {
+     GST_DEBUG ("could not open display!");
+   }
+ }
+
+ /* One-shot buffer probe on the camera source pad: records on the head
+  * CaptureTiming record when the first buffer of a capture left the
+  * camera source.  Assumes capture_times is non-empty (a capture has
+  * been started) -- TODO confirm against the installer of this probe. */
+ static GstPadProbeReturn
+ camera_src_get_timestamp_probe (GstPad * pad, GstPadProbeInfo * info,
+     gpointer udata)
+ {
+   CaptureTiming *timing;
+
+   timing = (CaptureTiming *) g_list_first (capture_times)->data;
+   timing->camera_capture = gst_util_get_timestamp ();
+
+   /* Fire only once per installation. */
+   return GST_PAD_PROBE_REMOVE;
+ }
+
+ /* One-shot buffer probe on the viewfinder sink pad, installed from the
+  * "preview-image" handler: records when the first buffer after the
+  * preview snapshot reached the viewfinder ("precapture").  Assumes
+  * capture_times is non-empty. */
+ static GstPadProbeReturn
+ viewfinder_get_timestamp_probe (GstPad * pad, GstPadProbeInfo * info,
+     gpointer udata)
+ {
+   CaptureTiming *timing;
+
+   timing = (CaptureTiming *) g_list_first (capture_times)->data;
+   timing->precapture = gst_util_get_timestamp ();
+
+   return GST_PAD_PROBE_REMOVE;
+ }
+
+ /* Synchronous bus handler for time-critical messages (runs on the
+  * posting thread, not the main loop):
+  *  - binds our X window to the video overlay as soon as the sink asks;
+  *  - on "preview-image": timestamps the snapshot, installs a one-shot
+  *    probe on the viewfinder sink to timestamp the next buffer, and
+  *    dumps the raw preview buffer to "test_vga.rgb";
+  *  - records the moment camerabin reaches PLAYING (startup time). */
+ static GstBusSyncReply
+ sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
+ {
+   const GstStructure *st;
+   const GValue *image;
+   GstBuffer *buf = NULL;
+   gchar *preview_filename = NULL;
+   FILE *f = NULL;
+   size_t written;
+
+   switch (GST_MESSAGE_TYPE (message)) {
+     case GST_MESSAGE_ELEMENT:{
+       st = gst_message_get_structure (message);
+       if (st) {
+         /* Fix: GStreamer 1.x posts "prepare-window-handle"; the 0.10
+          * name "prepare-xwindow-id" used before never matched, so the
+          * overlay was never embedded in our window. */
+         if (gst_message_has_name (message, "prepare-window-handle")) {
+           if (!no_xwindow && window) {
+             gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY
+                 (GST_MESSAGE_SRC (message)), window);
+             gst_message_unref (message);
+             message = NULL;
+             return GST_BUS_DROP;
+           }
+         } else if (gst_structure_has_name (st, "preview-image")) {
+           CaptureTiming *timing;
+
+           GST_DEBUG ("preview-image");
+
+           /* Timestamp the snapshot on the current capture record. */
+           timing = (CaptureTiming *) g_list_first (capture_times)->data;
+           timing->got_preview = gst_util_get_timestamp ();
+
+           {
+             /* set up probe to check when the viewfinder gets data */
+             GstPad *pad = gst_element_get_static_pad (viewfinder_sink, "sink");
+
+             viewfinder_probe_id =
+                 gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
+                 viewfinder_get_timestamp_probe, NULL, NULL);
+
+             gst_object_unref (pad);
+           }
+
+           /* extract preview-image from msg */
+           image = gst_structure_get_value (st, "buffer");
+           if (image) {
+             buf = gst_value_get_buffer (image);
+             preview_filename = g_strdup_printf ("test_vga.rgb");
+             f = g_fopen (preview_filename, "w");
+             if (f) {
+               GstMapInfo map;
+
+               gst_buffer_map (buf, &map, GST_MAP_READ);
+               written = fwrite (map.data, map.size, 1, f);
+               gst_buffer_unmap (buf, &map);
+               if (!written) {
+                 g_print ("error writing file\n");
+               }
+               fclose (f);
+             } else {
+               g_print ("error opening file for raw image writing\n");
+             }
+             g_free (preview_filename);
+           }
+         }
+       }
+       break;
+     }
+     case GST_MESSAGE_STATE_CHANGED:
+       if (GST_MESSAGE_SRC (message) == (GstObject *) camerabin) {
+         GstState newstate;
+
+         gst_message_parse_state_changed (message, NULL, &newstate, NULL);
+         if (newstate == GST_STATE_PLAYING) {
+           /* Startup measurement: camerabin just reached PLAYING. */
+           startup_time = gst_util_get_timestamp ();
+         }
+       }
+       break;
+     default:
+       /* unhandled message */
+       break;
+   }
+   return GST_BUS_PASS;
+ }
+
+ /* Asynchronous (main loop) bus handler: dumps a dot graph and quits on
+  * errors, logs state changes, quits on EOS and, on camerabin's
+  * "image-done" message, schedules the next capture or leaves the loop
+  * when all captures are finished. */
+ static gboolean
+ bus_callback (GstBus * bus, GstMessage * message, gpointer data)
+ {
+   switch (GST_MESSAGE_TYPE (message)) {
+     case GST_MESSAGE_ERROR:{
+       GError *err;
+       gchar *debug;
+
+       gst_message_parse_error (message, &err, &debug);
+       g_print ("Error: %s\n", err->message);
+       g_clear_error (&err);
+       g_free (debug);
+
+       /* Write debug graph to file */
+       GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camerabin),
+           GST_DEBUG_GRAPH_SHOW_ALL, "camerabin.error");
+
+       g_main_loop_quit (loop);
+       break;
+     }
+     case GST_MESSAGE_STATE_CHANGED:
+       if (GST_IS_BIN (GST_MESSAGE_SRC (message))) {
+         GstState oldstate, newstate;
+
+         gst_message_parse_state_changed (message, &oldstate, &newstate, NULL);
+         GST_DEBUG_OBJECT (GST_MESSAGE_SRC (message), "state-changed: %s -> %s",
+             gst_element_state_get_name (oldstate),
+             gst_element_state_get_name (newstate));
+       }
+       break;
+     case GST_MESSAGE_EOS:
+       /* end-of-stream */
+       GST_INFO ("got eos() - should not happen");
+       g_main_loop_quit (loop);
+       break;
+     case GST_MESSAGE_ELEMENT:
+       if (GST_MESSAGE_SRC (message) == (GstObject *) camerabin) {
+         const GstStructure *structure = gst_message_get_structure (message);
+
+         if (gst_structure_has_name (structure, "image-done")) {
+           CaptureTiming *timing;
+ #ifndef GST_DISABLE_GST_DEBUG
+           const gchar *fname = gst_structure_get_string (structure, "filename");
+
+           GST_DEBUG ("image done: %s", fname);
+ #endif
+           /* Record when this capture finished, then either start the
+            * next one or leave the main loop. */
+           timing = (CaptureTiming *) g_list_first (capture_times)->data;
+           timing->capture_done = gst_util_get_timestamp ();
+
+           if (capture_count < capture_total) {
+             g_idle_add ((GSourceFunc) run_pipeline, NULL);
+           } else {
+             g_main_loop_quit (loop);
+           }
+         }
+       }
+       break;
+     default:
+       /* unhandled message */
+       break;
+   }
+   return TRUE;
+ }
+
+ /*
+ * Helpers
+ */
+
+ /* Stop the camerabin pipeline, if there is one, and drop our
+  * reference to it. */
+ static void
+ cleanup_pipeline (void)
+ {
+   if (camerabin == NULL)
+     return;
+
+   GST_INFO_OBJECT (camerabin, "stopping and destroying");
+   gst_element_set_state (camerabin, GST_STATE_NULL);
+   gst_object_unref (camerabin);
+   camerabin = NULL;
+ }
+
+ /* Build a bin containing the comma-separated list of image
+  * post-processing elements from --image-pp, linked in order, with
+  * "sink"/"src" ghost pads on the first/last element.  Returns a
+  * (floating) reference to the new bin; elements that cannot be created
+  * are skipped with a warning. */
+ static GstElement *
+ create_ipp_bin (void)
+ {
+   GstElement *bin = NULL, *element = NULL;
+   GstPad *pad = NULL;
+   gchar **elements;
+   GList *element_list = NULL, *current = NULL, *next = NULL;
+   int i;
+
+   bin = gst_bin_new ("ippbin");
+
+   elements = g_strsplit (imagepp_name, ",", 0);
+
+   for (i = 0; elements[i] != NULL; i++) {
+     element = gst_element_factory_make (elements[i], NULL);
+     if (element) {
+       element_list = g_list_append (element_list, element);
+       gst_bin_add (GST_BIN (bin), element);
+     } else
+       /* Fix: message used to read "Could create element". */
+       GST_WARNING ("Could not create element %s for ippbin", elements[i]);
+   }
+
+   g_strfreev (elements);
+
+   if (element_list == NULL) {
+     /* Fix: nothing could be created; previously this fell through and
+      * dereferenced a NULL list when fetching the ghost pad targets. */
+     GST_WARNING ("No usable elements for ippbin");
+     return bin;
+   }
+
+   /* Link consecutive elements; walking ->next directly avoids the
+    * quadratic g_list_nth () lookups of the old loop. */
+   for (current = element_list; (next = current->next) != NULL; current = next)
+     gst_element_link (current->data, next->data);
+
+   pad = gst_element_get_static_pad (g_list_first (element_list)->data, "sink");
+   gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad));
+   gst_object_unref (GST_OBJECT (pad));
+
+   pad = gst_element_get_static_pad (g_list_last (element_list)->data, "src");
+   gst_element_add_pad (bin, gst_ghost_pad_new ("src", pad));
+   gst_object_unref (GST_OBJECT (pad));
+
+   g_list_free (element_list);
+
+   return bin;
+ }
+
+ /* Resolve the encoding profile selected on the command line.
+  * Preference order: load @gep_profilename from the target file
+  * @gep_filename when both were given; otherwise look the profile up in
+  * the installed targets via @gep_targetname.  Returns NULL (meaning
+  * camerabin's default) when nothing was requested or loading failed.
+  * Caller owns the returned profile. */
+ static GstEncodingProfile *
+ load_encoding_profile (void)
+ {
+   GstEncodingProfile *prof = NULL;
+   GstEncodingTarget *target = NULL;
+   GError *error = NULL;
+
+   /* if profile file was given, try to load profile from there */
+   if (gep_filename && gep_profilename) {
+     target = gst_encoding_target_load_from_file (gep_filename, &error);
+     if (!target) {
+       GST_WARNING ("Could not load target %s from file %s", gep_targetname,
+           gep_filename);
+       if (error) {
+         GST_WARNING ("Error from file loading: %s", error->message);
+         g_clear_error (&error);
+       }
+     } else {
+       prof = gst_encoding_target_get_profile (target, gep_profilename);
+       if (prof)
+         GST_DEBUG ("Loaded encoding profile %s from %s", gep_profilename,
+             gep_filename);
+       else
+         GST_WARNING
+             ("Could not load specified encoding profile %s from file %s",
+             gep_profilename, gep_filename);
+     }
+     /* if we could not load profile from file then try to find one from system */
+   } else if (gep_profilename && gep_targetname) {
+     prof = gst_encoding_profile_find (gep_targetname, gep_profilename, NULL);
+     if (prof)
+       GST_DEBUG ("Loaded encoding profile %s from target %s", gep_profilename,
+           gep_targetname);
+   } else
+     GST_DEBUG
+         ("Encoding profile not set, using camerabin default encoding profile");
+
+   return prof;
+ }
+
+ /* Create the element described by @element_name (a gst-launch style
+  * description) and set it on @element's @property_name property.
+  * When @res_elem is non-NULL it receives the created element (NULL if
+  * none).  Returns FALSE only when a description was given but could
+  * not be parsed; a NULL @element_name leaves the property untouched
+  * and is not an error. */
+ static gboolean
+ setup_pipeline_element (GstElement * element, const gchar * property_name,
+     const gchar * element_name, GstElement ** res_elem)
+ {
+   gboolean res = TRUE;
+   GstElement *elem = NULL;
+
+   if (element_name) {
+     GError *error = NULL;
+
+     elem = gst_parse_launch (element_name, &error);
+     if (elem) {
+       g_object_set (element, property_name, elem, NULL);
+       g_object_unref (elem);
+     } else {
+       GST_WARNING ("can't create element '%s' for property '%s'", element_name,
+           property_name);
+       if (error) {
+         GST_ERROR ("%s", error->message);
+         g_clear_error (&error);
+       }
+       res = FALSE;
+     }
+   } else {
+     GST_DEBUG ("no element for property '%s' given", property_name);
+   }
+   if (res_elem)
+     *res_elem = elem;
+   return res;
+ }
+
+ /* Apply the caps strings supplied on the command line to camerabin's
+  * image/viewfinder/video/audio capture caps properties, patching in
+  * the explicit image size or viewfinder framerate where given. */
+ static void
+ set_camerabin_caps_from_string (void)
+ {
+   if (image_capture_caps_str != NULL) {
+     GstCaps *icaps = gst_caps_from_string (image_capture_caps_str);
+
+     if (GST_CAPS_IS_SIMPLE (icaps) && image_width > 0 && image_height > 0)
+       gst_caps_set_simple (icaps, "width", G_TYPE_INT, image_width, "height",
+           G_TYPE_INT, image_height, NULL);
+     GST_DEBUG ("setting image-capture-caps: %" GST_PTR_FORMAT, icaps);
+     g_object_set (camerabin, "image-capture-caps", icaps, NULL);
+     gst_caps_unref (icaps);
+   }
+
+   if (viewfinder_caps_str != NULL) {
+     GstCaps *vfcaps = gst_caps_from_string (viewfinder_caps_str);
+
+     if (GST_CAPS_IS_SIMPLE (vfcaps) && view_framerate_num > 0
+         && view_framerate_den > 0)
+       gst_caps_set_simple (vfcaps, "framerate", GST_TYPE_FRACTION,
+           view_framerate_num, view_framerate_den, NULL);
+     GST_DEBUG ("setting viewfinder-caps: %" GST_PTR_FORMAT, vfcaps);
+     g_object_set (camerabin, "viewfinder-caps", vfcaps, NULL);
+     gst_caps_unref (vfcaps);
+   }
+
+   if (video_capture_caps_str != NULL) {
+     GstCaps *vcaps = gst_caps_from_string (video_capture_caps_str);
+
+     GST_DEBUG ("setting video-capture-caps: %" GST_PTR_FORMAT, vcaps);
+     g_object_set (camerabin, "video-capture-caps", vcaps, NULL);
+     gst_caps_unref (vcaps);
+   }
+
+   if (audio_capture_caps_str != NULL) {
+     GstCaps *acaps = gst_caps_from_string (audio_capture_caps_str);
+
+     GST_DEBUG ("setting audio-capture-caps: %" GST_PTR_FORMAT, acaps);
+     g_object_set (camerabin, "audio-capture-caps", acaps, NULL);
+     gst_caps_unref (acaps);
+   }
+ }
+
+ /* Build and start the camerabin pipeline according to the command line
+  * options: creates camerabin, installs bus handlers, configures the
+  * source/sink/filter elements, capture caps and encoding profile, then
+  * brings the pipeline to PLAYING.  Returns TRUE on success; on failure
+  * the pipeline is torn down and FALSE returned. */
+ static gboolean
+ setup_pipeline (void)
+ {
+   gboolean res = TRUE;
+   GstBus *bus;
+   GstElement *sink = NULL, *ipp = NULL;
+   GstEncodingProfile *prof = NULL;
+
+   initial_time = gst_util_get_timestamp ();
+
+   camerabin = gst_element_factory_make ("camerabin", NULL);
+   if (NULL == camerabin) {
+     g_warning ("can't create camerabin element\n");
+     goto error;
+   }
+
+   bus = gst_pipeline_get_bus (GST_PIPELINE (camerabin));
+   /* Add sync handler for time critical messages that need to be handled fast */
+   gst_bus_set_sync_handler (bus, sync_bus_callback, NULL, NULL);
+   /* Handle normal messages asynchronously */
+   gst_bus_add_watch (bus, bus_callback, NULL);
+   gst_object_unref (bus);
+
+   GST_INFO_OBJECT (camerabin, "camerabin created");
+
+   /* "flags" takes a string of flag nicks; the empty string clears it. */
+   if (camerabin_flags)
+     gst_util_set_object_arg (G_OBJECT (camerabin), "flags", camerabin_flags);
+   else
+     gst_util_set_object_arg (G_OBJECT (camerabin), "flags", "");
+
+   if (videosrc_name) {
+     GstElement *wrapper;
+     GstElement *videosrc;
+
+     if (wrappersrc_name)
+       wrapper = gst_element_factory_make (wrappersrc_name, NULL);
+     else
+       wrapper = gst_element_factory_make ("wrappercamerabinsrc", NULL);
+
+     if (setup_pipeline_element (wrapper, "video-source", videosrc_name, NULL)) {
+       g_object_set (camerabin, "camera-source", wrapper, NULL);
+       g_object_unref (wrapper);
+     } else {
+       GST_WARNING ("Failed to set videosrc to %s", videosrc_name);
+     }
+
+     /* NOTE(review): the videosrc ref returned here is never released,
+      * and on the success path above wrapper is used after our ref was
+      * dropped (camerabin still holds one) -- confirm intended. */
+     g_object_get (wrapper, "video-source", &videosrc, NULL);
+     if (videosrc && videodevice_name &&
+         g_object_class_find_property (G_OBJECT_GET_CLASS (videosrc),
+             "device")) {
+       g_object_set (videosrc, "device", videodevice_name, NULL);
+     }
+   }
+
+   /* configure used elements */
+   res &=
+       setup_pipeline_element (camerabin, "audio-source", audiosrc_name, NULL);
+   res &=
+       setup_pipeline_element (camerabin, "viewfinder-sink", vfsink_name, &sink);
+   res &=
+       setup_pipeline_element (camerabin, "viewfinder-filter", viewfinder_filter,
+       NULL);
+
+   if (imagepp_name) {
+     ipp = create_ipp_bin ();
+     if (ipp) {
+       g_object_set (camerabin, "image-filter", ipp, NULL);
+       g_object_unref (ipp);
+     } else
+       GST_WARNING ("Could not create ipp elements");
+   }
+
+   prof = load_encoding_profile ();
+   if (prof) {
+     g_object_set (G_OBJECT (camerabin), "video-profile", prof, NULL);
+     gst_encoding_profile_unref (prof);
+   }
+
+   GST_INFO_OBJECT (camerabin, "elements created");
+
+   if (sink) {
+     g_object_set (sink, "sync", TRUE, NULL);
+   } else {
+     /* Get the inner viewfinder sink, this uses fixed names given
+      * by default in camerabin */
+     sink = gst_bin_get_by_name (GST_BIN (camerabin), "vf-bin");
+     g_assert (sink);
+     gst_object_unref (sink);
+
+     /* NOTE(review): sink is dereferenced after being unreffed above
+      * (camerabin keeps it alive), and the ref below is dropped too, so
+      * viewfinder_sink ends up a borrowed pointer -- confirm intended. */
+     sink = gst_bin_get_by_name (GST_BIN (sink), "vfbin-sink");
+     g_assert (sink);
+     gst_object_unref (sink);
+   }
+   viewfinder_sink = sink;
+
+   GST_INFO_OBJECT (camerabin, "elements configured");
+
+   /* configure a resolution and framerate */
+   if (image_width > 0 && image_height > 0) {
+     if (mode == MODE_VIDEO) {
+       GstCaps *caps = NULL;
+       if (view_framerate_num > 0)
+         caps = gst_caps_new_full (gst_structure_new ("video/x-raw",
+                 "width", G_TYPE_INT, image_width,
+                 "height", G_TYPE_INT, image_height,
+                 "framerate", GST_TYPE_FRACTION, view_framerate_num,
+                 view_framerate_den, NULL), NULL);
+       else
+         caps = gst_caps_new_full (gst_structure_new ("video/x-raw",
+                 "width", G_TYPE_INT, image_width,
+                 "height", G_TYPE_INT, image_height, NULL), NULL);
+
+       g_object_set (camerabin, "video-capture-caps", caps, NULL);
+       gst_caps_unref (caps);
+     } else {
+       GstCaps *caps = gst_caps_new_full (gst_structure_new ("video/x-raw",
+               "width", G_TYPE_INT, image_width,
+               "height", G_TYPE_INT, image_height, NULL), NULL);
+
+       g_object_set (camerabin, "image-capture-caps", caps, NULL);
+       gst_caps_unref (caps);
+     }
+   }
+
+   set_camerabin_caps_from_string ();
+
+   /* Deliberately start in the wrong mode when measuring performance so
+    * the switch back to the requested mode (below, in PLAYING) can be
+    * timed. */
+   if (performance_measure) {
+     g_object_set (camerabin, "mode",
+         mode == MODE_VIDEO ? MODE_IMAGE : MODE_VIDEO, NULL);
+   }
+
+   if (GST_STATE_CHANGE_FAILURE ==
+       gst_element_set_state (camerabin, GST_STATE_READY)) {
+     g_warning ("can't set camerabin to ready\n");
+     goto error;
+   }
+   GST_INFO_OBJECT (camerabin, "camera ready");
+
+   if (GST_STATE_CHANGE_FAILURE ==
+       gst_element_set_state (camerabin, GST_STATE_PLAYING)) {
+     g_warning ("can't set camerabin to playing\n");
+     goto error;
+   }
+
+   GST_INFO_OBJECT (camerabin, "camera started");
+
+   /* do the mode change timestamping if performance mode is on */
+   if (performance_measure) {
+     change_mode_before = gst_util_get_timestamp ();
+     g_object_set (camerabin, "mode", mode, NULL);
+     change_mode_after = gst_util_get_timestamp ();
+   }
+
+   return TRUE;
+ error:
+   cleanup_pipeline ();
+   return FALSE;
+ }
+
+ /* notify::idle callback installed by stop_capture(): once camerabin
+ * reports it is idle again after a capture, either schedule the next
+ * capture or quit the main loop.  self/pspec/user_data are unused; the
+ * global camerabin is queried directly. */
+ static void
+ stop_capture_cb (GObject * self, GParamSpec * pspec, gpointer user_data)
+ {
+ gboolean idle = FALSE;
+
+ g_object_get (camerabin, "idle", &idle, NULL);
+
+ /* Only act -- and only disconnect -- once camerabin has actually become
+ * idle.  The original code disconnected unconditionally, so a
+ * notification delivered while "idle" was still FALSE dropped the
+ * handler and the test would hang waiting for a signal that could no
+ * longer arrive. */
+ if (idle) {
+ if (capture_count < capture_total) {
+ g_idle_add ((GSourceFunc) run_pipeline, NULL);
+ } else {
+ g_main_loop_quit (loop);
+ }
+ g_signal_handler_disconnect (camerabin, stop_capture_cb_id);
+ }
+ }
+
+ /* Ask camerabin to end the ongoing (video) capture.  A notify::idle
+ * handler is hooked up first so we learn when the capture has fully
+ * drained.  Returns FALSE so the g_timeout_add source fires only once. */
+ static gboolean
+ stop_capture (gpointer user_data)
+ {
+ stop_capture_cb_id =
+ g_signal_connect (camerabin, "notify::idle", (GCallback) stop_capture_cb,
+ camerabin);
+ g_signal_emit_by_name (camerabin, "stop-capture", 0);
+ return FALSE;
+ }
+
+ /* Stamp the upcoming capture with descriptive tags (capture time,
+ * author-based description, title, fixed dummy geolocation and device
+ * info) through the GstTagSetter interface that camerabin implements. */
+ static void
+ set_metadata (GstElement * camera)
+ {
+ gchar *description;
+ GstDateTime *now;
+ GstTagSetter *setter = GST_TAG_SETTER (camera);
+
+ now = gst_date_time_new_now_local_time ();
+ description = g_strdup_printf ("captured by %s", g_get_real_name ());
+
+ gst_tag_setter_add_tags (setter, GST_TAG_MERGE_REPLACE,
+ GST_TAG_DATE_TIME, now,
+ GST_TAG_DESCRIPTION, description,
+ GST_TAG_TITLE, "gst-camerabin-test capture",
+ GST_TAG_GEO_LOCATION_LONGITUDE, 1.0,
+ GST_TAG_GEO_LOCATION_LATITUDE, 2.0,
+ GST_TAG_GEO_LOCATION_ELEVATION, 3.0,
+ GST_TAG_DEVICE_MANUFACTURER, "gst-camerabin-test manufacturer",
+ GST_TAG_DEVICE_MODEL, "gst-camerabin-test model", NULL);
+
+ gst_date_time_unref (now);
+ g_free (description);
+ }
+
+ /* Configure one capture (mode, preview caps, metadata tags, target
+ * filename, GstPhotography options, zoom) and start it.  For video mode
+ * a timeout is armed that stops the capture after capture_time seconds.
+ * Meant to run from an idle source: always returns FALSE so it fires
+ * exactly once per scheduling. */
+ static gboolean
+ run_pipeline (gpointer user_data)
+ {
+ GstCaps *preview_caps = NULL;
+ gchar *filename_str = NULL;
+ GstElement *video_source = NULL;
+ const gchar *filename_suffix;
+ CaptureTiming *timing;
+
+ g_object_set (camerabin, "mode", mode, NULL);
+
+ if (preview_caps_name != NULL) {
+ preview_caps = gst_caps_from_string (preview_caps_name);
+ if (preview_caps) {
+ g_object_set (camerabin, "preview-caps", preview_caps, NULL);
+ gst_caps_unref (preview_caps);
+ GST_DEBUG ("Preview caps set");
+ } else
+ GST_DEBUG ("Preview caps set but could not create caps from string");
+ }
+
+ set_metadata (camerabin);
+
+ /* Construct filename: <dir>/test_NNNN.{mp4|jpg} */
+ if (mode == MODE_VIDEO)
+ filename_suffix = ".mp4";
+ else
+ filename_suffix = ".jpg";
+ filename_str =
+ g_strdup_printf ("%s/test_%04u%s", filename->str, capture_count,
+ filename_suffix);
+ GST_DEBUG ("Setting filename: %s", filename_str);
+ g_object_set (camerabin, "location", filename_str, NULL);
+ g_free (filename_str);
+
+ g_object_get (camerabin, "camera-source", &video_source, NULL);
+ if (video_source) {
+ if (GST_IS_ELEMENT (video_source) && GST_IS_PHOTOGRAPHY (video_source)) {
+ /* Set GstPhotography interface options. If option not given as
+ command-line parameter use default of the source element. */
+ if (scene_mode != SCENE_MODE_NONE)
+ g_object_set (video_source, "scene-mode", scene_mode, NULL);
+ if (ev_compensation != EV_COMPENSATION_NONE)
+ g_object_set (video_source, "ev-compensation", ev_compensation, NULL);
+ if (aperture != APERTURE_NONE)
+ g_object_set (video_source, "aperture", aperture, NULL);
+ if (flash_mode != FLASH_MODE_NONE)
+ g_object_set (video_source, "flash-mode", flash_mode, NULL);
+ if (exposure != EXPOSURE_NONE)
+ g_object_set (video_source, "exposure", exposure, NULL);
+ if (iso_speed != ISO_SPEED_NONE)
+ g_object_set (video_source, "iso-speed", iso_speed, NULL);
+ if (wb_mode != WHITE_BALANCE_MODE_NONE)
+ g_object_set (video_source, "white-balance-mode", wb_mode, NULL);
+ if (color_mode != COLOR_TONE_MODE_NONE)
+ g_object_set (video_source, "colour-tone-mode", color_mode, NULL);
+ }
+ } else {
+ /* Fall back to the source element camerabin created internally. */
+ video_source = gst_bin_get_by_name (GST_BIN (camerabin), "camerasrc");
+ }
+ g_object_set (camerabin, "zoom", zoom / 100.0f, NULL);
+
+ capture_count++;
+
+ timing = g_slice_new0 (CaptureTiming);
+ capture_times = g_list_prepend (capture_times, timing);
+
+ /* set pad probe to check when buffer leaves the camera source */
+ if (mode == MODE_IMAGE && video_source) {
+ GstPad *pad;
+
+ pad = gst_element_get_static_pad (video_source, "imgsrc");
+ if (pad) {
+ camera_probe_id = gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
+ camera_src_get_timestamp_probe, NULL, NULL);
+
+ gst_object_unref (pad);
+ }
+ }
+ /* Release our reference only after the probe has been installed.  The
+ * original code unreffed video_source earlier and then dereferenced the
+ * potentially dangling pointer when fetching the "imgsrc" pad. */
+ if (video_source)
+ gst_object_unref (video_source);
+ timing->start_capture = gst_util_get_timestamp ();
+ g_signal_emit_by_name (camerabin, "start-capture", 0);
+
+ if (mode == MODE_VIDEO) {
+ g_timeout_add ((capture_time * 1000), (GSourceFunc) stop_capture, NULL);
+ }
+
+ return FALSE;
+ }
+
+ /* Parse --performance-targets (seven comma-separated seconds values)
+ * into the global target_* GstClockTime variables.  Values not supplied
+ * default to 0.  No-op when the option was not given. */
+ static void
+ parse_target_values (void)
+ {
+ gdouble startup = 0, change_mode = 0, shot_to_save = 0, shot_to_snapshot = 0;
+ gdouble shot_to_shot = 0, preview_to_precapture = 0, shot_to_buffer = 0;
+ gint parsed;
+
+ if (performance_targets_str == NULL)
+ return;
+
+ /*
+ startup time, change mode time, shot to save, shot to snapshot,
+ shot to shot, preview to precapture, shot to buffer.
+ */
+ parsed = sscanf (performance_targets_str, "%lf,%lf,%lf,%lf,%lf,%lf,%lf",
+ &startup, &change_mode, &shot_to_save,
+ &shot_to_snapshot, &shot_to_shot, &preview_to_precapture,
+ &shot_to_buffer);
+ /* The original ignored the sscanf result, silently zero-filling any
+ * targets the user mistyped; make partial parses visible. */
+ if (parsed != 7)
+ g_warning ("Expected 7 comma-separated performance targets, parsed %d; "
+ "remaining targets default to 0", parsed);
+
+ target_startup = (GstClockTime) (startup * GST_SECOND);
+ target_change_mode = (GstClockTime) (change_mode * GST_SECOND);
+ target_shot_to_save = (GstClockTime) (shot_to_save * GST_SECOND);
+ target_shot_to_snapshot = (GstClockTime) (shot_to_snapshot * GST_SECOND);
+ target_shot_to_shot = (GstClockTime) (shot_to_shot * GST_SECOND);
+ target_preview_to_precapture =
+ (GstClockTime) (preview_to_precapture * GST_SECOND);
+ target_shot_to_buffer = (GstClockTime) (shot_to_buffer * GST_SECOND);
+ }
+
+ /* Print the collected per-capture timing statistics (per-capture rows,
+ * then MIN/MAX/AVG summaries) together with the user-supplied targets.
+ * No-op unless --performance-measure was given. */
+ static void
+ print_performance_data (void)
+ {
+ GList *iter;
+ gint i = 0;
+ GstClockTime last_start = 0;
+ CaptureTimingStats max;
+ CaptureTimingStats min;
+ CaptureTimingStats avg;
+ CaptureTimingStats avg_wo_first;
+ GstClockTime shot_to_shot;
+
+ if (!performance_measure)
+ return;
+
+ parse_target_values ();
+
+ /* Initialize stats */
+ /* -1 assigned to an unsigned GstClockTime wraps to the maximum value,
+ * so every real sample compares smaller and becomes the new minimum. */
+ min.shot_to_shot = -1;
+ min.shot_to_save = -1;
+ min.shot_to_snapshot = -1;
+ min.preview_to_precapture = -1;
+ min.shot_to_buffer = -1;
+ memset (&avg, 0, sizeof (CaptureTimingStats));
+ memset (&avg_wo_first, 0, sizeof (CaptureTimingStats));
+ memset (&max, 0, sizeof (CaptureTimingStats));
+
+ g_print ("-- Performance results --\n");
+ g_print ("Startup time: %" TIME_FORMAT "; Target: %" TIME_FORMAT "\n",
+ TIME_ARGS (startup_time - initial_time), TIME_ARGS (target_startup));
+ g_print ("Change mode time: %" TIME_FORMAT "; Target: %" TIME_FORMAT "\n",
+ TIME_ARGS (change_mode_after - change_mode_before),
+ TIME_ARGS (target_change_mode));
+
+ g_print
+ ("\n | Shot to save |Shot to snapshot| Shot to shot |"
+ "Preview to precap| Shot to buffer\n");
+ /* capture_times was built with g_list_prepend; reverse it so rows are
+ * printed in chronological capture order. */
+ capture_times = g_list_reverse (capture_times);
+ for (iter = capture_times; iter; iter = g_list_next (iter)) {
+ CaptureTiming *t = (CaptureTiming *) iter->data;
+ CaptureTimingStats stats;
+
+ stats.shot_to_save = SHOT_TO_SAVE (t);
+ stats.shot_to_snapshot = SHOT_TO_SNAPSHOT (t);
+ /* shot-to-shot is an interval between consecutive captures, so the
+ * first capture gets a 0 placeholder. */
+ stats.shot_to_shot = i == 0 ? 0 : t->start_capture - last_start;
+ stats.preview_to_precapture = PREVIEW_TO_PRECAPTURE (t);
+ stats.shot_to_buffer = SHOT_TO_BUFFER (t);
+
+ PRINT_STATS (i, &stats);
+
+ /* avg_wo_first excludes the first capture, which typically pays
+ * one-time warm-up costs. */
+ if (i != 0) {
+ capture_timing_stats_add (&avg_wo_first, &stats);
+ }
+ capture_timing_stats_add (&avg, &stats);
+
+ if (stats.shot_to_save < min.shot_to_save) {
+ min.shot_to_save = stats.shot_to_save;
+ }
+ if (stats.shot_to_snapshot < min.shot_to_snapshot) {
+ min.shot_to_snapshot = stats.shot_to_snapshot;
+ }
+ /* > 0 check skips the first capture's placeholder interval. */
+ if (stats.shot_to_shot < min.shot_to_shot && stats.shot_to_shot > 0) {
+ min.shot_to_shot = stats.shot_to_shot;
+ }
+ if (stats.preview_to_precapture < min.preview_to_precapture) {
+ min.preview_to_precapture = stats.preview_to_precapture;
+ }
+ if (stats.shot_to_buffer < min.shot_to_buffer) {
+ min.shot_to_buffer = stats.shot_to_buffer;
+ }
+
+
+ if (stats.shot_to_save > max.shot_to_save) {
+ max.shot_to_save = stats.shot_to_save;
+ }
+ if (stats.shot_to_snapshot > max.shot_to_snapshot) {
+ max.shot_to_snapshot = stats.shot_to_snapshot;
+ }
+ if (stats.shot_to_shot > max.shot_to_shot) {
+ max.shot_to_shot = stats.shot_to_shot;
+ }
+ if (stats.preview_to_precapture > max.preview_to_precapture) {
+ max.preview_to_precapture = stats.preview_to_precapture;
+ }
+ if (stats.shot_to_buffer > max.shot_to_buffer) {
+ max.shot_to_buffer = stats.shot_to_buffer;
+ }
+
+ last_start = t->start_capture;
+ i++;
+ }
+
+ if (i == 0)
+ return;
+
+ /* There are only i-1 shot-to-shot intervals for i captures, so that
+ * average uses a different divisor; it is restored after the generic
+ * divide below overwrites it. */
+ if (i > 1)
+ shot_to_shot = avg.shot_to_shot / (i - 1);
+ else
+ shot_to_shot = GST_CLOCK_TIME_NONE;
+ capture_timing_stats_div (&avg, i);
+ avg.shot_to_shot = shot_to_shot;
+ if (i > 1)
+ capture_timing_stats_div (&avg_wo_first, i - 1);
+ else {
+ memset (&avg_wo_first, 0, sizeof (CaptureTimingStats));
+ }
+
+ g_print ("\n Stats | MIN | MAX |"
+ " AVG | AVG wo First | Target | Diff \n");
+ g_print ("Shot to shot | %" TIME_FORMAT " | %" TIME_FORMAT
+ " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT
+ " | %" TIMEDIFF_FORMAT "\n",
+ TIME_ARGS (min.shot_to_shot), TIME_ARGS (max.shot_to_shot),
+ TIME_ARGS (avg.shot_to_shot),
+ TIME_ARGS (avg_wo_first.shot_to_shot),
+ TIME_ARGS (target_shot_to_shot),
+ TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_shot, target_shot_to_shot)));
+ g_print ("Shot to save | %" TIME_FORMAT " | %" TIME_FORMAT
+ " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT
+ " | %" TIMEDIFF_FORMAT "\n",
+ TIME_ARGS (min.shot_to_save), TIME_ARGS (max.shot_to_save),
+ TIME_ARGS (avg.shot_to_save),
+ TIME_ARGS (avg_wo_first.shot_to_save),
+ TIME_ARGS (target_shot_to_save),
+ TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_save, target_shot_to_save)));
+ g_print ("Shot to snapshot | %" TIME_FORMAT " | %" TIME_FORMAT
+ " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT
+ " | %" TIMEDIFF_FORMAT "\n",
+ TIME_ARGS (min.shot_to_snapshot),
+ TIME_ARGS (max.shot_to_snapshot),
+ TIME_ARGS (avg.shot_to_snapshot),
+ TIME_ARGS (avg_wo_first.shot_to_snapshot),
+ TIME_ARGS (target_shot_to_snapshot),
+ TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_snapshot,
+ target_shot_to_snapshot)));
+ g_print ("Preview to precapture | %" TIME_FORMAT " | %" TIME_FORMAT " | %"
+ TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIMEDIFF_FORMAT
+ "\n", TIME_ARGS (min.preview_to_precapture),
+ TIME_ARGS (max.preview_to_precapture),
+ TIME_ARGS (avg.preview_to_precapture),
+ TIME_ARGS (avg_wo_first.preview_to_precapture),
+ TIME_ARGS (target_preview_to_precapture),
+ TIMEDIFF_ARGS (TIME_DIFF (avg.preview_to_precapture,
+ target_preview_to_precapture)));
+ g_print ("Shot to buffer | %" TIME_FORMAT " | %" TIME_FORMAT " | %"
+ TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIMEDIFF_FORMAT
+ "\n", TIME_ARGS (min.shot_to_buffer), TIME_ARGS (max.shot_to_buffer),
+ TIME_ARGS (avg.shot_to_buffer), TIME_ARGS (avg_wo_first.shot_to_buffer),
+ TIME_ARGS (target_shot_to_buffer),
+ TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_buffer, target_shot_to_buffer)));
+ }
+
+ /* Entry point: parse command-line options, build and run the camerabin
+ * pipeline for the requested number of captures, optionally print
+ * performance data, then release all resources.  Returns 0 always (option
+ * parse failures exit(1) earlier). */
+ int
+ main (int argc, char *argv[])
+ {
+ gchar *ev_option = NULL;
+ gchar *fn_option = NULL;
+
+ GOptionEntry options[] = {
+ {"ev-compensation", '\0', 0, G_OPTION_ARG_STRING, &ev_option,
+ "EV compensation for source element GstPhotography interface", NULL},
+ {"aperture", '\0', 0, G_OPTION_ARG_INT, &aperture,
+ "Aperture (size of lens opening) for source element GstPhotography interface",
+ NULL},
+ {"flash-mode", '\0', 0, G_OPTION_ARG_INT,
+ &flash_mode,
+ "Flash mode for source element GstPhotography interface", NULL},
+ {"scene-mode", '\0', 0, G_OPTION_ARG_INT,
+ &scene_mode,
+ "Scene mode for source element GstPhotography interface", NULL},
+ {"exposure", '\0', 0, G_OPTION_ARG_INT64,
+ &exposure,
+ "Exposure time (in ms) for source element GstPhotography interface",
+ NULL},
+ {"iso-speed", '\0', 0, G_OPTION_ARG_INT,
+ &iso_speed,
+ "ISO speed for source element GstPhotography interface", NULL},
+ {"white-balance-mode", '\0', 0, G_OPTION_ARG_INT,
+ &wb_mode,
+ "White balance mode for source element GstPhotography interface", NULL},
+ {"colour-tone-mode", '\0', 0, G_OPTION_ARG_INT,
+ &color_mode,
+ "Colour tone mode for source element GstPhotography interface", NULL},
+ {"directory", '\0', 0, G_OPTION_ARG_STRING, &fn_option,
+ "Directory for capture file(s) (default is current directory)", NULL},
+ {"mode", '\0', 0, G_OPTION_ARG_INT, &mode,
+ "Capture mode (default = 1 (image), 2 = video)", NULL},
+ {"capture-time", '\0', 0, G_OPTION_ARG_INT,
+ &capture_time,
+ "Time to capture video in seconds (default = 10)", NULL},
+ {"capture-total", '\0', 0, G_OPTION_ARG_INT, &capture_total,
+ "Total number of captures to be done (default = 1)", NULL},
+ {"zoom", '\0', 0, G_OPTION_ARG_INT, &zoom,
+ "Zoom (100 = 1x (default), 200 = 2x etc.)", NULL},
+ {"wrapper-source", '\0', 0, G_OPTION_ARG_STRING, &wrappersrc_name,
+ "Camera source wrapper used for setting the video source (default is wrappercamerabinsrc)",
+ NULL},
+ {"video-source", '\0', 0, G_OPTION_ARG_STRING, &videosrc_name,
+ "Video source used in still capture and video recording", NULL},
+ {"video-device", '\0', 0, G_OPTION_ARG_STRING, &videodevice_name,
+ "Video device to be set on the video source", NULL},
+ {"audio-source", '\0', 0, G_OPTION_ARG_STRING, &audiosrc_name,
+ "Audio source used in video recording", NULL},
+ {"image-pp", '\0', 0, G_OPTION_ARG_STRING, &imagepp_name,
+ "List of image post-processing elements separated with comma", NULL},
+ {"viewfinder-sink", '\0', 0, G_OPTION_ARG_STRING, &vfsink_name,
+ "Viewfinder sink (default = fakesink)", NULL},
+ {"image-width", '\0', 0, G_OPTION_ARG_INT, &image_width,
+ "Width for image capture", NULL},
+ {"image-height", '\0', 0, G_OPTION_ARG_INT, &image_height,
+ "Height for image capture", NULL},
+ {"view-framerate-num", '\0', 0, G_OPTION_ARG_INT, &view_framerate_num,
+ "Framerate numerator for viewfinder", NULL},
+ {"view-framerate-den", '\0', 0, G_OPTION_ARG_INT, &view_framerate_den,
+ "Framerate denominator for viewfinder", NULL},
+ {"preview-caps", '\0', 0, G_OPTION_ARG_STRING, &preview_caps_name,
+ "Preview caps (e.g. video/x-raw-rgb,width=320,height=240)", NULL},
+ {"viewfinder-filter", '\0', 0, G_OPTION_ARG_STRING, &viewfinder_filter,
+ "Filter to process all frames going to viewfinder sink", NULL},
+ {"x-width", '\0', 0, G_OPTION_ARG_INT, &x_width,
+ "X window width (default = 320)", NULL},
+ {"x-height", '\0', 0, G_OPTION_ARG_INT, &x_height,
+ "X window height (default = 240)", NULL},
+ {"no-xwindow", '\0', 0, G_OPTION_ARG_NONE, &no_xwindow,
+ "Do not create XWindow", NULL},
+ {"encoding-target", '\0', 0, G_OPTION_ARG_STRING, &gep_targetname,
+ "Video encoding target name", NULL},
+ {"encoding-profile", '\0', 0, G_OPTION_ARG_STRING, &gep_profilename,
+ "Video encoding profile name", NULL},
+ {"encoding-profile-filename", '\0', 0, G_OPTION_ARG_STRING, &gep_filename,
+ "Video encoding profile filename", NULL},
+ {"image-capture-caps", '\0', 0,
+ G_OPTION_ARG_STRING, &image_capture_caps_str,
+ "Image capture caps (e.g. video/x-raw-rgb,width=640,height=480)", NULL},
+ {"viewfinder-caps", '\0', 0, G_OPTION_ARG_STRING,
+ &viewfinder_caps_str,
+ "Viewfinder caps (e.g. video/x-raw-rgb,width=640,height=480)", NULL},
+ {"video-capture-caps", '\0', 0,
+ G_OPTION_ARG_STRING, &video_capture_caps_str,
+ "Video capture caps (e.g. video/x-raw-rgb,width=640,height=480)", NULL},
+ {"audio-capture-caps", '\0', 0,
+ G_OPTION_ARG_STRING, &audio_capture_caps_str,
+ "Audio capture caps (e.g. audio/x-raw-int,width=16,depth=16,rate=44100,channels=2)",
+ NULL},
+ {"performance-measure", '\0', 0,
+ G_OPTION_ARG_NONE, &performance_measure,
+ "If performance information should be printed at the end of execution",
+ NULL},
+ {"performance-targets", '\0', 0,
+ G_OPTION_ARG_STRING, &performance_targets_str,
+ "Comma separated list of doubles representing the target values in "
+ "seconds. The order is: startup time, change mode time, shot to save"
+ ", shot to snapshot, shot to shot, preview to shot, shot to buffer. "
+ "e.g. 3.5,1.0,5.0,2.5,5.0,1.5,1.0",
+ NULL},
+ {"flags", '\0', 0, G_OPTION_ARG_STRING, &camerabin_flags,
+ "camerabin element flags (default = 0)", NULL},
+ {NULL}
+ };
+
+ GOptionContext *ctx;
+ GError *err = NULL;
+
+ ctx = g_option_context_new ("\n\ncamerabin command line test application.");
+ g_option_context_add_main_entries (ctx, options, NULL);
+ g_option_context_add_group (ctx, gst_init_get_option_group ());
+ if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
+ g_print ("Error initializing: %s\n", err->message);
+ g_option_context_free (ctx);
+ g_clear_error (&err);
+ exit (1);
+ }
+ g_option_context_free (ctx);
+
+ GST_DEBUG_CATEGORY_INIT (camerabin_test, "camerabin-test", 0,
+ "camerabin test");
+
+ /* if we fail to create xwindow should we care? */
+ if (!no_xwindow)
+ create_host_window ();
+
+ /* FIXME: error handling */
+ if (ev_option != NULL)
+ ev_compensation = strtod (ev_option, (char **) NULL);
+
+ if (vfsink_name == NULL)
+ vfsink_name = g_strdup ("fakesink");
+
+ /* Default capture directory is the current directory. */
+ filename = g_string_new (fn_option);
+ if (filename->len == 0)
+ filename = g_string_append (filename, ".");
+
+ /* init */
+ if (setup_pipeline ()) {
+ loop = g_main_loop_new (NULL, FALSE);
+ g_idle_add ((GSourceFunc) run_pipeline, NULL);
+ g_main_loop_run (loop);
+ cleanup_pipeline ();
+ g_main_loop_unref (loop);
+ }
+
+ /* performance */
+ if (performance_measure) {
+ print_performance_data ();
+ }
+
+ /* free */
+ {
+ GList *iter;
+
+ for (iter = capture_times; iter; iter = g_list_next (iter)) {
+ g_slice_free (CaptureTiming, iter->data);
+ }
+ g_list_free (capture_times);
+ }
+
+ g_string_free (filename, TRUE);
+ g_free (ev_option);
+ g_free (wrappersrc_name);
+ g_free (videosrc_name);
+ g_free (videodevice_name);
+ g_free (audiosrc_name);
+ g_free (imagepp_name);
+ g_free (vfsink_name);
+ g_free (gep_targetname);
+ g_free (gep_profilename);
+ g_free (gep_filename);
+ g_free (performance_targets_str);
+
+ if (window)
+ XDestroyWindow (display, window);
+
+ if (display)
+ XCloseDisplay (display);
+
+ return 0;
+ }