*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
-/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
- * with newer GLib versions (>= 2.31.0) */
-#define GLIB_DISABLE_DEPRECATION_WARNINGS
-
#include <string.h>
#include <gst/gst.h>
#include <gst/gst-i18n-plugin.h>
#include <gst/pbutils/pbutils.h>
#include <gst/video/video.h>
-#include <gst/interfaces/streamvolume.h>
+#include <gst/audio/streamvolume.h>
+#include <gst/video/colorbalance.h>
+#include <gst/video/videooverlay.h>
+#include <gst/video/navigation.h>
#include "gstplaysink.h"
#include "gststreamsynchronizer.h"
#define VOLUME_MAX_DOUBLE 10.0
#define DEFAULT_FLAGS GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_TEXT | \
- GST_PLAY_FLAG_SOFT_VOLUME
+ GST_PLAY_FLAG_SOFT_VOLUME | GST_PLAY_FLAG_SOFT_COLORBALANCE
#define GST_PLAY_CHAIN(c) ((GstPlayChain *)(c))
+/* enum types */
+/**
+ * GstPlaySinkSendEventMode:
+ * @MODE_DEFAULT: default GstBin's send_event handling
+ * @MODE_FIRST: send the event only to the first sink that returns TRUE
+ *
+ * Send event handling to use
+ */
+typedef enum
+{
+ MODE_DEFAULT = 0,
+ MODE_FIRST = 1
+} GstPlaySinkSendEventMode;
+
+
+#define GST_TYPE_PLAY_SINK_SEND_EVENT_MODE (gst_play_sink_send_event_mode_get_type ())
+static GType
+gst_play_sink_send_event_mode_get_type (void)
+{
+  /* Lazily register the GstPlaySinkSendEventMode enum GType.
+   * g_enum_register_static() must run exactly once; use the
+   * g_once_init_enter()/leave() pair so concurrent first calls from
+   * different threads cannot both register (a plain static check
+   * would be racy). */
+  static gsize gtype = 0;
+
+  if (g_once_init_enter (&gtype)) {
+    static const GEnumValue values[] = {
+      {MODE_DEFAULT, "Default GstBin's send_event handling (default)",
+          "default"},
+      {MODE_FIRST, "Sends the event to sinks until the first one handles it",
+          "first"},
+      {0, NULL, NULL}
+    };
+    GType tmp = g_enum_register_static ("GstPlaySinkSendEventMode", values);
+
+    g_once_init_leave (&gtype, tmp);
+  }
+  return (GType) gtype;
+}
+
/* holds the common data fields for the audio and video pipelines. We keep them
* in a structure to more easily have all the info available. */
typedef struct
GstElement *queue;
GstElement *conv;
GstElement *resample;
- GstPad *blockpad; /* srcpad of resample, used for switching the vis */
+ GstPad *blockpad; /* srcpad of queue, used for blocking the vis */
+ GstPad *vispeerpad; /* srcpad of resample, used for unlinking the vis */
GstPad *vissinkpad; /* visualisation sinkpad, */
GstElement *vis;
GstPad *vissrcpad; /* visualisation srcpad, */
#define GST_PLAY_SINK_GET_LOCK(playsink) (&((GstPlaySink *)playsink)->lock)
#define GST_PLAY_SINK_LOCK(playsink) G_STMT_START { \
GST_LOG_OBJECT (playsink, "locking from thread %p", g_thread_self ()); \
- g_static_rec_mutex_lock (GST_PLAY_SINK_GET_LOCK (playsink)); \
+ g_rec_mutex_lock (GST_PLAY_SINK_GET_LOCK (playsink)); \
GST_LOG_OBJECT (playsink, "locked from thread %p", g_thread_self ()); \
} G_STMT_END
#define GST_PLAY_SINK_UNLOCK(playsink) G_STMT_START { \
GST_LOG_OBJECT (playsink, "unlocking from thread %p", g_thread_self ()); \
- g_static_rec_mutex_unlock (GST_PLAY_SINK_GET_LOCK (playsink)); \
+ g_rec_mutex_unlock (GST_PLAY_SINK_GET_LOCK (playsink)); \
} G_STMT_END
#define PENDING_FLAG_SET(playsink, flagtype) \
{
GstBin bin;
- GStaticRecMutex lock;
+ GRecMutex lock;
gboolean async_pending;
gboolean need_async_start;
gboolean audio_pad_blocked;
GstPad *audio_srcpad_stream_synchronizer;
GstPad *audio_sinkpad_stream_synchronizer;
+ gulong audio_block_id;
/* audio tee */
GstElement *audio_tee;
GstPad *audio_tee_sink;
gboolean video_pad_blocked;
GstPad *video_srcpad_stream_synchronizer;
GstPad *video_sinkpad_stream_synchronizer;
+ gulong video_block_id;
/* text */
GstPad *text_pad;
gboolean text_pad_blocked;
GstPad *text_srcpad_stream_synchronizer;
GstPad *text_sinkpad_stream_synchronizer;
+ gulong text_block_id;
guint32 pending_blocked_pads;
gchar *font_desc; /* font description */
gchar *subtitle_encoding; /* subtitle encoding */
guint connection_speed; /* connection speed in bits/sec (0 = unknown) */
- gint count;
+ guint count;
gboolean volume_changed; /* volume/mute changed while no audiochain */
gboolean mute_changed; /* ... has been created yet */
gint64 av_offset;
+ GstPlaySinkSendEventMode send_event_mode;
+ gboolean force_aspect_ratio;
+
+ /* videooverlay proxy interface */
+ GstVideoOverlay *overlay_element; /* protected with LOCK */
+ gboolean overlay_handle_set;
+ guintptr overlay_handle;
+ gboolean overlay_render_rectangle_set;
+ gint overlay_x, overlay_y, overlay_width, overlay_height;
+ gboolean overlay_handle_events_set;
+ gboolean overlay_handle_events;
+
+ /* colorbalance proxy interface */
+ GstColorBalance *colorbalance_element;
+ GList *colorbalance_channels; /* CONTRAST, BRIGHTNESS, HUE, SATURATION */
+ gint colorbalance_values[4];
+
+ /* sending audio/video flushes break stream changes when the pipeline
+ * is paused and played again in 0.10 */
+#if 0
+ GstSegment video_segment;
+ gboolean video_custom_flush_finished;
+ gboolean video_ignore_wrong_state;
+ gboolean video_pending_flush;
+
+ GstSegment audio_segment;
+ gboolean audio_custom_flush_finished;
+ gboolean audio_ignore_wrong_state;
+ gboolean audio_pending_flush;
+#endif
+
+ GstSegment text_segment;
+ gboolean text_custom_flush_finished;
+ gboolean text_ignore_wrong_state;
+ gboolean text_pending_flush;
};
struct _GstPlaySinkClass
gboolean (*reconfigure) (GstPlaySink * playsink);
- GstBuffer *(*convert_frame) (GstPlaySink * playsink, GstCaps * caps);
+ GstSample *(*convert_sample) (GstPlaySink * playsink, GstCaps * caps);
};
PROP_FONT_DESC,
PROP_SUBTITLE_ENCODING,
PROP_VIS_PLUGIN,
- PROP_FRAME,
+ PROP_SAMPLE,
PROP_AV_OFFSET,
PROP_VIDEO_SINK,
PROP_AUDIO_SINK,
PROP_TEXT_SINK,
+ PROP_SEND_EVENT_MODE,
+ PROP_FORCE_ASPECT_RATIO,
PROP_LAST
};
GValue * value, GParamSpec * spec);
static GstPad *gst_play_sink_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
static void gst_play_sink_release_request_pad (GstElement * element,
GstPad * pad);
static gboolean gst_play_sink_send_event (GstElement * element,
static void gst_play_sink_handle_message (GstBin * bin, GstMessage * message);
+/* sending audio/video flushes break stream changes when the pipeline
+ * is paused and played again in 0.10 */
+#if 0
+static gboolean gst_play_sink_video_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_play_sink_video_sink_chain (GstPad * pad,
+ GstBuffer * buffer);
+static gboolean gst_play_sink_audio_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_play_sink_audio_sink_chain (GstPad * pad,
+ GstBuffer * buffer);
+#endif
+static gboolean gst_play_sink_text_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_play_sink_text_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+
static void notify_volume_cb (GObject * object, GParamSpec * pspec,
GstPlaySink * playsink);
static void notify_mute_cb (GObject * object, GParamSpec * pspec,
static void update_av_offset (GstPlaySink * playsink);
-void
-gst_play_marshal_BUFFER__BOXED (GClosure * closure,
- GValue * return_value G_GNUC_UNUSED,
- guint n_param_values,
- const GValue * param_values,
- gpointer invocation_hint G_GNUC_UNUSED, gpointer marshal_data)
-{
- typedef GstBuffer *(*GMarshalFunc_OBJECT__BOXED) (gpointer data1,
- gpointer arg_1, gpointer data2);
- register GMarshalFunc_OBJECT__BOXED callback;
- register GCClosure *cc = (GCClosure *) closure;
- register gpointer data1, data2;
- GstBuffer *v_return;
- g_return_if_fail (return_value != NULL);
- g_return_if_fail (n_param_values == 2);
-
- if (G_CCLOSURE_SWAP_DATA (closure)) {
- data1 = closure->data;
- data2 = g_value_peek_pointer (param_values + 0);
- } else {
- data1 = g_value_peek_pointer (param_values + 0);
- data2 = closure->data;
- }
- callback =
- (GMarshalFunc_OBJECT__BOXED) (marshal_data ? marshal_data : cc->callback);
-
- v_return = callback (data1, g_value_get_boxed (param_values + 1), data2);
+static gboolean gst_play_sink_do_reconfigure (GstPlaySink * playsink);
- gst_value_take_buffer (return_value, v_return);
-}
+static GQuark _playsink_reset_segment_event_marker_id = 0;
/* static guint gst_play_sink_signals[LAST_SIGNAL] = { 0 }; */
+static void gst_play_sink_overlay_init (gpointer g_iface,
+ gpointer g_iface_data);
+static void gst_play_sink_navigation_init (gpointer g_iface,
+ gpointer g_iface_data);
+static void gst_play_sink_colorbalance_init (gpointer g_iface,
+ gpointer g_iface_data);
+
static void
_do_init (GType type)
{
static const GInterfaceInfo svol_info = {
NULL, NULL, NULL
};
+ static const GInterfaceInfo ov_info = {
+ gst_play_sink_overlay_init,
+ NULL, NULL
+ };
+ static const GInterfaceInfo nav_info = {
+ gst_play_sink_navigation_init,
+ NULL, NULL
+ };
+ static const GInterfaceInfo col_info = {
+ gst_play_sink_colorbalance_init,
+ NULL, NULL
+ };
g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_info);
+ g_type_add_interface_static (type, GST_TYPE_VIDEO_OVERLAY, &ov_info);
+ g_type_add_interface_static (type, GST_TYPE_NAVIGATION, &nav_info);
+ g_type_add_interface_static (type, GST_TYPE_COLOR_BALANCE, &col_info);
}
G_DEFINE_TYPE_WITH_CODE (GstPlaySink, gst_play_sink, GST_TYPE_BIN,
"the visualization element to use (NULL = default)",
GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
- * GstPlaySink:frame:
- *
- * Get the currently rendered or prerolled frame in the video sink.
- * The #GstCaps on the buffer will describe the format of the buffer.
+ * GstPlaySink:sample:
*
- * Since: 0.10.30
+ * Get the currently rendered or prerolled sample in the video sink.
+ * The #GstCaps in the sample will describe the format of the buffer.
*/
- g_object_class_install_property (gobject_klass, PROP_FRAME,
- gst_param_spec_mini_object ("frame", "Frame",
- "The last frame (NULL = no video available)",
- GST_TYPE_BUFFER, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_klass, PROP_SAMPLE,
+ g_param_spec_boxed ("sample", "Sample",
+ "The last sample (NULL = no video available)",
+ GST_TYPE_SAMPLE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
/**
* GstPlaySink:av-offset:
*
g_param_spec_object ("audio-sink", "Audio Sink",
"the audio output element to use (NULL = default sink)",
GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
/**
* GstPlaySink:text-sink:
*
*/
g_object_class_install_property (gobject_klass, PROP_TEXT_SINK,
g_param_spec_object ("text-sink", "Text sink",
- "the text output element to use (NULL = default textoverlay)",
+ "the text output element to use (NULL = default subtitleoverlay)",
GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstPlaySink::send-event-mode:
+ *
+ * Sets the handling method used for events received from send_event
+ * function. The default is %MODE_DEFAULT, that uses %GstBin's default
+ * handling (push the event to all internal sinks).
+ *
+ * Since: 0.10.37
+ */
+ g_object_class_install_property (gobject_klass, PROP_SEND_EVENT_MODE,
+ g_param_spec_enum ("send-event-mode", "Send event mode",
+ "How to send events received in send_event function",
+ GST_TYPE_PLAY_SINK_SEND_EVENT_MODE, MODE_DEFAULT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstPlaySink::force-aspect-ratio:
+ *
+ * Requests the video sink to enforce the video display aspect ratio.
+ *
+ * Since: 0.10.37
+ */
+ g_object_class_install_property (gobject_klass, PROP_FORCE_ASPECT_RATIO,
+ g_param_spec_boolean ("force-aspect-ratio", "Force Aspect Ratio",
+ "When enabled, scaling will respect original aspect ratio", TRUE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_signal_new ("reconfigure", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstPlaySinkClass,
- reconfigure), NULL, NULL, gst_marshal_BOOLEAN__VOID, G_TYPE_BOOLEAN,
+ reconfigure), NULL, NULL, g_cclosure_marshal_generic, G_TYPE_BOOLEAN,
0, G_TYPE_NONE);
/**
- * GstPlaySink::convert-frame
+ * GstPlaySink::convert-sample
* @playsink: a #GstPlaySink
- * @caps: the target format of the frame
+ * @caps: the target format of the sample
*
- * Action signal to retrieve the currently playing video frame in the format
+ * Action signal to retrieve the currently playing video sample in the format
* specified by @caps.
* If @caps is %NULL, no conversion will be performed and this function is
- * equivalent to the #GstPlaySink::frame property.
+ * equivalent to the #GstPlaySink::sample property.
*
- * Returns: a #GstBuffer of the current video frame converted to #caps.
- * The caps on the buffer will describe the final layout of the buffer data.
- * %NULL is returned when no current buffer can be retrieved or when the
+ * Returns: a #GstSample of the current video sample converted to #caps.
+ * The caps in the sample will describe the final layout of the buffer data.
+ * %NULL is returned when no current sample can be retrieved or when the
* conversion failed.
- *
- * Since: 0.10.30
*/
- g_signal_new ("convert-frame", G_TYPE_FROM_CLASS (klass),
+ g_signal_new ("convert-sample", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
- G_STRUCT_OFFSET (GstPlaySinkClass, convert_frame), NULL, NULL,
- gst_play_marshal_BUFFER__BOXED, GST_TYPE_BUFFER, 1, GST_TYPE_CAPS);
-
- gst_element_class_add_static_pad_template (gstelement_klass,
- &audiorawtemplate);
- gst_element_class_add_static_pad_template (gstelement_klass, &audiotemplate);
- gst_element_class_add_static_pad_template (gstelement_klass,
- &videorawtemplate);
- gst_element_class_add_static_pad_template (gstelement_klass, &videotemplate);
- gst_element_class_add_static_pad_template (gstelement_klass, &texttemplate);
- gst_element_class_set_details_simple (gstelement_klass, "Player Sink",
+ G_STRUCT_OFFSET (GstPlaySinkClass, convert_sample), NULL, NULL,
+ g_cclosure_marshal_generic, GST_TYPE_SAMPLE, 1, GST_TYPE_CAPS);
+
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&audiorawtemplate));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&audiotemplate));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&videorawtemplate));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&videotemplate));
+ gst_element_class_add_pad_template (gstelement_klass,
+ gst_static_pad_template_get (&texttemplate));
+ gst_element_class_set_static_metadata (gstelement_klass, "Player Sink",
"Generic/Bin/Sink",
"Convenience sink for multiple streams",
"Wim Taymans <wim.taymans@gmail.com>");
GST_DEBUG_FUNCPTR (gst_play_sink_handle_message);
klass->reconfigure = GST_DEBUG_FUNCPTR (gst_play_sink_reconfigure);
- klass->convert_frame = GST_DEBUG_FUNCPTR (gst_play_sink_convert_frame);
+ klass->convert_sample = GST_DEBUG_FUNCPTR (gst_play_sink_convert_sample);
+
+ _playsink_reset_segment_event_marker_id =
+ g_quark_from_static_string ("gst-playsink-reset-segment-event-marker");
+
+ g_type_class_ref (GST_TYPE_STREAM_SYNCHRONIZER);
+ g_type_class_ref (GST_TYPE_COLOR_BALANCE_CHANNEL);
}
static void
gst_play_sink_init (GstPlaySink * playsink)
{
+ GstColorBalanceChannel *channel;
+
/* init groups */
playsink->video_sink = NULL;
playsink->audio_sink = NULL;
playsink->font_desc = NULL;
playsink->subtitle_encoding = NULL;
playsink->flags = DEFAULT_FLAGS;
+ playsink->send_event_mode = MODE_DEFAULT;
+ playsink->force_aspect_ratio = TRUE;
playsink->stream_synchronizer =
g_object_new (GST_TYPE_STREAM_SYNCHRONIZER, NULL);
gst_bin_add (GST_BIN_CAST (playsink),
GST_ELEMENT_CAST (playsink->stream_synchronizer));
- g_static_rec_mutex_init (&playsink->lock);
- GST_OBJECT_FLAG_SET (playsink, GST_ELEMENT_IS_SINK);
+ g_rec_mutex_init (&playsink->lock);
+ GST_OBJECT_FLAG_SET (playsink, GST_ELEMENT_FLAG_SINK);
+
+ channel =
+ GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
+ NULL));
+ channel->label = g_strdup ("CONTRAST");
+ channel->min_value = -1000;
+ channel->max_value = 1000;
+ playsink->colorbalance_channels =
+ g_list_append (playsink->colorbalance_channels, channel);
+ playsink->colorbalance_values[0] = 0;
+
+ channel =
+ GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
+ NULL));
+ channel->label = g_strdup ("BRIGHTNESS");
+ channel->min_value = -1000;
+ channel->max_value = 1000;
+ playsink->colorbalance_channels =
+ g_list_append (playsink->colorbalance_channels, channel);
+ playsink->colorbalance_values[1] = 0;
+
+ channel =
+ GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
+ NULL));
+ channel->label = g_strdup ("HUE");
+ channel->min_value = -1000;
+ channel->max_value = 1000;
+ playsink->colorbalance_channels =
+ g_list_append (playsink->colorbalance_channels, channel);
+ playsink->colorbalance_values[2] = 0;
+
+ channel =
+ GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
+ NULL));
+ channel->label = g_strdup ("SATURATION");
+ channel->min_value = -1000;
+ channel->max_value = 1000;
+ playsink->colorbalance_channels =
+ g_list_append (playsink->colorbalance_channels, channel);
+ playsink->colorbalance_values[3] = 0;
}
static void
playsink->stream_synchronizer = NULL;
+ g_list_foreach (playsink->colorbalance_channels, (GFunc) gst_object_unref,
+ NULL);
+ g_list_free (playsink->colorbalance_channels);
+ playsink->colorbalance_channels = NULL;
+
G_OBJECT_CLASS (gst_play_sink_parent_class)->dispose (object);
}
playsink = GST_PLAY_SINK (object);
- g_static_rec_mutex_free (&playsink->lock);
+ g_rec_mutex_clear (&playsink->lock);
G_OBJECT_CLASS (gst_play_sink_parent_class)->finalize (object);
}
return result;
}
-static void
-gst_play_sink_vis_unblocked (GstPad * tee_pad, gboolean blocked,
- gpointer user_data)
-{
- GstPlaySink *playsink;
-
- playsink = GST_PLAY_SINK (user_data);
- /* nothing to do here, we need a dummy callback here to make the async call
- * non-blocking. */
- GST_DEBUG_OBJECT (playsink, "vis pad unblocked");
-}
-
-static void
-gst_play_sink_vis_blocked (GstPad * tee_pad, gboolean blocked,
+static GstPadProbeReturn
+gst_play_sink_vis_blocked (GstPad * tee_pad, GstPadProbeInfo * info,
gpointer user_data)
{
GstPlaySink *playsink;
goto done;
/* unlink the old plugin and unghost the pad */
- gst_pad_unlink (chain->blockpad, chain->vissinkpad);
+ gst_pad_unlink (chain->vispeerpad, chain->vissinkpad);
gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (chain->srcpad), NULL);
/* set the old plugin to NULL and remove */
chain->vissrcpad = gst_element_get_static_pad (chain->vis, "src");
/* link pads */
- gst_pad_link_full (chain->blockpad, chain->vissinkpad,
+ gst_pad_link_full (chain->vispeerpad, chain->vissinkpad,
GST_PAD_LINK_CHECK_NOTHING);
gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (chain->srcpad),
chain->vissrcpad);
done:
- /* Unblock the pad */
- gst_pad_set_blocked_async (tee_pad, FALSE, gst_play_sink_vis_unblocked,
- playsink);
GST_PLAY_SINK_UNLOCK (playsink);
+
+ /* remove the probe and unblock the pad */
+ return GST_PAD_PROBE_REMOVE;
}
void
* function returns FALSE but the previous pad block will do the right thing
* anyway. */
GST_DEBUG_OBJECT (playsink, "blocking vis pad");
- gst_pad_set_blocked_async (chain->blockpad, TRUE, gst_play_sink_vis_blocked,
- playsink);
+ gst_pad_add_probe (chain->blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ gst_play_sink_vis_blocked, playsink, NULL);
done:
GST_PLAY_SINK_UNLOCK (playsink);
else {
gst_bin_remove (GST_BIN_CAST (chain->playsink), chain->bin);
/* we don't want to lose our sink status */
- GST_OBJECT_FLAG_SET (chain->playsink, GST_ELEMENT_IS_SINK);
+ GST_OBJECT_FLAG_SET (chain->playsink, GST_ELEMENT_FLAG_SINK);
}
chain->added = add;
gboolean is_sink;
GST_OBJECT_LOCK (element);
- is_sink = GST_OBJECT_FLAG_IS_SET (element, GST_ELEMENT_IS_SINK);
+ is_sink = GST_OBJECT_FLAG_IS_SET (element, GST_ELEMENT_FLAG_SINK);
GST_OBJECT_UNLOCK (element);
GST_DEBUG_OBJECT (element, "is a sink: %s", (is_sink) ? "yes" : "no");
} FindPropertyHelper;
static gint
-find_property (GstElement * element, FindPropertyHelper * helper)
+find_property (const GValue * item, FindPropertyHelper * helper)
{
+ GstElement *element = g_value_get_object (item);
if (helper->need_sink && !element_is_sink (element)) {
- gst_object_unref (element);
return 1;
}
if (!element_has_property (element, helper->prop_name, helper->prop_type)) {
- gst_object_unref (element);
return 1;
}
if (element_has_property (obj, name, expected_type)) {
result = obj;
} else if (GST_IS_BIN (obj)) {
+ gboolean found;
+ GValue item = { 0, };
FindPropertyHelper helper = { name, expected_type, TRUE };
it = gst_bin_iterate_recurse (GST_BIN_CAST (obj));
- result = gst_iterator_find_custom (it,
- (GCompareFunc) find_property, &helper);
+ found = gst_iterator_find_custom (it,
+ (GCompareFunc) find_property, &item, &helper);
gst_iterator_free (it);
- /* we don't need the extra ref */
- if (result)
- gst_object_unref (result);
+ if (found) {
+ result = g_value_get_object (&item);
+ /* we don't need the extra ref */
+ g_value_unset (&item);
+ }
}
return result;
}
GstIterator *it;
if (GST_IS_BIN (obj)) {
+ gboolean found;
+ GValue item = { 0, };
FindPropertyHelper helper = { name, expected_type, FALSE };
it = gst_bin_iterate_recurse (GST_BIN_CAST (obj));
- result = gst_iterator_find_custom (it,
- (GCompareFunc) find_property, &helper);
+ found = gst_iterator_find_custom (it,
+ (GCompareFunc) find_property, &item, &helper);
gst_iterator_free (it);
+ if (found) {
+ result = g_value_dup_object (&item);
+ g_value_unset (&item);
+ }
} else {
if (element_has_property (obj, name, expected_type)) {
result = obj;
playsink->async_pending = TRUE;
GST_INFO_OBJECT (playsink, "Sending async_start message");
- message = gst_message_new_async_start (GST_OBJECT_CAST (playsink), FALSE);
+ message = gst_message_new_async_start (GST_OBJECT_CAST (playsink));
GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST
(playsink), message);
}
if (playsink->async_pending) {
GST_INFO_OBJECT (playsink, "Sending async_done message");
- message = gst_message_new_async_done (GST_OBJECT_CAST (playsink));
+ message =
+ gst_message_new_async_done (GST_OBJECT_CAST (playsink),
+ GST_CLOCK_TIME_NONE);
GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST
(playsink), message);
GST_DEBUG_OBJECT (playsink, "creating deinterlace");
chain->deinterlace = gst_element_factory_make ("deinterlace", "deinterlace");
if (chain->deinterlace == NULL) {
+ chain->deinterlace =
+ gst_element_factory_make ("avdeinterlace", "deinterlace");
+ }
+ if (chain->deinterlace == NULL) {
post_missing_element_message (playsink, "deinterlace");
GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
(_("Missing element '%s' - check your GStreamer installation."),
}
}
+/* Checks whether @bal exposes every channel the playsink colorbalance
+ * proxy needs.  Returns TRUE only if the element lists channels whose
+ * labels contain BRIGHTNESS, CONTRAST, HUE and SATURATION. */
+static gboolean
+is_valid_color_balance_element (GstColorBalance * bal)
+{
+  /* order matches the original first-match precedence */
+  static const gchar *wanted[] =
+      { "BRIGHTNESS", "CONTRAST", "HUE", "SATURATION" };
+  gboolean found[G_N_ELEMENTS (wanted)] = { FALSE, FALSE, FALSE, FALSE };
+  const GList *walk;
+  guint i;
+
+  for (walk = gst_color_balance_list_channels (bal); walk; walk = walk->next) {
+    GstColorBalanceChannel *ch = walk->data;
+
+    /* each channel label is counted for at most one required name */
+    for (i = 0; i < G_N_ELEMENTS (wanted); i++) {
+      if (g_strrstr (ch->label, wanted[i])) {
+        found[i] = TRUE;
+        break;
+      }
+    }
+  }
+
+  return found[0] && found[1] && found[2] && found[3];
+}
+
+/* GstIterator foreach callback: accumulates the preferred color balance
+ * element into *cb_out (owned ref, or NULL if none seen yet).  A stored
+ * software balance is replaced by any later valid element, so hardware
+ * balance elements end up preferred over software ones; a stored
+ * non-software balance is kept as-is. */
+static void
+iterate_color_balance_elements (const GValue * item, gpointer user_data)
+{
+  gboolean valid;
+  GstColorBalance *cb, **cb_out = user_data;
+
+  cb = GST_COLOR_BALANCE (g_value_get_object (item));
+  valid = is_valid_color_balance_element (cb);
+  if (valid) {
+    if (*cb_out
+        && gst_color_balance_get_balance_type (*cb_out) ==
+        GST_COLOR_BALANCE_SOFTWARE) {
+      /* drop the software balance in favour of this candidate */
+      gst_object_unref (*cb_out);
+      *cb_out = GST_COLOR_BALANCE (gst_object_ref (cb));
+    } else if (!*cb_out) {
+      /* first valid element found */
+      *cb_out = GST_COLOR_BALANCE (gst_object_ref (cb));
+    }
+  }
+}
+
+/* Finds a usable color balance element: @element itself if it implements
+ * a complete GstColorBalance interface, otherwise (if it is a bin) the
+ * preferred implementor inside it (see iterate_color_balance_elements()).
+ *
+ * Returns: (transfer full): the element, or NULL if none was found. */
+static GstColorBalance *
+find_color_balance_element (GstElement * element)
+{
+  GstIterator *it;
+  GstColorBalance *cb = NULL;
+
+  if (GST_IS_COLOR_BALANCE (element)
+      && is_valid_color_balance_element (GST_COLOR_BALANCE (element)))
+    return GST_COLOR_BALANCE (gst_object_ref (element));
+  else if (!GST_IS_BIN (element))
+    /* was "return FALSE": a gboolean returned as a pointer value */
+    return NULL;
+
+  it = gst_bin_iterate_all_by_interface (GST_BIN (element),
+      GST_TYPE_COLOR_BALANCE);
+  /* the iterator may resync if the bin changes while we walk it */
+  while (gst_iterator_foreach (it, iterate_color_balance_elements,
+          &cb) == GST_ITERATOR_RESYNC)
+    gst_iterator_resync (it);
+  gst_iterator_free (it);
+
+  return cb;
+}
+
+/* "value-changed" handler on the real color balance element: mirrors the
+ * element's new channel value into the matching proxy channel (matched by
+ * label substring) after rescaling between the two channels' ranges, then
+ * re-emits value-changed on the playsink proxy interface. */
+static void
+colorbalance_value_changed_cb (GstColorBalance * balance,
+    GstColorBalanceChannel * channel, gint value, GstPlaySink * playsink)
+{
+  GList *l;
+  gint i;
+
+  /* i indexes colorbalance_values[] in step with the channel list */
+  for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
+    GstColorBalanceChannel *proxy = l->data;
+
+    if (g_strrstr (channel->label, proxy->label)) {
+      gdouble new_val;
+
+      /* Convert to [0, 1] range */
+      new_val =
+          ((gdouble) value -
+          (gdouble) channel->min_value) / ((gdouble) channel->max_value -
+          (gdouble) channel->min_value);
+      /* Convert to proxy range */
+      new_val =
+          proxy->min_value + new_val * ((gdouble) proxy->max_value -
+          (gdouble) proxy->min_value);
+      /* round to nearest integer when caching the proxy value */
+      playsink->colorbalance_values[i] = (gint) (0.5 + new_val);
+
+      gst_color_balance_value_changed (GST_COLOR_BALANCE (playsink), proxy,
+          playsink->colorbalance_values[i]);
+      break;
+    }
+  }
+}
+
+/* Pushes the cached proxy channel values (playsink->colorbalance_values)
+ * down to the currently selected color balance element, if any.  The
+ * element's "value-changed" handler is blocked while setting so the writes
+ * do not loop back through colorbalance_value_changed_cb(). */
+static void
+update_colorbalance (GstPlaySink * playsink)
+{
+  GstColorBalance *balance = NULL;
+  GList *l;
+  gint i;
+
+  /* take our own ref on the element under the object lock */
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->colorbalance_element) {
+    balance =
+        GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
+  }
+  GST_OBJECT_UNLOCK (playsink);
+  if (!balance)
+    return;
+
+  g_signal_handlers_block_by_func (balance,
+      G_CALLBACK (colorbalance_value_changed_cb), playsink);
+
+  for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
+    GstColorBalanceChannel *proxy = l->data;
+    GstColorBalanceChannel *channel = NULL;
+    const GList *channels, *k;
+
+    /* find the element channel matching this proxy channel by label */
+    channels = gst_color_balance_list_channels (balance);
+    for (k = channels; k; k = k->next) {
+      GstColorBalanceChannel *tmp = k->data;
+
+      if (g_strrstr (tmp->label, proxy->label)) {
+        channel = tmp;
+        break;
+      }
+    }
+
+    /* the element passed is_valid_color_balance_element(), so every
+     * proxy channel must have a counterpart */
+    g_assert (channel);
+
+    gst_color_balance_set_value (balance, channel,
+        playsink->colorbalance_values[i]);
+  }
+
+  g_signal_handlers_unblock_by_func (balance,
+      G_CALLBACK (colorbalance_value_changed_cb), playsink);
+
+  gst_object_unref (balance);
+}
+
/* make the element (bin) that contains the elements needed to perform
* video display.
*
chain->async = TRUE;
}
+ /* Make sure the aspect ratio is kept */
+ elem =
+ gst_play_sink_find_property_sinks (playsink, chain->sink,
+ "force-aspect-ratio", G_TYPE_BOOLEAN);
+ if (elem)
+ g_object_set (elem, "force-aspect-ratio", playsink->force_aspect_ratio,
+ NULL);
+
/* find ts-offset element */
gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *)
gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset",
gst_object_ref_sink (bin);
gst_bin_add (bin, chain->sink);
+ /* Get the VideoOverlay element */
+ {
+ GstVideoOverlay *overlay = NULL;
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->overlay_element)
+ gst_object_unref (playsink->overlay_element);
+ playsink->overlay_element =
+ GST_VIDEO_OVERLAY (gst_bin_get_by_interface (GST_BIN (chain->chain.bin),
+ GST_TYPE_VIDEO_OVERLAY));
+ if (playsink->overlay_element)
+ overlay = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+ GST_OBJECT_UNLOCK (playsink);
+
+ if (overlay) {
+ if (playsink->overlay_handle_set)
+ gst_video_overlay_set_window_handle (overlay, playsink->overlay_handle);
+ if (playsink->overlay_handle_events_set)
+ gst_video_overlay_handle_events (overlay,
+ playsink->overlay_handle_events);
+ if (playsink->overlay_render_rectangle_set)
+ gst_video_overlay_set_render_rectangle (overlay,
+ playsink->overlay_x, playsink->overlay_y,
+ playsink->overlay_width, playsink->overlay_height);
+ gst_object_unref (overlay);
+ }
+ }
+
/* decouple decoder from sink, this improves playback quite a lot since the
* decoder can continue while the sink blocks for synchronisation. We don't
* need a lot of buffers as this consumes a lot of memory and we don't want
head = prev = chain->queue;
}
- if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)) {
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->colorbalance_element) {
+ g_signal_handlers_disconnect_by_func (playsink->colorbalance_element,
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ gst_object_unref (playsink->colorbalance_element);
+ }
+ playsink->colorbalance_element = find_color_balance_element (chain->sink);
+ if (playsink->colorbalance_element) {
+ g_signal_connect (playsink->colorbalance_element, "value-changed",
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ }
+ GST_OBJECT_UNLOCK (playsink);
+
+ if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)
+ || (!playsink->colorbalance_element
+ && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE))) {
+ gboolean use_converters = !(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO);
+ gboolean use_balance = !playsink->colorbalance_element
+ && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE);
+
GST_DEBUG_OBJECT (playsink, "creating videoconverter");
chain->conv =
- g_object_new (GST_TYPE_PLAY_SINK_VIDEO_CONVERT, "name", "vconv", NULL);
+ g_object_new (GST_TYPE_PLAY_SINK_VIDEO_CONVERT, "name", "vconv",
+ "use-converters", use_converters, "use-balance", use_balance, NULL);
+
+ GST_OBJECT_LOCK (playsink);
+ if (use_balance && GST_PLAY_SINK_VIDEO_CONVERT (chain->conv)->balance)
+ playsink->colorbalance_element =
+ GST_COLOR_BALANCE (gst_object_ref (GST_PLAY_SINK_VIDEO_CONVERT
+ (chain->conv)->balance));
+ GST_OBJECT_UNLOCK (playsink);
+
gst_bin_add (bin, chain->conv);
if (prev) {
if (!gst_element_link_pads_full (prev, "src", chain->conv, "sink",
prev = chain->conv;
}
+ update_colorbalance (playsink);
+
if (prev) {
GST_DEBUG_OBJECT (playsink, "linking to sink");
if (!gst_element_link_pads_full (prev, "src", chain->sink, NULL,
pad = gst_element_get_static_pad (head, "sink");
chain->sinkpad = gst_ghost_pad_new ("sink", pad);
- gst_object_unref (pad);
+ /* sending audio/video flushes break stream changes when the pipeline
+ * is paused and played again in 0.10 */
+#if 0
+ gst_pad_set_event_function (chain->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_video_sink_event));
+ gst_pad_set_chain_function (chain->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_video_sink_chain));
+#endif
+
+ gst_object_unref (pad);
gst_element_add_pad (chain->chain.bin, chain->sinkpad);
return chain;
free_chain ((GstPlayChain *) chain);
return NULL;
}
+
link_failed:
{
GST_ELEMENT_ERROR (playsink, CORE, PAD,
if (ret == GST_STATE_CHANGE_FAILURE)
return FALSE;
- /* find ts-offset element */
+ /* Get the VideoOverlay element */
+ {
+ GstVideoOverlay *overlay = NULL;
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->overlay_element)
+ gst_object_unref (playsink->overlay_element);
+ playsink->overlay_element =
+ GST_VIDEO_OVERLAY (gst_bin_get_by_interface (GST_BIN (chain->chain.bin),
+ GST_TYPE_VIDEO_OVERLAY));
+ if (playsink->overlay_element)
+ overlay = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+ GST_OBJECT_UNLOCK (playsink);
+
+ if (overlay) {
+ if (playsink->overlay_handle_set)
+ gst_video_overlay_set_window_handle (overlay, playsink->overlay_handle);
+ if (playsink->overlay_handle_events_set)
+ gst_video_overlay_handle_events (overlay,
+ playsink->overlay_handle_events);
+ if (playsink->overlay_render_rectangle_set)
+ gst_video_overlay_set_render_rectangle (overlay,
+ playsink->overlay_x, playsink->overlay_y,
+ playsink->overlay_width, playsink->overlay_height);
+ gst_object_unref (overlay);
+ }
+ }
+ /* find ts-offset element */
gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *)
gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset",
G_TYPE_INT64));
GST_DEBUG_OBJECT (playsink, "no async property on the sink");
chain->async = TRUE;
}
+
+ /* Make sure the aspect ratio is kept */
+ elem =
+ gst_play_sink_find_property_sinks (playsink, chain->sink,
+ "force-aspect-ratio", G_TYPE_BOOLEAN);
+ if (elem)
+ g_object_set (elem, "force-aspect-ratio", playsink->force_aspect_ratio,
+ NULL);
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->colorbalance_element) {
+ g_signal_handlers_disconnect_by_func (playsink->colorbalance_element,
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ gst_object_unref (playsink->colorbalance_element);
+ }
+ playsink->colorbalance_element = find_color_balance_element (chain->sink);
+ if (playsink->colorbalance_element) {
+ g_signal_connect (playsink->colorbalance_element, "value-changed",
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ }
+ GST_OBJECT_UNLOCK (playsink);
+
+ if (chain->conv) {
+ gboolean use_balance = !playsink->colorbalance_element
+ && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE);
+
+ g_object_set (chain->conv, "use-balance", use_balance, NULL);
+
+ GST_OBJECT_LOCK (playsink);
+ if (use_balance && GST_PLAY_SINK_VIDEO_CONVERT (chain->conv)->balance)
+ playsink->colorbalance_element =
+ GST_COLOR_BALANCE (gst_object_ref (GST_PLAY_SINK_VIDEO_CONVERT
+ (chain->conv)->balance));
+ GST_OBJECT_UNLOCK (playsink);
+ }
+
+ update_colorbalance (playsink);
+
return TRUE;
}
+/* Helper: build a GST_EVENT_SEGMENT for @segment and tag it with the
+ * internal reset-segment marker (via _playsink_reset_segment_event_marker_id)
+ * so the src-pad event function can recognise the event as internal and drop
+ * it.  The new event is returned through @event1; @pad is only part of the
+ * caller's calling convention and is not used here. */
+static void
+_generate_update_newsegment_event (GstPad * pad, GstSegment * segment,
+ GstEvent ** event1)
+{
+ GstEvent *event;
+ GstStructure *structure;
+ event = gst_event_new_segment (segment);
+ structure = gst_event_writable_structure (event);
+ gst_structure_id_set (structure,
+ _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL);
+ *event1 = event;
+}
+
+/* Generic sink-pad event handler shared by the audio/video/subtitle ghost
+ * pads.  @sink_type names the stream ("audio"/"video"/"subtitle") and is used
+ * to build the custom-flush structure names and for debug output; the other
+ * pointer arguments are the caller's per-stream state fields.
+ *
+ * Recognises the "playsink-custom-<type>-flush" / "-flush-finish" OOB events
+ * and toggles the per-stream flush flags, resets the stored segment on
+ * FLUSH_STOP, forwards the event with the default dispatcher, and records
+ * SEGMENT events into @sink_segment.  Returns the default dispatch result. */
+static gboolean
+gst_play_sink_sink_event (GstPad * pad, GstObject * parent, GstEvent * event,
+ const gchar * sink_type,
+ gboolean * sink_ignore_wrong_state,
+ gboolean * sink_custom_flush_finished,
+ gboolean * sink_pending_flush, GstSegment * sink_segment)
+{
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent));
+ gboolean ret;
+ const GstStructure *structure = gst_event_get_structure (event);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM_OOB && structure) {
+ gchar *custom_flush;
+ gchar *custom_flush_finish;
+
+ custom_flush = g_strdup_printf ("playsink-custom-%s-flush", sink_type);
+ custom_flush_finish =
+ g_strdup_printf ("playsink-custom-%s-flush-finish", sink_type);
+ if (strcmp (gst_structure_get_name (structure), custom_flush) == 0) {
+ GST_DEBUG_OBJECT (pad,
+ "Custom %s flush event received, marking to flush %s", sink_type,
+ sink_type);
+ GST_PLAY_SINK_LOCK (playsink);
+ *sink_ignore_wrong_state = TRUE;
+ *sink_custom_flush_finished = FALSE;
+ GST_PLAY_SINK_UNLOCK (playsink);
+ } else if (strcmp (gst_structure_get_name (structure),
+ custom_flush_finish) == 0) {
+ GST_DEBUG_OBJECT (pad, "Custom %s flush finish event received",
+ sink_type);
+ GST_PLAY_SINK_LOCK (playsink);
+ *sink_pending_flush = TRUE;
+ *sink_custom_flush_finished = TRUE;
+ GST_PLAY_SINK_UNLOCK (playsink);
+ }
+
+ g_free (custom_flush);
+ g_free (custom_flush_finish);
+ } else if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
+ GST_PLAY_SINK_LOCK (playsink);
+ GST_DEBUG_OBJECT (pad, "Resetting %s segment because of flush-stop event",
+ sink_type);
+ gst_segment_init (sink_segment, GST_FORMAT_UNDEFINED);
+ GST_PLAY_SINK_UNLOCK (playsink);
+ }
+
+ GST_DEBUG_OBJECT (pad, "Forwarding event %" GST_PTR_FORMAT, event);
+ ret = gst_pad_event_default (pad, parent, gst_event_ref (event));
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
+ const GstSegment *segment;
+
+ gst_event_parse_segment (event, &segment);
+ GST_DEBUG_OBJECT (pad, "Segment event: %" GST_SEGMENT_FORMAT, segment);
+
+ GST_PLAY_SINK_LOCK (playsink);
+ if (sink_segment->format != segment->format) {
+ GST_DEBUG_OBJECT (pad, "%s segment format changed: %s -> %s",
+ sink_type,
+ gst_format_get_name (sink_segment->format),
+ gst_format_get_name (segment->format));
+ gst_segment_init (sink_segment, segment->format);
+ }
+
+ GST_DEBUG_OBJECT (pad, "Old %s segment: %" GST_SEGMENT_FORMAT,
+ sink_type, sink_segment);
+ /* FIX: record the newly received segment in this stream's state.  The
+ * previous code copied playsink->text_segment over sink_segment, which
+ * never stored the parsed segment and read the text-stream state for
+ * every stream type handled by this generic function. */
+ gst_segment_copy_into (segment, sink_segment);
+ GST_DEBUG_OBJECT (pad, "New %s segment: %" GST_SEGMENT_FORMAT,
+ sink_type, sink_segment);
+ GST_PLAY_SINK_UNLOCK (playsink);
+ }
+
+ gst_event_unref (event);
+ gst_object_unref (playsink);
+ return ret;
+}
+
+/* Generic sink-pad chain function shared by the audio/video/subtitle ghost
+ * pads.  If a custom flush is pending (*sink_pending_flush, set by the
+ * "-flush-finish" custom event), it injects marker-tagged flush-start /
+ * flush-stop / segment events before the buffer to drop all cached data,
+ * then forwards the buffer with the default proxy-pad chain.  While
+ * *sink_ignore_wrong_state is set, GST_FLOW_FLUSHING from downstream is
+ * converted to GST_FLOW_OK so the custom flush does not error out the
+ * stream. */
+static GstFlowReturn
+gst_play_sink_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer,
+ const gchar * sink_type,
+ gboolean * sink_ignore_wrong_state,
+ gboolean * sink_custom_flush_finished,
+ gboolean * sink_pending_flush, GstSegment * sink_segment)
+{
+ GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
+ GstFlowReturn ret;
+
+ GST_PLAY_SINK_LOCK (playsink);
+
+ if (*sink_pending_flush) {
+ GstEvent *event;
+ GstStructure *structure;
+
+ *sink_pending_flush = FALSE;
+
+ GST_PLAY_SINK_UNLOCK (playsink);
+
+ /* make the bin drop all cached data.
+ * This event will be dropped on the src pad, if any. */
+ event = gst_event_new_flush_start ();
+ structure = gst_event_writable_structure (event);
+ gst_structure_id_set (structure,
+ _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL);
+
+ GST_DEBUG_OBJECT (pad,
+ "Pushing %s flush-start event with reset segment marker set: %"
+ GST_PTR_FORMAT, sink_type, event);
+ gst_pad_send_event (pad, event);
+
+ /* make queue drop all cached data.
+ * This event will be dropped on the src pad. */
+ event = gst_event_new_flush_stop (TRUE);
+ structure = gst_event_writable_structure (event);
+ gst_structure_id_set (structure,
+ _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL);
+
+ GST_DEBUG_OBJECT (pad,
+ "Pushing %s flush-stop event with reset segment marker set: %"
+ GST_PTR_FORMAT, sink_type, event);
+ gst_pad_send_event (pad, event);
+
+ /* Re-sync queue segment info after flush-stop.
+ * This event will be dropped on the src pad. */
+ if (sink_segment->format != GST_FORMAT_UNDEFINED) {
+ GstEvent *event1;
+
+ _generate_update_newsegment_event (pad, sink_segment, &event1);
+ GST_DEBUG_OBJECT (playsink,
+ "Pushing segment event with reset "
+ "segment marker set: %" GST_PTR_FORMAT, event1);
+ gst_pad_send_event (pad, event1);
+ }
+ } else {
+ GST_PLAY_SINK_UNLOCK (playsink);
+ }
+
+ ret = gst_proxy_pad_chain_default (pad, parent, buffer);
+
+ /* during a custom flush, downstream may legitimately return FLUSHING;
+ * mask it until the matching flush-finish event has been seen */
+ GST_PLAY_SINK_LOCK (playsink);
+ if (ret == GST_FLOW_FLUSHING && *sink_ignore_wrong_state) {
+ GST_DEBUG_OBJECT (pad, "Ignoring wrong state for %s during flush",
+ sink_type);
+ if (*sink_custom_flush_finished) {
+ GST_DEBUG_OBJECT (pad, "Custom flush finished, stop ignoring "
+ "wrong state for %s", sink_type);
+ *sink_ignore_wrong_state = FALSE;
+ }
+
+ ret = GST_FLOW_OK;
+ }
+ GST_PLAY_SINK_UNLOCK (playsink);
+
+ gst_object_unref (playsink);
+ gst_object_unref (tbin);
+ return ret;
+}
+
+/* sending audio/video flushes break stream changes when the pipeline
+ * is paused and played again in 0.10 */
+#if 0
+/* NOTE(review): dead code kept from the 0.10 era.  These wrappers still use
+ * the old pad-function signatures (no GstObject *parent) and call
+ * gst_play_sink_sink_event()/_chain() with the old arity, so they would need
+ * porting to the 1.0 API before being re-enabled. */
+static gboolean
+gst_play_sink_video_sink_event (GstPad * pad, GstEvent * event)
+{
+ GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
+ gboolean ret;
+
+ ret = gst_play_sink_sink_event (pad, event, "video",
+ &playsink->video_ignore_wrong_state,
+ &playsink->video_custom_flush_finished,
+ &playsink->video_pending_flush, &playsink->video_segment);
+
+ gst_object_unref (playsink);
+ gst_object_unref (tbin);
+ return ret;
+}
+
+static GstFlowReturn
+gst_play_sink_video_sink_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
+ gboolean ret;
+
+ ret = gst_play_sink_sink_chain (pad, buffer, "video",
+ &playsink->video_ignore_wrong_state,
+ &playsink->video_custom_flush_finished,
+ &playsink->video_pending_flush, &playsink->video_segment);
+
+ gst_object_unref (playsink);
+ gst_object_unref (tbin);
+ return ret;
+}
+
+static gboolean
+gst_play_sink_audio_sink_event (GstPad * pad, GstEvent * event)
+{
+ GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
+ gboolean ret;
+
+ ret = gst_play_sink_sink_event (pad, event, "audio",
+ &playsink->audio_ignore_wrong_state,
+ &playsink->audio_custom_flush_finished,
+ &playsink->audio_pending_flush, &playsink->audio_segment);
+
+ gst_object_unref (playsink);
+ gst_object_unref (tbin);
+ return ret;
+}
+
+static GstFlowReturn
+gst_play_sink_audio_sink_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
+ gboolean ret;
+
+ ret = gst_play_sink_sink_chain (pad, buffer, "audio",
+ &playsink->audio_ignore_wrong_state,
+ &playsink->audio_custom_flush_finished,
+ &playsink->audio_pending_flush, &playsink->audio_segment);
+
+ gst_object_unref (playsink);
+ gst_object_unref (tbin);
+ return ret;
+}
+#endif
+
+/* Event function for the subtitle ghost pad: delegates to the generic
+ * gst_play_sink_sink_event() with the playsink's text-stream state. */
+static gboolean
+gst_play_sink_text_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent));
+ gboolean ret;
+
+ ret = gst_play_sink_sink_event (pad, parent, event, "subtitle",
+ &playsink->text_ignore_wrong_state,
+ &playsink->text_custom_flush_finished,
+ &playsink->text_pending_flush, &playsink->text_segment);
+
+ gst_object_unref (playsink);
+
+ return ret;
+}
+
+/* Chain function for the subtitle ghost pad: delegates to the generic
+ * gst_play_sink_sink_chain() with the playsink's text-stream state. */
+static GstFlowReturn
+gst_play_sink_text_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ /* FIX: ret was declared gboolean but holds and returns a GstFlowReturn;
+ * use the correct enum type (matches the function's return type and the
+ * other chain wrappers). */
+ GstFlowReturn ret;
+ GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent));
+
+ ret = gst_play_sink_sink_chain (pad, parent, buffer, "subtitle",
+ &playsink->text_ignore_wrong_state,
+ &playsink->text_custom_flush_finished,
+ &playsink->text_pending_flush, &playsink->text_segment);
+
+ gst_object_unref (playsink);
+ return ret;
+}
+
+/* Src-pad event function for the subtitle chain.  Drops events that carry
+ * the internal reset-segment marker (they were injected by the sink-pad
+ * flush handling and must not escape the bin); all other events are
+ * forwarded with the default dispatcher. */
+static gboolean
+gst_play_sink_text_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ gboolean ret;
+ const GstStructure *structure;
+
+ GST_DEBUG_OBJECT (pad, "Got event %" GST_PTR_FORMAT, event);
+
+ structure = gst_event_get_structure (event);
+
+ if (structure &&
+ gst_structure_id_has_field (structure,
+ _playsink_reset_segment_event_marker_id)) {
+ /* the events marked with a reset segment marker
+ * are sent internally to reset the queue and
+ * must be dropped here */
+ GST_DEBUG_OBJECT (pad, "Dropping event with reset "
+ "segment marker set: %" GST_PTR_FORMAT, event);
+ ret = TRUE;
+ goto out;
+ }
+
+ ret = gst_pad_event_default (pad, parent, gst_event_ref (event));
+
+out:
+ gst_event_unref (event);
+ return ret;
+}
+
/* make an element for playback of video with subtitles embedded.
* Only used for *raw* video streams.
*
"queue"), ("rendering might be suboptimal"));
} else {
g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3,
- "max-size-bytes", 0, "max-size-time", (gint64) 0,
+ "max-size-bytes", 0, "max-size-time", (gint64) GST_SECOND,
"silent", TRUE, NULL);
gst_bin_add (bin, chain->queue);
}
if (textsinkpad == NULL) {
GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
(_("Custom text sink element is not usable.")),
- ("fallback to default textoverlay"));
+ ("fallback to default subtitleoverlay"));
}
}
"queue"), ("rendering might be suboptimal"));
} else {
g_object_set (G_OBJECT (element), "max-size-buffers", 3,
- "max-size-bytes", 0, "max-size-time", (gint64) 0,
+ "max-size-bytes", 0, "max-size-time", (gint64) GST_SECOND,
"silent", TRUE, NULL);
gst_bin_add (bin, element);
if (gst_element_link_pads_full (element, "src", chain->overlay,
if (textsinkpad) {
chain->textsinkpad = gst_ghost_pad_new ("text_sink", textsinkpad);
gst_object_unref (textsinkpad);
+
+ gst_pad_set_event_function (chain->textsinkpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_text_sink_event));
+ gst_pad_set_chain_function (chain->textsinkpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_text_sink_chain));
+
gst_element_add_pad (chain->chain.bin, chain->textsinkpad);
}
if (srcpad) {
chain->srcpad = gst_ghost_pad_new ("src", srcpad);
gst_object_unref (srcpad);
+
+ gst_pad_set_event_function (chain->srcpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_text_src_event));
+
gst_element_add_pad (chain->chain.bin, chain->srcpad);
}
}
if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO) || (!have_volume
- && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) {
+ && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME))) {
gboolean use_converters = !(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO);
gboolean use_volume =
- !have_volume && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME;
+ !have_volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME);
GST_DEBUG_OBJECT (playsink,
"creating audioconvert with use-converters %d, use-volume %d",
use_converters, use_volume);
}
prev = chain->conv;
- if (!have_volume && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME) {
+ if (!have_volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) {
GstPlaySinkAudioConvert *conv =
GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv);
GST_DEBUG_OBJECT (playsink, "ghosting sink pad");
pad = gst_element_get_static_pad (head, "sink");
chain->sinkpad = gst_ghost_pad_new ("sink", pad);
+
+ /* sending audio/video flushes break stream changes when the pipeline
+ * is paused and played again in 0.10 */
+#if 0
+ gst_pad_set_event_function (chain->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_audio_sink_event));
+ gst_pad_set_chain_function (chain->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_play_sink_audio_sink_chain));
+#endif
+
gst_object_unref (pad);
gst_element_add_pad (chain->chain.bin, chain->sinkpad);
GstElement *elem;
GstPlayAudioChain *chain;
GstStateChangeReturn ret;
+ GstPlaySinkAudioConvert *conv;
chain = playsink->audiochain;
+ conv = GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv);
chain->chain.raw = raw;
}
g_object_set (chain->conv, "use-volume", FALSE, NULL);
- } else {
- GstPlaySinkAudioConvert *conv =
- GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv);
-
+ } else if (conv) {
/* no volume, we need to add a volume element when we can */
- g_object_set (chain->conv, "use-volume", TRUE, NULL);
+ g_object_set (chain->conv, "use-volume",
+ ! !(playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME), NULL);
GST_DEBUG_OBJECT (playsink, "the sink has no volume property");
/* Disconnect signals */
disconnect_chain (chain, playsink);
- if (conv->volume) {
+ if (conv->volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) {
chain->volume = conv->volume;
chain->mute = chain->volume;
gst_bin_add (bin, chain->resample);
/* this pad will be used for blocking the dataflow and switching the vis
+ * plugin, we block right after the queue, this makes it possible for the
+ * resample and convert to convert to a format supported by the new vis
* plugin */
- chain->blockpad = gst_element_get_static_pad (chain->resample, "src");
+ chain->blockpad = gst_element_get_static_pad (chain->queue, "src");
+ /* this is the pad where the vis is linked to */
+ chain->vispeerpad = gst_element_get_static_pad (chain->resample, "src");
if (playsink->visualisation) {
GST_DEBUG_OBJECT (playsink, "trying configure vis");
post_missing_element_message (playsink, "audioconvert");
GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
(_("Missing element '%s' - check your GStreamer installation."),
- "audioconvert"), ("possibly a liboil version mismatch?"));
+ "audioconvert"), ("make sure audioconvert isn't blacklisted"));
free_chain ((GstPlayChain *) chain);
return NULL;
}
* have to construct the final pipeline. Based on the flags we construct the
* final output pipelines.
*/
-gboolean
-gst_play_sink_reconfigure (GstPlaySink * playsink)
+static gboolean
+gst_play_sink_do_reconfigure (GstPlaySink * playsink)
{
GstPlayFlags flags;
gboolean need_audio, need_video, need_deinterlace, need_vis, need_text;
/* we have a text_pad and we need text rendering, in this case we need a
* video_pad to combine the video with the text or visualizations */
- if (need_text && !need_video) {
+ if (need_text && !need_video && !playsink->text_sink) {
if (playsink->video_pad) {
need_video = TRUE;
} else if (need_audio) {
activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
free_chain ((GstPlayChain *) playsink->videochain);
playsink->videochain = NULL;
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->overlay_element)
+ gst_object_unref (playsink->overlay_element);
+ playsink->overlay_element = NULL;
+
+ if (playsink->colorbalance_element) {
+ g_signal_handlers_disconnect_by_func (playsink->colorbalance_element,
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ gst_object_unref (playsink->colorbalance_element);
+ }
+ playsink->colorbalance_element = NULL;
+ GST_OBJECT_UNLOCK (playsink);
}
}
goto no_chain;
if (!playsink->video_sinkpad_stream_synchronizer) {
+ GValue item = { 0, };
GstIterator *it;
playsink->video_sinkpad_stream_synchronizer =
gst_element_get_request_pad (GST_ELEMENT_CAST
- (playsink->stream_synchronizer), "sink_%d");
+ (playsink->stream_synchronizer), "sink_%u");
it = gst_pad_iterate_internal_links
(playsink->video_sinkpad_stream_synchronizer);
g_assert (it);
- gst_iterator_next (it,
- (gpointer *) & playsink->video_srcpad_stream_synchronizer);
+ gst_iterator_next (it, &item);
+ playsink->video_srcpad_stream_synchronizer = g_value_dup_object (&item);
+ g_value_unset (&item);
g_assert (playsink->video_srcpad_stream_synchronizer);
gst_iterator_free (it);
}
add_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), TRUE);
activate_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), TRUE);
+ gst_pad_unlink (playsink->video_srcpad_stream_synchronizer,
+ playsink->videochain->sinkpad);
gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
playsink->videodeinterlacechain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
} else {
if (!need_vis && !need_text && (!playsink->textchain
|| !playsink->text_pad)) {
GST_DEBUG_OBJECT (playsink, "ghosting video sinkpad");
+ gst_pad_unlink (playsink->video_srcpad_stream_synchronizer,
+ playsink->videochain->sinkpad);
+ if (playsink->videodeinterlacechain
+ && playsink->videodeinterlacechain->srcpad)
+ gst_pad_unlink (playsink->videodeinterlacechain->srcpad,
+ playsink->videochain->sinkpad);
if (need_deinterlace)
gst_pad_link_full (playsink->videodeinterlacechain->srcpad,
playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
if (playsink->video_pad)
gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), NULL);
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->overlay_element)
+ gst_object_unref (playsink->overlay_element);
+ playsink->overlay_element = NULL;
+
+ if (playsink->colorbalance_element) {
+ g_signal_handlers_disconnect_by_func (playsink->colorbalance_element,
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ gst_object_unref (playsink->colorbalance_element);
+ }
+ playsink->colorbalance_element = NULL;
+ GST_OBJECT_UNLOCK (playsink);
+
}
if (need_audio) {
}
if (!playsink->audio_sinkpad_stream_synchronizer) {
+ GValue item = { 0, };
GstIterator *it;
playsink->audio_sinkpad_stream_synchronizer =
gst_element_get_request_pad (GST_ELEMENT_CAST
- (playsink->stream_synchronizer), "sink_%d");
+ (playsink->stream_synchronizer), "sink_%u");
it = gst_pad_iterate_internal_links
(playsink->audio_sinkpad_stream_synchronizer);
g_assert (it);
- gst_iterator_next (it,
- (gpointer *) & playsink->audio_srcpad_stream_synchronizer);
+ gst_iterator_next (it, &item);
+ playsink->audio_srcpad_stream_synchronizer = g_value_dup_object (&item);
+ g_value_unset (&item);
g_assert (playsink->audio_srcpad_stream_synchronizer);
gst_iterator_free (it);
}
GST_DEBUG_OBJECT (playsink, "adding audio chain");
if (playsink->audio_tee_asrc == NULL) {
playsink->audio_tee_asrc =
- gst_element_get_request_pad (playsink->audio_tee, "src%d");
+ gst_element_get_request_pad (playsink->audio_tee, "src_%u");
}
add_chain (GST_PLAY_CHAIN (playsink->audiochain), TRUE);
activate_chain (GST_PLAY_CHAIN (playsink->audiochain), TRUE);
activate_chain (GST_PLAY_CHAIN (playsink->vischain), TRUE);
if (playsink->audio_tee_vissrc == NULL) {
playsink->audio_tee_vissrc =
- gst_element_get_request_pad (playsink->audio_tee, "src%d");
+ gst_element_get_request_pad (playsink->audio_tee, "src_%u");
}
gst_pad_link_full (playsink->audio_tee_vissrc,
playsink->vischain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
add_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE);
if (!playsink->text_sinkpad_stream_synchronizer) {
+ GValue item = { 0, };
+
playsink->text_sinkpad_stream_synchronizer =
gst_element_get_request_pad (GST_ELEMENT_CAST
- (playsink->stream_synchronizer), "sink_%d");
+ (playsink->stream_synchronizer), "sink_%u");
it = gst_pad_iterate_internal_links
(playsink->text_sinkpad_stream_synchronizer);
g_assert (it);
- gst_iterator_next (it,
- (gpointer *) & playsink->text_srcpad_stream_synchronizer);
+ gst_iterator_next (it, &item);
+ playsink->text_srcpad_stream_synchronizer = g_value_dup_object (&item);
+ g_value_unset (&item);
g_assert (playsink->text_srcpad_stream_synchronizer);
gst_iterator_free (it);
playsink->textchain->textsinkpad, GST_PAD_LINK_CHECK_NOTHING);
}
- if (need_vis) {
- GstPad *srcpad;
+ if (need_vis || need_video) {
+ if (need_vis) {
+ GstPad *srcpad;
- srcpad =
- gst_element_get_static_pad (playsink->vischain->chain.bin, "src");
- gst_pad_unlink (srcpad, playsink->videochain->sinkpad);
- gst_pad_link_full (srcpad, playsink->textchain->videosinkpad,
- GST_PAD_LINK_CHECK_NOTHING);
- gst_object_unref (srcpad);
- } else {
- if (need_deinterlace)
- gst_pad_link_full (playsink->videodeinterlacechain->srcpad,
- playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING);
- else
- gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
- playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ srcpad =
+ gst_element_get_static_pad (playsink->vischain->chain.bin, "src");
+ gst_pad_unlink (srcpad, playsink->videochain->sinkpad);
+ gst_pad_link_full (srcpad, playsink->textchain->videosinkpad,
+ GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (srcpad);
+ } else {
+ if (need_deinterlace)
+ gst_pad_link_full (playsink->videodeinterlacechain->srcpad,
+ playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ else
+ gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
+ playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ }
+ gst_pad_link_full (playsink->textchain->srcpad,
+ playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
}
- gst_pad_link_full (playsink->textchain->srcpad,
- playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
activate_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE);
}
vchain = (GstPlayVideoChain *) playsink->videochain;
if (achain && vchain && achain->ts_offset && vchain->ts_offset) {
- g_object_set (achain->ts_offset, "ts-offset", MAX (0, -av_offset), NULL);
- g_object_set (vchain->ts_offset, "ts-offset", MAX (0, av_offset), NULL);
+ g_object_set (achain->ts_offset,
+ "ts-offset", MAX (G_GINT64_CONSTANT (0), -av_offset), NULL);
+ g_object_set (vchain->ts_offset,
+ "ts-offset", MAX (G_GINT64_CONSTANT (0), av_offset), NULL);
} else {
GST_LOG_OBJECT (playsink, "no ts_offset elements");
}
}
/**
- * gst_play_sink_get_last_frame:
+ * gst_play_sink_get_last_sample:
* @playsink: a #GstPlaySink
*
- * Get the last displayed frame from @playsink. This frame is in the native
- * format of the sink element, the caps on the result buffer contain the format
+ * Get the last displayed sample from @playsink. This sample is in the native
+ * format of the sink element, the caps in the result sample contain the format
* of the frame data.
*
- * Returns: a #GstBuffer with the frame data or %NULL when no video frame is
+ * Returns: a #GstSample with the frame data or %NULL when no video frame is
* available.
*/
-GstBuffer *
-gst_play_sink_get_last_frame (GstPlaySink * playsink)
+GstSample *
+gst_play_sink_get_last_sample (GstPlaySink * playsink)
{
- GstBuffer *result = NULL;
+ GstSample *result = NULL;
GstPlayVideoChain *chain;
GST_PLAY_SINK_LOCK (playsink);
- GST_DEBUG_OBJECT (playsink, "taking last frame");
+ GST_DEBUG_OBJECT (playsink, "taking last sample");
/* get the video chain if we can */
if ((chain = (GstPlayVideoChain *) playsink->videochain)) {
GST_DEBUG_OBJECT (playsink, "found video chain");
/* find and get the last-buffer property now */
if ((elem =
gst_play_sink_find_property (playsink, chain->sink,
- "last-buffer", GST_TYPE_BUFFER))) {
- GST_DEBUG_OBJECT (playsink, "getting last-buffer property");
- g_object_get (elem, "last-buffer", &result, NULL);
+ "last-sample", GST_TYPE_SAMPLE))) {
+ GST_DEBUG_OBJECT (playsink, "getting last-sample property");
+ g_object_get (elem, "last-sample", &result, NULL);
gst_object_unref (elem);
}
}
}
/**
- * gst_play_sink_convert_frame:
+ * gst_play_sink_convert_sample:
* @playsink: a #GstPlaySink
* @caps: a #GstCaps
*
* Returns: a #GstBuffer with the frame data or %NULL when no video frame is
* available or when the conversion failed.
*/
-GstBuffer *
-gst_play_sink_convert_frame (GstPlaySink * playsink, GstCaps * caps)
+GstSample *
+gst_play_sink_convert_sample (GstPlaySink * playsink, GstCaps * caps)
{
- GstBuffer *result;
+ GstSample *result;
+ GError *err = NULL;
- result = gst_play_sink_get_last_frame (playsink);
+ result = gst_play_sink_get_last_sample (playsink);
if (result != NULL && caps != NULL) {
- GstBuffer *temp;
- GError *err = NULL;
-
- temp = gst_video_convert_frame (result, caps, 25 * GST_SECOND, &err);
- gst_buffer_unref (result);
- if (temp == NULL && err) {
- /* I'm really uncertain whether we should make playsink post an error
- * on the bus or not. It's not like it's a critical issue regarding
- * playsink behaviour. */
- GST_ERROR ("Error converting frame: %s", err->message);
- g_error_free (err);
- }
+ GstSample *temp;
+
+ temp = gst_video_convert_sample (result, caps, 25 * GST_SECOND, &err);
+ if (temp == NULL && err)
+ goto error;
+
+ gst_sample_unref (result);
result = temp;
}
return result;
+
+ /* ERRORS */
+error:
+ {
+ /* I'm really uncertain whether we should make playsink post an error
+ * on the bus or not. It's not like it's a critical issue regarding
+ * playsink behaviour. */
+ GST_ERROR ("Error converting frame: %s", err->message);
+ gst_sample_unref (result);
+ g_error_free (err);
+ return NULL;
+ }
}
static gboolean
name = gst_structure_get_name (s);
- if (g_str_has_prefix (name, "video/x-raw-") ||
- g_str_has_prefix (name, "audio/x-raw-"))
+ if (g_str_equal (name, "video/x-raw") || g_str_equal (name, "audio/x-raw"))
return TRUE;
return FALSE;
}
if (!peer)
return raw;
- caps = gst_pad_get_negotiated_caps (peer);
+ caps = gst_pad_get_current_caps (peer);
if (!caps) {
guint i, n;
- caps = gst_pad_get_caps_reffed (peer);
+ caps = gst_pad_query_caps (peer, NULL);
n = gst_caps_get_size (caps);
for (i = 0; i < n; i++) {
return raw;
}
+static GstPadProbeReturn
+sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info,
+ gpointer user_data);
+
+/* Add or remove the downstream-blocking probe on the internal proxy pad of
+ * the video ghost pad.  On unblock the pending flags and the cached block
+ * state are cleared.  Replaces the 0.10 gst_pad_set_blocked_async mechanism
+ * with 1.0 pad probes. */
 static void
-sinkpad_blocked_cb (GstPad * blockedpad, gboolean blocked, gpointer user_data)
+video_set_blocked (GstPlaySink * playsink, gboolean blocked)
 {
- GstPlaySink *playsink = (GstPlaySink *) user_data;
- GstPad *pad;
-
- GST_PLAY_SINK_LOCK (playsink);
-
- pad = GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (blockedpad)));
- if (pad == playsink->video_pad) {
- playsink->video_pad_blocked = blocked;
- GST_DEBUG_OBJECT (pad, "Video pad blocked: %d", blocked);
- if (!blocked) {
+ if (playsink->video_pad) {
+ GstPad *opad =
+ GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
+ (playsink->video_pad)));
+ if (blocked && playsink->video_block_id == 0) {
+ playsink->video_block_id =
+ gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ sinkpad_blocked_cb, playsink, NULL);
+ } else if (!blocked && playsink->video_block_id) {
+ gst_pad_remove_probe (opad, playsink->video_block_id);
 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO_RAW);
 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO);
+ playsink->video_block_id = 0;
+ playsink->video_pad_blocked = FALSE;
 }
- } else if (pad == playsink->audio_pad) {
- playsink->audio_pad_blocked = blocked;
- GST_DEBUG_OBJECT (pad, "Audio pad blocked: %d", blocked);
- if (!blocked) {
+ gst_object_unref (opad);
+ }
+}
+
+/* Add or remove the downstream-blocking probe on the internal proxy pad of
+ * the audio ghost pad; mirrors video_set_blocked() for the audio stream. */
+static void
+audio_set_blocked (GstPlaySink * playsink, gboolean blocked)
+{
+ if (playsink->audio_pad) {
+ GstPad *opad =
+ GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
+ (playsink->audio_pad)));
+ if (blocked && playsink->audio_block_id == 0) {
+ playsink->audio_block_id =
+ gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ sinkpad_blocked_cb, playsink, NULL);
+ } else if (!blocked && playsink->audio_block_id) {
+ gst_pad_remove_probe (opad, playsink->audio_block_id);
 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO_RAW);
 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO);
+ playsink->audio_block_id = 0;
+ playsink->audio_pad_blocked = FALSE;
 }
- } else if (pad == playsink->text_pad) {
- playsink->text_pad_blocked = blocked;
- GST_DEBUG_OBJECT (pad, "Text pad blocked: %d", blocked);
- if (!blocked)
+ gst_object_unref (opad);
+ }
+}
+
+/* Add or remove the downstream-blocking probe on the internal proxy pad of
+ * the text ghost pad; mirrors video_set_blocked() for the subtitle stream. */
+static void
+text_set_blocked (GstPlaySink * playsink, gboolean blocked)
+{
+ if (playsink->text_pad) {
+ GstPad *opad =
+ GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
+ (playsink->text_pad)));
+ if (blocked && playsink->text_block_id == 0) {
+ playsink->text_block_id =
+ gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ sinkpad_blocked_cb, playsink, NULL);
+ } else if (!blocked && playsink->text_block_id) {
+ gst_pad_remove_probe (opad, playsink->text_block_id);
 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_TEXT);
+ playsink->text_block_id = 0;
+ playsink->text_pad_blocked = FALSE;
+ }
+ gst_object_unref (opad);
 }
+}
- if (!blocked) {
- gst_object_unref (pad);
- GST_PLAY_SINK_UNLOCK (playsink);
- return;
+/* Public entry point that triggers an asynchronous reconfiguration: blocks
+ * all three input pads; the actual rebuild happens in sinkpad_blocked_cb()
+ * once every relevant pad reports blocked.  Always returns TRUE. */
+gboolean
+gst_play_sink_reconfigure (GstPlaySink * playsink)
+{
+ GST_LOG_OBJECT (playsink, "Triggering reconfiguration");
+
+ GST_PLAY_SINK_LOCK (playsink);
+ video_set_blocked (playsink, TRUE);
+ audio_set_blocked (playsink, TRUE);
+ text_set_blocked (playsink, TRUE);
+ GST_PLAY_SINK_UNLOCK (playsink);
+
+ return TRUE;
+}
+
+/* Pad-probe callback fired when one of the blocking probes installed by the
+ * *_set_blocked() helpers triggers.  Marks the corresponding pad as blocked;
+ * once the required pads are blocked it runs the actual reconfiguration and
+ * removes the probes again.  Returns GST_PAD_PROBE_OK to keep the probe
+ * until it is removed explicitly. */
+static GstPadProbeReturn
+sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info,
+ gpointer user_data)
+{
+ GstPlaySink *playsink = (GstPlaySink *) user_data;
+ GstPad *pad;
+
+ GST_PLAY_SINK_LOCK (playsink);
+
+ pad = GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (blockedpad)));
+ if (pad == playsink->video_pad) {
+ playsink->video_pad_blocked = TRUE;
+ GST_DEBUG_OBJECT (pad, "Video pad blocked");
+ } else if (pad == playsink->audio_pad) {
+ playsink->audio_pad_blocked = TRUE;
+ GST_DEBUG_OBJECT (pad, "Audio pad blocked");
+ } else if (pad == playsink->text_pad) {
+ playsink->text_pad_blocked = TRUE;
+ GST_DEBUG_OBJECT (pad, "Text pad blocked");
 }
 /* We reconfigure when for ALL streams:
 playsink->audio_pad_blocked);
 }
- gst_play_sink_reconfigure (playsink);
+ gst_play_sink_do_reconfigure (playsink);
- if (playsink->video_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->video_pad)));
- gst_pad_set_blocked_async_full (opad, FALSE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- gst_object_unref (opad);
- }
-
- if (playsink->audio_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->audio_pad)));
- gst_pad_set_blocked_async_full (opad, FALSE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- gst_object_unref (opad);
- }
-
- if (playsink->text_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->text_pad)));
- gst_pad_set_blocked_async_full (opad, FALSE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- gst_object_unref (opad);
- }
+ video_set_blocked (playsink, FALSE);
+ audio_set_blocked (playsink, FALSE);
+ text_set_blocked (playsink, FALSE);
 }
 gst_object_unref (pad);
 GST_PLAY_SINK_UNLOCK (playsink);
+
+ return GST_PAD_PROBE_OK;
 }
static void
gst_caps_unref (caps);
- if (reconfigure) {
- GST_PLAY_SINK_LOCK (playsink);
- if (playsink->video_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->video_pad)));
- gst_pad_set_blocked_async_full (opad, TRUE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- gst_object_unref (opad);
- }
+ if (reconfigure)
+ gst_play_sink_reconfigure (playsink);
+}
- if (playsink->audio_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->audio_pad)));
- gst_pad_set_blocked_async_full (opad, TRUE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- gst_object_unref (opad);
- }
-
- if (playsink->text_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->text_pad)));
- gst_pad_set_blocked_async_full (opad, TRUE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- gst_object_unref (opad);
- }
+/* Re-arms the blocking probe on the internal proxy pad of @pad so that the
+ * sink chains are reconfigured once dataflow blocks again.
+ *
+ * @pad must be one of the ghost pads previously requested from @playsink
+ * (video, audio or text) and @type must match that pad, otherwise a
+ * warning is logged and nothing happens.  FLUSHING pads never trigger a
+ * reconfigure.  Takes the playsink lock. */
+void
+gst_play_sink_refresh_pad (GstPlaySink * playsink, GstPad * pad,
+    GstPlaySinkType type)
+{
+  gulong *block_id = NULL;
+
+  GST_DEBUG_OBJECT (playsink, "refresh pad %" GST_PTR_FORMAT, pad);
+
+  GST_PLAY_SINK_LOCK (playsink);
+  /* map the ghost pad to the matching probe-id slot, validating @type */
+  if (pad == playsink->video_pad) {
+    if (type != GST_PLAY_SINK_TYPE_VIDEO_RAW &&
+        type != GST_PLAY_SINK_TYPE_VIDEO)
+      goto wrong_type;
+    block_id = &playsink->video_block_id;
+  } else if (pad == playsink->audio_pad) {
+    if (type != GST_PLAY_SINK_TYPE_AUDIO_RAW &&
+        type != GST_PLAY_SINK_TYPE_AUDIO)
+      goto wrong_type;
+    block_id = &playsink->audio_block_id;
+  } else if (pad == playsink->text_pad) {
+    if (type != GST_PLAY_SINK_TYPE_TEXT)
+      goto wrong_type;
+    block_id = &playsink->text_block_id;
+  }
+
+  /* only install a probe when the pad is ours and not already blocked;
+   * sinkpad_blocked_cb fires once downstream dataflow is blocked */
+  if (type != GST_PLAY_SINK_TYPE_FLUSHING && (block_id && *block_id == 0)) {
+    GstPad *blockpad =
+        GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (pad)));
+
+    *block_id =
+        gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+        sinkpad_blocked_cb, playsink, NULL);
+    PENDING_FLAG_SET (playsink, type);
+    gst_object_unref (blockpad);
+  }
+  GST_PLAY_SINK_UNLOCK (playsink);
+
+  return;
+
+  /* ERRORS */
+wrong_type:
+  {
+    GST_WARNING_OBJECT (playsink, "wrong type %u for pad %" GST_PTR_FORMAT,
+        type, pad);
    GST_PLAY_SINK_UNLOCK (playsink);
+    return;
  }
}
gboolean created = FALSE;
gboolean activate = TRUE;
const gchar *pad_name = NULL;
+ gulong *block_id = NULL;
GST_DEBUG_OBJECT (playsink, "request pad type %d", type);
}
playsink->audio_pad_raw = FALSE;
res = playsink->audio_pad;
+ block_id = &playsink->audio_block_id;
break;
case GST_PLAY_SINK_TYPE_VIDEO_RAW:
case GST_PLAY_SINK_TYPE_VIDEO:
}
playsink->video_pad_raw = FALSE;
res = playsink->video_pad;
+ block_id = &playsink->video_block_id;
break;
case GST_PLAY_SINK_TYPE_TEXT:
GST_LOG_OBJECT (playsink, "ghosting text");
created = TRUE;
}
res = playsink->text_pad;
+ block_id = &playsink->text_block_id;
break;
case GST_PLAY_SINK_TYPE_FLUSHING:
{
gchar *padname;
/* we need a unique padname for the flushing pad. */
- padname = g_strdup_printf ("flushing_%d", playsink->count);
+ padname = g_strdup_printf ("flushing_%u", playsink->count);
res = gst_ghost_pad_new_no_target (padname, GST_PAD_SINK);
g_free (padname);
playsink->count++;
* element is 'running' */
gst_pad_set_active (res, TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (playsink), res);
- if (type != GST_PLAY_SINK_TYPE_FLUSHING) {
+ if (block_id && *block_id == 0) {
GstPad *blockpad =
GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (res)));
- gst_pad_set_blocked_async_full (blockpad, TRUE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
+ *block_id =
+ gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ sinkpad_blocked_cb, playsink, NULL);
PENDING_FLAG_SET (playsink, type);
gst_object_unref (blockpad);
}
return res;
}
+
static GstPad *
gst_play_sink_request_new_pad (GstElement * element, GstPadTemplate * templ,
- const gchar * name)
+ const gchar * name, const GstCaps * caps)
{
GstPlaySink *psink;
GstPad *pad;
res = &playsink->video_pad;
g_signal_handlers_disconnect_by_func (playsink->video_pad, caps_notify_cb,
playsink);
+ video_set_blocked (playsink, FALSE);
} else if (pad == playsink->audio_pad) {
res = &playsink->audio_pad;
g_signal_handlers_disconnect_by_func (playsink->audio_pad, caps_notify_cb,
playsink);
+ audio_set_blocked (playsink, FALSE);
} else if (pad == playsink->text_pad) {
res = &playsink->text_pad;
+ text_set_blocked (playsink, FALSE);
} else {
/* try to release the given pad anyway, these could be the FLUSHING pads. */
res = &pad;
GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message);
break;
}
+ case GST_MESSAGE_ELEMENT:{
+ if (gst_is_video_overlay_prepare_window_handle_message (message)) {
+ GstVideoOverlay *overlay;
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->overlay_element
+ && GST_OBJECT_CAST (playsink->overlay_element) !=
+ GST_MESSAGE_SRC (message)) {
+ gst_object_unref (playsink->overlay_element);
+ playsink->overlay_element = NULL;
+ }
+
+ if (!playsink->overlay_element)
+ playsink->overlay_element =
+ GST_VIDEO_OVERLAY (gst_object_ref (GST_MESSAGE_SRC (message)));
+ overlay =
+ GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+ GST_OBJECT_UNLOCK (playsink);
+
+ GST_DEBUG_OBJECT (playsink, "Got prepare-xwindow-id message");
+
+ if (playsink->overlay_handle_set)
+ gst_video_overlay_set_window_handle (playsink->overlay_element,
+ playsink->overlay_handle);
+ if (playsink->overlay_handle_events_set)
+ gst_video_overlay_handle_events (playsink->overlay_element,
+ playsink->overlay_handle_events);
+ if (playsink->overlay_render_rectangle_set)
+ gst_video_overlay_set_render_rectangle (playsink->overlay_element,
+ playsink->overlay_x, playsink->overlay_y,
+ playsink->overlay_width, playsink->overlay_height);
+
+ gst_object_unref (overlay);
+ gst_message_unref (message);
+ gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (playsink));
+ } else {
+ GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin,
+ message);
+ }
+ break;
+ }
default:
GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message);
break;
gst_play_sink_send_event_to_sink (GstPlaySink * playsink, GstEvent * event)
{
gboolean res = TRUE;
-
- if (playsink->textchain && playsink->textchain->sink) {
- gst_event_ref (event);
- if ((res = gst_element_send_event (playsink->textchain->chain.bin, event))) {
- GST_DEBUG_OBJECT (playsink, "Sent event successfully to text sink");
- } else {
- GST_DEBUG_OBJECT (playsink, "Event failed when sent to text sink");
+ if (playsink->send_event_mode == MODE_FIRST) {
+ if (playsink->textchain && playsink->textchain->sink) {
+ gst_event_ref (event);
+ if ((res =
+ gst_element_send_event (playsink->textchain->chain.bin, event))) {
+ GST_DEBUG_OBJECT (playsink, "Sent event successfully to text sink");
+ } else {
+ GST_DEBUG_OBJECT (playsink, "Event failed when sent to text sink");
+ }
}
- }
- if (playsink->videochain) {
- gst_event_ref (event);
- if ((res = gst_element_send_event (playsink->videochain->chain.bin, event))) {
- GST_DEBUG_OBJECT (playsink, "Sent event successfully to video sink");
- goto done;
+ if (playsink->videochain) {
+ gst_event_ref (event);
+ if ((res =
+ gst_element_send_event (playsink->videochain->chain.bin,
+ event))) {
+ GST_DEBUG_OBJECT (playsink, "Sent event successfully to video sink");
+ goto done;
+ }
+ GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink");
}
- GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink");
- }
- if (playsink->audiochain) {
- gst_event_ref (event);
- if ((res = gst_element_send_event (playsink->audiochain->chain.bin, event))) {
- GST_DEBUG_OBJECT (playsink, "Sent event successfully to audio sink");
- goto done;
+ if (playsink->audiochain) {
+ gst_event_ref (event);
+ if ((res =
+ gst_element_send_event (playsink->audiochain->chain.bin,
+ event))) {
+ GST_DEBUG_OBJECT (playsink, "Sent event successfully to audio sink");
+ goto done;
+ }
+ GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink");
}
- GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink");
+ } else {
+ return
+ GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event
+ (GST_ELEMENT_CAST (playsink), event);
}
done:
gboolean res = FALSE;
GstEventType event_type = GST_EVENT_TYPE (event);
GstPlaySink *playsink;
-
playsink = GST_PLAY_SINK_CAST (element);
-
switch (event_type) {
case GST_EVENT_SEEK:
GST_DEBUG_OBJECT (element, "Sending event to a sink");
guint64 amount;
gdouble rate;
gboolean flush, intermediate;
-
gst_event_parse_step (event, &format, &amount, &rate, &flush,
&intermediate);
-
if (format == GST_FORMAT_BUFFERS) {
/* for buffers, we will try to step video frames, for other formats we
* send the step to all sinks */
{
GstStateChangeReturn ret;
GstStateChangeReturn bret;
-
GstPlaySink *playsink;
-
playsink = GST_PLAY_SINK (element);
-
switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_segment_init (&playsink->text_segment, GST_FORMAT_UNDEFINED);
+
playsink->need_async_start = TRUE;
/* we want to go async to PAUSED until we managed to configure and add the
* sinks */
do_async_start (playsink);
ret = GST_STATE_CHANGE_ASYNC;
+
+ /* block all pads here */
+ if (!gst_play_sink_reconfigure (playsink))
+ ret = GST_STATE_CHANGE_FAILURE;
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
/* unblock all pads here */
GST_PLAY_SINK_LOCK (playsink);
- if (playsink->video_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->video_pad)));
- if (gst_pad_is_blocked (opad)) {
- gst_pad_set_blocked_async_full (opad, FALSE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- }
- gst_object_unref (opad);
- playsink->video_pad_blocked = FALSE;
- }
-
- if (playsink->audio_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->audio_pad)));
-
- if (gst_pad_is_blocked (opad)) {
- gst_pad_set_blocked_async_full (opad, FALSE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- }
- gst_object_unref (opad);
- playsink->audio_pad_blocked = FALSE;
- }
-
- if (playsink->text_pad) {
- GstPad *opad =
- GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
- (playsink->text_pad)));
- if (gst_pad_is_blocked (opad)) {
- gst_pad_set_blocked_async_full (opad, FALSE, sinkpad_blocked_cb,
- gst_object_ref (playsink), (GDestroyNotify) gst_object_unref);
- }
- gst_object_unref (opad);
- playsink->text_pad_blocked = FALSE;
- }
+ video_set_blocked (playsink, FALSE);
+ audio_set_blocked (playsink, FALSE);
+ text_set_blocked (playsink, FALSE);
GST_PLAY_SINK_UNLOCK (playsink);
/* fall through */
case GST_STATE_CHANGE_READY_TO_NULL:
gst_object_unref (playsink->videochain->ts_offset);
playsink->videochain->ts_offset = NULL;
}
+
+ GST_OBJECT_LOCK (playsink);
+ if (playsink->overlay_element)
+ gst_object_unref (playsink->overlay_element);
+ playsink->overlay_element = NULL;
+
+ if (playsink->colorbalance_element) {
+ g_signal_handlers_disconnect_by_func (playsink->colorbalance_element,
+ G_CALLBACK (colorbalance_value_changed_cb), playsink);
+ gst_object_unref (playsink->colorbalance_element);
+ }
+ playsink->colorbalance_element = NULL;
+ GST_OBJECT_UNLOCK (playsink);
+
ret = GST_STATE_CHANGE_SUCCESS;
break;
default:
if (playsink->textchain && playsink->textchain->sink)
gst_bin_remove (GST_BIN_CAST (playsink->textchain->chain.bin),
playsink->textchain->sink);
-
if (playsink->audio_sink != NULL)
gst_element_set_state (playsink->audio_sink, GST_STATE_NULL);
if (playsink->video_sink != NULL)
gst_element_set_state (playsink->visualisation, GST_STATE_NULL);
if (playsink->text_sink != NULL)
gst_element_set_state (playsink->text_sink, GST_STATE_NULL);
-
free_chain ((GstPlayChain *) playsink->videodeinterlacechain);
playsink->videodeinterlacechain = NULL;
free_chain ((GstPlayChain *) playsink->videochain);
break;
}
return ret;
-
/* ERRORS */
activate_failed:
{
const GValue * value, GParamSpec * spec)
{
GstPlaySink *playsink = GST_PLAY_SINK (object);
-
switch (prop_id) {
case PROP_FLAGS:
gst_play_sink_set_flags (playsink, g_value_get_flags (value));
gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_TEXT,
g_value_get_object (value));
break;
+ case PROP_SEND_EVENT_MODE:
+ playsink->send_event_mode = g_value_get_enum (value);
+ break;
+ case PROP_FORCE_ASPECT_RATIO:{
+ GstPlayVideoChain *chain;
+ GstElement *elem;
+
+ playsink->force_aspect_ratio = g_value_get_boolean (value);
+
+ GST_PLAY_SINK_LOCK (playsink);
+ if (playsink->videochain) {
+ chain = (GstPlayVideoChain *) playsink->videochain;
+
+ if (chain->sink) {
+ elem =
+ gst_play_sink_find_property_sinks (playsink, chain->sink,
+ "force-aspect-ratio", G_TYPE_BOOLEAN);
+
+ if (elem)
+ g_object_set (elem, "force-aspect-ratio",
+ playsink->force_aspect_ratio, NULL);
+ }
+ }
+ GST_PLAY_SINK_UNLOCK (playsink);
+ break;
+ }
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, spec);
break;
GValue * value, GParamSpec * spec)
{
GstPlaySink *playsink = GST_PLAY_SINK (object);
-
switch (prop_id) {
case PROP_FLAGS:
g_value_set_flags (value, gst_play_sink_get_flags (playsink));
case PROP_VIS_PLUGIN:
g_value_take_object (value, gst_play_sink_get_vis_plugin (playsink));
break;
- case PROP_FRAME:
- gst_value_take_buffer (value, gst_play_sink_get_last_frame (playsink));
+ case PROP_SAMPLE:
+ gst_value_take_sample (value, gst_play_sink_get_last_sample (playsink));
break;
case PROP_AV_OFFSET:
g_value_set_int64 (value, gst_play_sink_get_av_offset (playsink));
g_value_take_object (value, gst_play_sink_get_sink (playsink,
GST_PLAY_SINK_TYPE_TEXT));
break;
+ case PROP_SEND_EVENT_MODE:
+ g_value_set_enum (value, playsink->send_event_mode);
+ break;
+ case PROP_FORCE_ASPECT_RATIO:
+ g_value_set_boolean (value, playsink->force_aspect_ratio);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, spec);
break;
}
}
+/* GstVideoOverlay::expose implementation: forward the expose request to
+ * the overlay element discovered inside the video sink, if any. */
+static void
+gst_play_sink_overlay_expose (GstVideoOverlay * overlay)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *target = NULL;
+
+  /* take a ref under the object lock so the element cannot go away */
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element != NULL)
+    target = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  GST_OBJECT_UNLOCK (playsink);
+
+  if (target == NULL)
+    return;
+
+  gst_video_overlay_expose (target);
+  gst_object_unref (target);
+}
+
+/* GstVideoOverlay::handle_events implementation.
+ *
+ * Caches @handle_events so it can be replayed onto an overlay element that
+ * appears later (see the prepare-window-handle handling in handle_message),
+ * and forwards it to the current overlay element when one exists. */
+static void
+gst_play_sink_overlay_handle_events (GstVideoOverlay * overlay,
+    gboolean handle_events)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *overlay_element;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element)
+    overlay_element =
+        GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  else
+    overlay_element = NULL;
+  GST_OBJECT_UNLOCK (playsink);
+
+  /* NOTE(review): these fields are written outside the object lock while
+   * handle_message reads them — presumably serialized by other means;
+   * confirm before relying on it */
+  playsink->overlay_handle_events_set = TRUE;
+  playsink->overlay_handle_events = handle_events;
+
+  if (overlay_element) {
+    gst_video_overlay_handle_events (overlay_element, handle_events);
+    gst_object_unref (overlay_element);
+  }
+}
+
+/* GstVideoOverlay::set_render_rectangle implementation: remember the
+ * rectangle so it can be replayed onto a future overlay element, and apply
+ * it to the current one when available. */
+static void
+gst_play_sink_overlay_set_render_rectangle (GstVideoOverlay * overlay, gint x,
+    gint y, gint width, gint height)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *target = NULL;
+
+  /* grab a ref to the real overlay element under the object lock */
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element != NULL)
+    target = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  GST_OBJECT_UNLOCK (playsink);
+
+  /* cache the rectangle for overlay elements created later on */
+  playsink->overlay_render_rectangle_set = TRUE;
+  playsink->overlay_x = x;
+  playsink->overlay_y = y;
+  playsink->overlay_width = width;
+  playsink->overlay_height = height;
+
+  if (target != NULL) {
+    gst_video_overlay_set_render_rectangle (target, x, y, width, height);
+    gst_object_unref (target);
+  }
+}
+
+/* GstVideoOverlay::set_window_handle implementation.
+ *
+ * Caches @handle so it can be set on an overlay element that appears later
+ * (replayed from handle_message), and forwards it to the current overlay
+ * element when one exists. */
+static void
+gst_play_sink_overlay_set_window_handle (GstVideoOverlay * overlay,
+    guintptr handle)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *overlay_element;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element)
+    overlay_element =
+        GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  else
+    overlay_element = NULL;
+  GST_OBJECT_UNLOCK (playsink);
+
+  /* remember the handle for overlay elements created later */
+  playsink->overlay_handle_set = TRUE;
+  playsink->overlay_handle = handle;
+
+  if (overlay_element) {
+    gst_video_overlay_set_window_handle (overlay_element, handle);
+    gst_object_unref (overlay_element);
+  }
+}
+
+/* Interface-init function for GstVideoOverlay: installs the playsink
+ * implementations that proxy to the overlay element inside the video sink. */
+static void
+gst_play_sink_overlay_init (gpointer g_iface, gpointer g_iface_data)
+{
+  GstVideoOverlayInterface *iface = (GstVideoOverlayInterface *) g_iface;
+  iface->expose = gst_play_sink_overlay_expose;
+  iface->handle_events = gst_play_sink_overlay_handle_events;
+  iface->set_render_rectangle = gst_play_sink_overlay_set_render_rectangle;
+  iface->set_window_handle = gst_play_sink_overlay_set_window_handle;
+}
+
+/* GstNavigation::send_event implementation.
+ *
+ * Takes ownership of @structure.  Routes it to a GstNavigation-implementing
+ * element inside the video chain when one exists; otherwise wraps it into a
+ * navigation event (which takes ownership) and pushes it at the video bin.
+ * If no video chain is available, the structure is freed unconsumed. */
+static void
+gst_play_sink_navigation_send_event (GstNavigation * navigation,
+    GstStructure * structure)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (navigation);
+  GstBin *bin = NULL;
+
+  /* ref the video bin under the playsink lock so it stays alive */
+  GST_PLAY_SINK_LOCK (playsink);
+  if (playsink->videochain && playsink->videochain->chain.bin)
+    bin = GST_BIN (gst_object_ref (playsink->videochain->chain.bin));
+  GST_PLAY_SINK_UNLOCK (playsink);
+
+  if (bin) {
+    GstElement *nav = gst_bin_get_by_interface (bin, GST_TYPE_NAVIGATION);
+
+    if (nav) {
+      /* gst_navigation_send_event consumes the structure */
+      gst_navigation_send_event (GST_NAVIGATION (nav), structure);
+      structure = NULL;
+      gst_object_unref (nav);
+    } else {
+      /* fall back to a navigation event; event takes the structure */
+      GstEvent *event = gst_event_new_navigation (structure);
+      structure = NULL;
+      gst_element_send_event (GST_ELEMENT (bin), event);
+    }
+
+    gst_object_unref (bin);
+  }
+
+  /* nothing consumed the structure: release it */
+  if (structure)
+    gst_structure_free (structure);
+}
+
+/* Interface-init function for GstNavigation: installs the playsink
+ * send_event implementation that forwards into the video chain. */
+static void
+gst_play_sink_navigation_init (gpointer g_iface, gpointer g_iface_data)
+{
+  GstNavigationInterface *iface = (GstNavigationInterface *) g_iface;
+
+  iface->send_event = gst_play_sink_navigation_send_event;
+}
+
+/* GstColorBalance::list_channels implementation: expose the proxy channel
+ * list that playsink maintains on behalf of the real balance element. */
+static const GList *
+gst_play_sink_colorbalance_list_channels (GstColorBalance * balance)
+{
+  return GST_PLAY_SINK (balance)->colorbalance_channels;
+}
+
+/* GstColorBalance::set_value implementation.
+ *
+ * Caches @value for the proxy channel @proxy and, when a real color
+ * balance element is present, converts the value from the proxy's range
+ * into the matching element channel's range and forwards it.  Emits
+ * "value-changed" on the proxy afterwards. */
+static void
+gst_play_sink_colorbalance_set_value (GstColorBalance * balance,
+    GstColorBalanceChannel * proxy, gint value)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+  GList *l;
+  gint i;
+  GstColorBalance *balance_element = NULL;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->colorbalance_element)
+    balance_element =
+        GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
+  GST_OBJECT_UNLOCK (playsink);
+
+  for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
+    GstColorBalanceChannel *proxy_tmp = l->data;
+    gdouble new_val;
+
+    if (proxy_tmp != proxy)
+      continue;
+
+    playsink->colorbalance_values[i] = value;
+
+    if (balance_element) {
+      GstColorBalanceChannel *channel = NULL;
+      const GList *channels, *k;
+
+      /* find the element channel whose label matches the proxy channel.
+       * FIX: iterate the element's channel list (k->data); the previous
+       * code read l->data and thus always "matched" the proxy itself */
+      channels = gst_color_balance_list_channels (balance_element);
+      for (k = channels; k; k = k->next) {
+        GstColorBalanceChannel *tmp = k->data;
+
+        if (g_strrstr (tmp->label, proxy->label)) {
+          channel = tmp;
+          break;
+        }
+      }
+
+      g_assert (channel);
+
+      /* Convert to [0, 1] range */
+      new_val =
+          ((gdouble) value -
+          (gdouble) proxy->min_value) / ((gdouble) proxy->max_value -
+          (gdouble) proxy->min_value);
+      /* Convert to channel range */
+      new_val =
+          channel->min_value + new_val * ((gdouble) channel->max_value -
+          (gdouble) channel->min_value);
+
+      gst_color_balance_set_value (balance_element, channel,
+          (gint) (new_val + 0.5));
+    }
+
+    gst_color_balance_value_changed (balance, proxy, value);
+    break;
+  }
+
+  /* FIX: drop the ref taken above even when @proxy was not found in our
+   * channel list, so the balance element is never leaked */
+  if (balance_element)
+    gst_object_unref (balance_element);
+}
+
+/* GstColorBalance::get_value implementation: return the cached value for
+ * proxy channel @proxy.  Reaching the fallback means @proxy was not one of
+ * the channels returned by list_channels(). */
+static gint
+gst_play_sink_colorbalance_get_value (GstColorBalance * balance,
+    GstColorBalanceChannel * proxy)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+  gint idx;
+
+  /* g_list_index compares pointers, matching the proxy identity check */
+  idx = g_list_index (playsink->colorbalance_channels, proxy);
+  if (idx >= 0)
+    return playsink->colorbalance_values[idx];
+
+  g_return_val_if_reached (0);
+}
+
+/* GstColorBalance::get_balance_type implementation.
+ *
+ * Queries the real balance element when available; defaults to
+ * GST_COLOR_BALANCE_SOFTWARE when none is present. */
+static GstColorBalanceType
+gst_play_sink_colorbalance_get_balance_type (GstColorBalance * balance)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+  GstColorBalance *balance_element = NULL;
+  GstColorBalanceType t = GST_COLOR_BALANCE_SOFTWARE;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->colorbalance_element)
+    balance_element =
+        GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
+  GST_OBJECT_UNLOCK (playsink);
+
+  if (balance_element) {
+    t = gst_color_balance_get_balance_type (balance_element);
+    gst_object_unref (balance_element);
+  }
+
+  return t;
+}
+
+/* Interface-init function for GstColorBalance: installs the playsink
+ * implementations that proxy values to the real balance element. */
+static void
+gst_play_sink_colorbalance_init (gpointer g_iface, gpointer g_iface_data)
+{
+  GstColorBalanceInterface *iface = (GstColorBalanceInterface *) g_iface;
+
+  iface->list_channels = gst_play_sink_colorbalance_list_channels;
+  iface->set_value = gst_play_sink_colorbalance_set_value;
+  iface->get_value = gst_play_sink_colorbalance_get_value;
+  iface->get_balance_type = gst_play_sink_colorbalance_get_balance_type;
+}
/* Registers the "playsink" element with @plugin and initialises the
 * playsink debug category. */
gboolean
gst_play_sink_plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (gst_play_sink_debug, "playsink", 0, "play bin");
-
  return gst_element_register (plugin, "playsink", GST_RANK_NONE,
      GST_TYPE_PLAY_SINK);
}