X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=gst%2Fplayback%2Fgstplaysink.c;h=475ae2fa431991325336ab4c47de82d28387542e;hb=f19acfc60b4949564f61cf5c0941ac96725974af;hp=abad050703786f16c454c3750fa924e36e5e12c8;hpb=b950b930aeae4cc2fcb266eed1c431236701b9c2;p=platform%2Fupstream%2Fgstreamer.git diff --git a/gst/playback/gstplaysink.c b/gst/playback/gstplaysink.c index abad050..475ae2f 100644 --- a/gst/playback/gstplaysink.c +++ b/gst/playback/gstplaysink.c @@ -14,24 +14,24 @@ * * You should have received a copy of the GNU Library General Public * License along with this library; if not, write to the - * Free Software Foundation, Inc., 59 Temple Place - Suite 330, - * Boston, MA 02111-1307, USA. + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. */ #ifdef HAVE_CONFIG_H #include "config.h" #endif -/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex - * with newer GLib versions (>= 2.31.0) */ -#define GLIB_DISABLE_DEPRECATION_WARNINGS - #include #include #include #include #include +#include +#include +#include +#include #include "gstplaysink.h" #include "gststreamsynchronizer.h" @@ -44,10 +44,45 @@ GST_DEBUG_CATEGORY_STATIC (gst_play_sink_debug); #define VOLUME_MAX_DOUBLE 10.0 #define DEFAULT_FLAGS GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_TEXT | \ - GST_PLAY_FLAG_SOFT_VOLUME + GST_PLAY_FLAG_SOFT_VOLUME | GST_PLAY_FLAG_SOFT_COLORBALANCE #define GST_PLAY_CHAIN(c) ((GstPlayChain *)(c)) +/* enum types */ +/** + * GstPlaySinkSendEventMode: + * @MODE_DEFAULT: default GstBin's send_event handling + * @MODE_FIRST: send event only to the first sink that return true + * + * Send event handling to use + */ +typedef enum +{ + MODE_DEFAULT = 0, + MODE_FIRST = 1 +} GstPlaySinkSendEventMode; + + +#define GST_TYPE_PLAY_SINK_SEND_EVENT_MODE (gst_play_sink_send_event_mode_get_type ()) +static GType +gst_play_sink_send_event_mode_get_type (void) +{ + static GType gtype = 0; + + if (gtype == 0) { + static const GEnumValue values[] = { + {MODE_DEFAULT, "Default GstBin's send_event handling (default)", + "default"}, + {MODE_FIRST, "Sends the event to sinks until the first one handles it", + "first"}, + {0, NULL, NULL} + }; + + gtype = g_enum_register_static ("GstPlaySinkSendEventMode", values); + } + return gtype; +} + /* holds the common data fields for the audio and video pipelines. We keep them * in a structure to more easily have all the info available. 
*/ typedef struct @@ -98,7 +133,8 @@ typedef struct GstElement *queue; GstElement *conv; GstElement *resample; - GstPad *blockpad; /* srcpad of resample, used for switching the vis */ + GstPad *blockpad; /* srcpad of queue, used for blocking the vis */ + GstPad *vispeerpad; /* srcpad of resample, used for unlinking the vis */ GstPad *vissinkpad; /* visualisation sinkpad, */ GstElement *vis; GstPad *vissrcpad; /* visualisation srcpad, */ @@ -123,12 +159,12 @@ typedef struct #define GST_PLAY_SINK_GET_LOCK(playsink) (&((GstPlaySink *)playsink)->lock) #define GST_PLAY_SINK_LOCK(playsink) G_STMT_START { \ GST_LOG_OBJECT (playsink, "locking from thread %p", g_thread_self ()); \ - g_static_rec_mutex_lock (GST_PLAY_SINK_GET_LOCK (playsink)); \ + g_rec_mutex_lock (GST_PLAY_SINK_GET_LOCK (playsink)); \ GST_LOG_OBJECT (playsink, "locked from thread %p", g_thread_self ()); \ } G_STMT_END #define GST_PLAY_SINK_UNLOCK(playsink) G_STMT_START { \ GST_LOG_OBJECT (playsink, "unlocking from thread %p", g_thread_self ()); \ - g_static_rec_mutex_unlock (GST_PLAY_SINK_GET_LOCK (playsink)); \ + g_rec_mutex_unlock (GST_PLAY_SINK_GET_LOCK (playsink)); \ } G_STMT_END #define PENDING_FLAG_SET(playsink, flagtype) \ @@ -148,7 +184,7 @@ struct _GstPlaySink { GstBin bin; - GStaticRecMutex lock; + GRecMutex lock; gboolean async_pending; gboolean need_async_start; @@ -206,6 +242,41 @@ struct _GstPlaySink gboolean volume_changed; /* volume/mute changed while no audiochain */ gboolean mute_changed; /* ... has been created yet */ gint64 av_offset; + GstPlaySinkSendEventMode send_event_mode; + gboolean force_aspect_ratio; + + /* videooverlay proxy interface */ + GstVideoOverlay *overlay_element; /* protected with LOCK */ + gboolean overlay_handle_set; + guintptr overlay_handle; + gboolean overlay_render_rectangle_set; + gint overlay_x, overlay_y, overlay_width, overlay_height; + gboolean overlay_handle_events_set; + gboolean overlay_handle_events; + + /* colorbalance proxy interface */ + GstColorBalance *colorbalance_element; + GList *colorbalance_channels; /* CONTRAST, BRIGHTNESS, HUE, SATURATION */ + gint colorbalance_values[4]; + + /* sending audio/video flushes break stream changes when the pipeline + * is paused and played again in 0.10 */ +#if 0 + GstSegment video_segment; + gboolean video_custom_flush_finished; + gboolean video_ignore_wrong_state; + gboolean video_pending_flush; + + GstSegment audio_segment; + gboolean audio_custom_flush_finished; + gboolean audio_ignore_wrong_state; + gboolean audio_pending_flush; +#endif + + GstSegment text_segment; + gboolean text_custom_flush_finished; + gboolean text_ignore_wrong_state; + gboolean text_pending_flush; }; struct _GstPlaySinkClass @@ -261,6 +332,8 @@ enum PROP_VIDEO_SINK, PROP_AUDIO_SINK, PROP_TEXT_SINK, + PROP_SEND_EVENT_MODE, + PROP_FORCE_ASPECT_RATIO, PROP_LAST }; @@ -288,6 +361,21 @@ static GstStateChangeReturn gst_play_sink_change_state (GstElement * element, static void gst_play_sink_handle_message (GstBin * bin, GstMessage * message); +/* sending audio/video flushes break stream changes when the pipeline + * is paused and played again in 0.10 */ +#if 0 +static gboolean gst_play_sink_video_sink_event (GstPad * pad, GstEvent * event); +static GstFlowReturn gst_play_sink_video_sink_chain (GstPad * pad, + GstBuffer * buffer); +static gboolean gst_play_sink_audio_sink_event (GstPad * pad, GstEvent * event); +static GstFlowReturn gst_play_sink_audio_sink_chain (GstPad * pad, + GstBuffer * buffer); +#endif +static gboolean gst_play_sink_text_sink_event (GstPad * pad, 
GstObject * parent, + GstEvent * event); +static GstFlowReturn gst_play_sink_text_sink_chain (GstPad * pad, + GstObject * parent, GstBuffer * buffer); + static void notify_volume_cb (GObject * object, GParamSpec * pspec, GstPlaySink * playsink); static void notify_mute_cb (GObject * object, GParamSpec * pspec, @@ -295,40 +383,46 @@ static void notify_mute_cb (GObject * object, GParamSpec * pspec, static void update_av_offset (GstPlaySink * playsink); -void -gst_play_marshal_SAMPLE__BOXED (GClosure * closure, - GValue * return_value G_GNUC_UNUSED, - guint n_param_values, - const GValue * param_values, - gpointer invocation_hint G_GNUC_UNUSED, gpointer marshal_data) -{ - typedef GstSample *(*GMarshalFunc_OBJECT__BOXED) (gpointer data1, - gpointer arg_1, gpointer data2); - register GMarshalFunc_OBJECT__BOXED callback; - register GCClosure *cc = (GCClosure *) closure; - register gpointer data1, data2; - GstSample *v_return; - g_return_if_fail (return_value != NULL); - g_return_if_fail (n_param_values == 2); - - if (G_CCLOSURE_SWAP_DATA (closure)) { - data1 = closure->data; - data2 = g_value_peek_pointer (param_values + 0); - } else { - data1 = g_value_peek_pointer (param_values + 0); - data2 = closure->data; - } - callback = - (GMarshalFunc_OBJECT__BOXED) (marshal_data ? marshal_data : cc->callback); - - v_return = callback (data1, g_value_get_boxed (param_values + 1), data2); +static gboolean gst_play_sink_do_reconfigure (GstPlaySink * playsink); - gst_value_take_sample (return_value, v_return); -} +static GQuark _playsink_reset_segment_event_marker_id = 0; /* static guint gst_play_sink_signals[LAST_SIGNAL] = { 0 }; */ -G_DEFINE_TYPE (GstPlaySink, gst_play_sink, GST_TYPE_BIN); +static void gst_play_sink_overlay_init (gpointer g_iface, + gpointer g_iface_data); +static void gst_play_sink_navigation_init (gpointer g_iface, + gpointer g_iface_data); +static void gst_play_sink_colorbalance_init (gpointer g_iface, + gpointer g_iface_data); + +static void +_do_init (GType type) +{ + static const GInterfaceInfo svol_info = { + NULL, NULL, NULL + }; + static const GInterfaceInfo ov_info = { + gst_play_sink_overlay_init, + NULL, NULL + }; + static const GInterfaceInfo nav_info = { + gst_play_sink_navigation_init, + NULL, NULL + }; + static const GInterfaceInfo col_info = { + gst_play_sink_colorbalance_init, + NULL, NULL + }; + + g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_info); + g_type_add_interface_static (type, GST_TYPE_VIDEO_OVERLAY, &ov_info); + g_type_add_interface_static (type, GST_TYPE_NAVIGATION, &nav_info); + g_type_add_interface_static (type, GST_TYPE_COLOR_BALANCE, &col_info); +} + +G_DEFINE_TYPE_WITH_CODE (GstPlaySink, gst_play_sink, GST_TYPE_BIN, + _do_init (g_define_type_id)); static void gst_play_sink_class_init (GstPlaySinkClass * klass) @@ -438,6 +532,7 @@ gst_play_sink_class_init (GstPlaySinkClass * klass) g_param_spec_object ("audio-sink", "Audio Sink", "the audio output element to use (NULL = default sink)", GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + /** * GstPlaySink:text-sink: * @@ -448,13 +543,39 @@ gst_play_sink_class_init (GstPlaySinkClass * klass) */ g_object_class_install_property (gobject_klass, PROP_TEXT_SINK, g_param_spec_object ("text-sink", "Text sink", - "the text output element to use (NULL = default textoverlay)", + "the text output element to use (NULL = default subtitleoverlay)", GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + /** + * GstPlaySink::send-event-mode: + * + * Sets the handling method used 
for events received from send_event + * function. The default is %MODE_DEFAULT, that uses %GstBin's default + * handling (push the event to all internal sinks). + * + * Since: 0.10.37 + */ + g_object_class_install_property (gobject_klass, PROP_SEND_EVENT_MODE, + g_param_spec_enum ("send-event-mode", "Send event mode", + "How to send events received in send_event function", + GST_TYPE_PLAY_SINK_SEND_EVENT_MODE, MODE_DEFAULT, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + /** + * GstPlaySink::force-aspect-ratio: + * + * Requests the video sink to enforce the video display aspect ratio. + * + * Since: 0.10.37 + */ + g_object_class_install_property (gobject_klass, PROP_FORCE_ASPECT_RATIO, + g_param_spec_boolean ("force-aspect-ratio", "Force Aspect Ratio", + "When enabled, scaling will respect original aspect ratio", TRUE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); g_signal_new ("reconfigure", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstPlaySinkClass, - reconfigure), NULL, NULL, gst_marshal_BOOLEAN__VOID, G_TYPE_BOOLEAN, + reconfigure), NULL, NULL, g_cclosure_marshal_generic, G_TYPE_BOOLEAN, 0, G_TYPE_NONE); /** * GstPlaySink::convert-sample @@ -474,7 +595,7 @@ gst_play_sink_class_init (GstPlaySinkClass * klass) g_signal_new ("convert-sample", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstPlaySinkClass, convert_sample), NULL, NULL, - gst_play_marshal_SAMPLE__BOXED, GST_TYPE_SAMPLE, 1, GST_TYPE_CAPS); + g_cclosure_marshal_generic, GST_TYPE_SAMPLE, 1, GST_TYPE_CAPS); gst_element_class_add_pad_template (gstelement_klass, gst_static_pad_template_get (&audiorawtemplate)); @@ -486,7 +607,7 @@ gst_play_sink_class_init (GstPlaySinkClass * klass) gst_static_pad_template_get (&videotemplate)); gst_element_class_add_pad_template (gstelement_klass, gst_static_pad_template_get (&texttemplate)); - gst_element_class_set_details_simple (gstelement_klass, "Player Sink", + gst_element_class_set_static_metadata (gstelement_klass, "Player Sink", "Generic/Bin/Sink", "Convenience sink for multiple streams", "Wim Taymans "); @@ -504,11 +625,19 @@ gst_play_sink_class_init (GstPlaySinkClass * klass) klass->reconfigure = GST_DEBUG_FUNCPTR (gst_play_sink_reconfigure); klass->convert_sample = GST_DEBUG_FUNCPTR (gst_play_sink_convert_sample); + + _playsink_reset_segment_event_marker_id = + g_quark_from_static_string ("gst-playsink-reset-segment-event-marker"); + + g_type_class_ref (GST_TYPE_STREAM_SYNCHRONIZER); + g_type_class_ref (GST_TYPE_COLOR_BALANCE_CHANNEL); } static void gst_play_sink_init (GstPlaySink * playsink) { + GstColorBalanceChannel *channel; + /* init groups */ playsink->video_sink = NULL; playsink->audio_sink = NULL; @@ -518,14 +647,56 @@ gst_play_sink_init (GstPlaySink * playsink) playsink->font_desc = NULL; playsink->subtitle_encoding = NULL; playsink->flags = DEFAULT_FLAGS; + playsink->send_event_mode = MODE_DEFAULT; + playsink->force_aspect_ratio = TRUE; playsink->stream_synchronizer = g_object_new (GST_TYPE_STREAM_SYNCHRONIZER, NULL); gst_bin_add (GST_BIN_CAST (playsink), GST_ELEMENT_CAST (playsink->stream_synchronizer)); - g_static_rec_mutex_init (&playsink->lock); + g_rec_mutex_init (&playsink->lock); GST_OBJECT_FLAG_SET (playsink, GST_ELEMENT_FLAG_SINK); + + channel = + GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, + NULL)); + channel->label = g_strdup ("CONTRAST"); + channel->min_value = -1000; + channel->max_value = 1000; + playsink->colorbalance_channels = + g_list_append 
(playsink->colorbalance_channels, channel); + playsink->colorbalance_values[0] = 0; + + channel = + GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, + NULL)); + channel->label = g_strdup ("BRIGHTNESS"); + channel->min_value = -1000; + channel->max_value = 1000; + playsink->colorbalance_channels = + g_list_append (playsink->colorbalance_channels, channel); + playsink->colorbalance_values[1] = 0; + + channel = + GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, + NULL)); + channel->label = g_strdup ("HUE"); + channel->min_value = -1000; + channel->max_value = 1000; + playsink->colorbalance_channels = + g_list_append (playsink->colorbalance_channels, channel); + playsink->colorbalance_values[2] = 0; + + channel = + GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, + NULL)); + channel->label = g_strdup ("SATURATION"); + channel->min_value = -1000; + channel->max_value = 1000; + playsink->colorbalance_channels = + g_list_append (playsink->colorbalance_channels, channel); + playsink->colorbalance_values[3] = 0; } static void @@ -617,6 +788,11 @@ gst_play_sink_dispose (GObject * object) playsink->stream_synchronizer = NULL; + g_list_foreach (playsink->colorbalance_channels, (GFunc) gst_object_unref, + NULL); + g_list_free (playsink->colorbalance_channels); + playsink->colorbalance_channels = NULL; + G_OBJECT_CLASS (gst_play_sink_parent_class)->dispose (object); } @@ -627,7 +803,7 @@ gst_play_sink_finalize (GObject * object) playsink = GST_PLAY_SINK (object); - g_static_rec_mutex_free (&playsink->lock); + g_rec_mutex_clear (&playsink->lock); G_OBJECT_CLASS (gst_play_sink_parent_class)->finalize (object); } @@ -736,7 +912,7 @@ gst_play_sink_vis_blocked (GstPad * tee_pad, GstPadProbeInfo * info, goto done; /* unlink the old plugin and unghost the pad */ - gst_pad_unlink (chain->blockpad, chain->vissinkpad); + gst_pad_unlink (chain->vispeerpad, chain->vissinkpad); gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (chain->srcpad), NULL); /* set the old plugin to NULL and remove */ @@ -753,7 +929,7 @@ gst_play_sink_vis_blocked (GstPad * tee_pad, GstPadProbeInfo * info, chain->vissrcpad = gst_element_get_static_pad (chain->vis, "src"); /* link pads */ - gst_pad_link_full (chain->blockpad, chain->vissinkpad, + gst_pad_link_full (chain->vispeerpad, chain->vissinkpad, GST_PAD_LINK_CHECK_NOTHING); gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (chain->srcpad), chain->vissrcpad); @@ -1073,9 +1249,10 @@ gst_play_sink_find_property (GstPlaySink * playsink, GstElement * obj, found = gst_iterator_find_custom (it, (GCompareFunc) find_property, &item, &helper); gst_iterator_free (it); - if (found) + if (found) { result = g_value_dup_object (&item); - g_value_unset (&item); + g_value_unset (&item); + } } else { if (element_has_property (obj, name, expected_type)) { result = obj; @@ -1110,7 +1287,9 @@ do_async_done (GstPlaySink * playsink) if (playsink->async_pending) { GST_INFO_OBJECT (playsink, "Sending async_done message"); - message = gst_message_new_async_done (GST_OBJECT_CAST (playsink), FALSE); + message = + gst_message_new_async_done (GST_OBJECT_CAST (playsink), + GST_CLOCK_TIME_NONE); GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST (playsink), message); @@ -1189,6 +1368,10 @@ gen_video_deinterlace_chain (GstPlaySink * playsink) GST_DEBUG_OBJECT (playsink, "creating deinterlace"); chain->deinterlace = gst_element_factory_make ("deinterlace", "deinterlace"); if (chain->deinterlace == NULL) { + chain->deinterlace = + 
gst_element_factory_make ("avdeinterlace", "deinterlace"); + } + if (chain->deinterlace == NULL) { post_missing_element_message (playsink, "deinterlace"); GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, (_("Missing element '%s' - check your GStreamer installation."), @@ -1235,6 +1418,151 @@ link_failed: } } +static gboolean +is_valid_color_balance_element (GstColorBalance * bal) +{ + gboolean have_brightness = FALSE; + gboolean have_contrast = FALSE; + gboolean have_hue = FALSE; + gboolean have_saturation = FALSE; + const GList *channels, *l; + + channels = gst_color_balance_list_channels (bal); + for (l = channels; l; l = l->next) { + GstColorBalanceChannel *ch = l->data; + + if (g_strrstr (ch->label, "BRIGHTNESS")) + have_brightness = TRUE; + else if (g_strrstr (ch->label, "CONTRAST")) + have_contrast = TRUE; + else if (g_strrstr (ch->label, "HUE")) + have_hue = TRUE; + else if (g_strrstr (ch->label, "SATURATION")) + have_saturation = TRUE; + } + + return have_brightness && have_contrast && have_hue && have_saturation; +} + +static void +iterate_color_balance_elements (const GValue * item, gpointer user_data) +{ + gboolean valid; + GstColorBalance *cb, **cb_out = user_data; + + cb = GST_COLOR_BALANCE (g_value_get_object (item)); + valid = is_valid_color_balance_element (cb); + if (valid) { + if (*cb_out + && gst_color_balance_get_balance_type (*cb_out) == + GST_COLOR_BALANCE_SOFTWARE) { + gst_object_unref (*cb_out); + *cb_out = GST_COLOR_BALANCE (gst_object_ref (cb)); + } else if (!*cb_out) { + *cb_out = GST_COLOR_BALANCE (gst_object_ref (cb)); + } + } +} + +static GstColorBalance * +find_color_balance_element (GstElement * element) +{ + GstIterator *it; + GstColorBalance *cb = NULL; + + if (GST_IS_COLOR_BALANCE (element) + && is_valid_color_balance_element (GST_COLOR_BALANCE (element))) + return GST_COLOR_BALANCE (gst_object_ref (element)); + else if (!GST_IS_BIN (element)) + return FALSE; + + it = gst_bin_iterate_all_by_interface (GST_BIN (element), + GST_TYPE_COLOR_BALANCE); + while (gst_iterator_foreach (it, iterate_color_balance_elements, + &cb) == GST_ITERATOR_RESYNC) + gst_iterator_resync (it); + gst_iterator_free (it); + + return cb; +} + +static void +colorbalance_value_changed_cb (GstColorBalance * balance, + GstColorBalanceChannel * channel, gint value, GstPlaySink * playsink) +{ + GList *l; + gint i; + + for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) { + GstColorBalanceChannel *proxy = l->data; + + if (g_strrstr (channel->label, proxy->label)) { + gdouble new_val; + + /* Convert to [0, 1] range */ + new_val = + ((gdouble) value - + (gdouble) channel->min_value) / ((gdouble) channel->max_value - + (gdouble) channel->min_value); + /* Convert to proxy range */ + new_val = + proxy->min_value + new_val * ((gdouble) proxy->max_value - + (gdouble) proxy->min_value); + playsink->colorbalance_values[i] = (gint) (0.5 + new_val); + + gst_color_balance_value_changed (GST_COLOR_BALANCE (playsink), proxy, + playsink->colorbalance_values[i]); + break; + } + } +} + +static void +update_colorbalance (GstPlaySink * playsink) +{ + GstColorBalance *balance = NULL; + GList *l; + gint i; + + GST_OBJECT_LOCK (playsink); + if (playsink->colorbalance_element) { + balance = + GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element)); + } + GST_OBJECT_UNLOCK (playsink); + if (!balance) + return; + + g_signal_handlers_block_by_func (balance, + G_CALLBACK (colorbalance_value_changed_cb), playsink); + + for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, 
i++) { + GstColorBalanceChannel *proxy = l->data; + GstColorBalanceChannel *channel = NULL; + const GList *channels, *k; + + channels = gst_color_balance_list_channels (balance); + for (k = channels; k; k = k->next) { + GstColorBalanceChannel *tmp = k->data; + + if (g_strrstr (tmp->label, proxy->label)) { + channel = tmp; + break; + } + } + + g_assert (channel); + + gst_color_balance_set_value (balance, channel, + playsink->colorbalance_values[i]); + } + + g_signal_handlers_unblock_by_func (balance, + G_CALLBACK (colorbalance_value_changed_cb), playsink); + + gst_object_unref (balance); +} + /* make the element (bin) that contains the elements needed to perform * video display. * @@ -1302,6 +1630,14 @@ gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) chain->async = TRUE; } + /* Make sure the aspect ratio is kept */ + elem = + gst_play_sink_find_property_sinks (playsink, chain->sink, + "force-aspect-ratio", G_TYPE_BOOLEAN); + if (elem) + g_object_set (elem, "force-aspect-ratio", playsink->force_aspect_ratio, + NULL); + /* find ts-offset element */ gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *) gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset", @@ -1314,6 +1650,34 @@ gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) gst_object_ref_sink (bin); gst_bin_add (bin, chain->sink); + /* Get the VideoOverlay element */ + { + GstVideoOverlay *overlay = NULL; + + GST_OBJECT_LOCK (playsink); + if (playsink->overlay_element) + gst_object_unref (playsink->overlay_element); + playsink->overlay_element = + GST_VIDEO_OVERLAY (gst_bin_get_by_interface (GST_BIN (chain->chain.bin), + GST_TYPE_VIDEO_OVERLAY)); + if (playsink->overlay_element) + overlay = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element)); + GST_OBJECT_UNLOCK (playsink); + + if (overlay) { + if (playsink->overlay_handle_set) + gst_video_overlay_set_window_handle (overlay, playsink->overlay_handle); + if (playsink->overlay_handle_events_set) + gst_video_overlay_handle_events (overlay, + playsink->overlay_handle_events); + if (playsink->overlay_render_rectangle_set) + gst_video_overlay_set_render_rectangle (overlay, + playsink->overlay_x, playsink->overlay_y, + playsink->overlay_width, playsink->overlay_height); + gst_object_unref (overlay); + } + } + /* decouple decoder from sink, this improves playback quite a lot since the * decoder can continue while the sink blocks for synchronisation. 
We don't * need a lot of buffers as this consumes a lot of memory and we don't want @@ -1333,10 +1697,38 @@ gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) head = prev = chain->queue; } - if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)) { + GST_OBJECT_LOCK (playsink); + if (playsink->colorbalance_element) { + g_signal_handlers_disconnect_by_func (playsink->colorbalance_element, + G_CALLBACK (colorbalance_value_changed_cb), playsink); + gst_object_unref (playsink->colorbalance_element); + } + playsink->colorbalance_element = find_color_balance_element (chain->sink); + if (playsink->colorbalance_element) { + g_signal_connect (playsink->colorbalance_element, "value-changed", + G_CALLBACK (colorbalance_value_changed_cb), playsink); + } + GST_OBJECT_UNLOCK (playsink); + + if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO) + || (!playsink->colorbalance_element + && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE))) { + gboolean use_converters = !(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO); + gboolean use_balance = !playsink->colorbalance_element + && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE); + GST_DEBUG_OBJECT (playsink, "creating videoconverter"); chain->conv = - g_object_new (GST_TYPE_PLAY_SINK_VIDEO_CONVERT, "name", "vconv", NULL); + g_object_new (GST_TYPE_PLAY_SINK_VIDEO_CONVERT, "name", "vconv", + "use-converters", use_converters, "use-balance", use_balance, NULL); + + GST_OBJECT_LOCK (playsink); + if (use_balance && GST_PLAY_SINK_VIDEO_CONVERT (chain->conv)->balance) + playsink->colorbalance_element = + GST_COLOR_BALANCE (gst_object_ref (GST_PLAY_SINK_VIDEO_CONVERT + (chain->conv)->balance)); + GST_OBJECT_UNLOCK (playsink); + gst_bin_add (bin, chain->conv); if (prev) { if (!gst_element_link_pads_full (prev, "src", chain->conv, "sink", @@ -1348,6 +1740,8 @@ gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) prev = chain->conv; } + update_colorbalance (playsink); + if (prev) { GST_DEBUG_OBJECT (playsink, "linking to sink"); if (!gst_element_link_pads_full (prev, "src", chain->sink, NULL, @@ -1357,8 +1751,17 @@ gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) pad = gst_element_get_static_pad (head, "sink"); chain->sinkpad = gst_ghost_pad_new ("sink", pad); - gst_object_unref (pad); + /* sending audio/video flushes break stream changes when the pipeline + * is paused and played again in 0.10 */ +#if 0 + gst_pad_set_event_function (chain->sinkpad, + GST_DEBUG_FUNCPTR (gst_play_sink_video_sink_event)); + gst_pad_set_chain_function (chain->sinkpad, + GST_DEBUG_FUNCPTR (gst_play_sink_video_sink_chain)); +#endif + + gst_object_unref (pad); gst_element_add_pad (chain->chain.bin, chain->sinkpad); return chain; @@ -1394,6 +1797,7 @@ no_sinks: free_chain ((GstPlayChain *) chain); return NULL; } + link_failed: { GST_ELEMENT_ERROR (playsink, CORE, PAD, @@ -1427,8 +1831,35 @@ setup_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) if (ret == GST_STATE_CHANGE_FAILURE) return FALSE; - /* find ts-offset element */ + /* Get the VideoOverlay element */ + { + GstVideoOverlay *overlay = NULL; + + GST_OBJECT_LOCK (playsink); + if (playsink->overlay_element) + gst_object_unref (playsink->overlay_element); + playsink->overlay_element = + GST_VIDEO_OVERLAY (gst_bin_get_by_interface (GST_BIN (chain->chain.bin), + GST_TYPE_VIDEO_OVERLAY)); + if (playsink->overlay_element) + overlay = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element)); + GST_OBJECT_UNLOCK (playsink); + + if (overlay) { + if 
(playsink->overlay_handle_set) + gst_video_overlay_set_window_handle (overlay, playsink->overlay_handle); + if (playsink->overlay_handle_events_set) + gst_video_overlay_handle_events (overlay, + playsink->overlay_handle_events); + if (playsink->overlay_render_rectangle_set) + gst_video_overlay_set_render_rectangle (overlay, + playsink->overlay_x, playsink->overlay_y, + playsink->overlay_width, playsink->overlay_height); + gst_object_unref (overlay); + } + } + /* find ts-offset element */ gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *) gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset", G_TYPE_INT64)); @@ -1447,9 +1878,353 @@ setup_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async) GST_DEBUG_OBJECT (playsink, "no async property on the sink"); chain->async = TRUE; } + + /* Make sure the aspect ratio is kept */ + elem = + gst_play_sink_find_property_sinks (playsink, chain->sink, + "force-aspect-ratio", G_TYPE_BOOLEAN); + if (elem) + g_object_set (elem, "force-aspect-ratio", playsink->force_aspect_ratio, + NULL); + + GST_OBJECT_LOCK (playsink); + if (playsink->colorbalance_element) { + g_signal_handlers_disconnect_by_func (playsink->colorbalance_element, + G_CALLBACK (colorbalance_value_changed_cb), playsink); + gst_object_unref (playsink->colorbalance_element); + } + playsink->colorbalance_element = find_color_balance_element (chain->sink); + if (playsink->colorbalance_element) { + g_signal_connect (playsink->colorbalance_element, "value-changed", + G_CALLBACK (colorbalance_value_changed_cb), playsink); + } + GST_OBJECT_UNLOCK (playsink); + + if (chain->conv) { + gboolean use_balance = !playsink->colorbalance_element + && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE); + + g_object_set (chain->conv, "use-balance", use_balance, NULL); + + GST_OBJECT_LOCK (playsink); + if (use_balance && GST_PLAY_SINK_VIDEO_CONVERT (chain->conv)->balance) + playsink->colorbalance_element = + GST_COLOR_BALANCE (gst_object_ref (GST_PLAY_SINK_VIDEO_CONVERT + (chain->conv)->balance)); + GST_OBJECT_UNLOCK (playsink); + } + + update_colorbalance (playsink); + return TRUE; } +static void +_generate_update_newsegment_event (GstPad * pad, GstSegment * segment, + GstEvent ** event1) +{ + GstEvent *event; + GstStructure *structure; + event = gst_event_new_segment (segment); + structure = gst_event_writable_structure (event); + gst_structure_id_set (structure, + _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL); + *event1 = event; +} + +static gboolean +gst_play_sink_sink_event (GstPad * pad, GstObject * parent, GstEvent * event, + const gchar * sink_type, + gboolean * sink_ignore_wrong_state, + gboolean * sink_custom_flush_finished, + gboolean * sink_pending_flush, GstSegment * sink_segment) +{ + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent)); + gboolean ret; + const GstStructure *structure = gst_event_get_structure (event); + + if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM_OOB && structure) { + gchar *custom_flush; + gchar *custom_flush_finish; + + custom_flush = g_strdup_printf ("playsink-custom-%s-flush", sink_type); + custom_flush_finish = + g_strdup_printf ("playsink-custom-%s-flush-finish", sink_type); + if (strcmp (gst_structure_get_name (structure), custom_flush) == 0) { + GST_DEBUG_OBJECT (pad, + "Custom %s flush event received, marking to flush %s", sink_type, + sink_type); + GST_PLAY_SINK_LOCK (playsink); + *sink_ignore_wrong_state = TRUE; + *sink_custom_flush_finished = FALSE; + 
GST_PLAY_SINK_UNLOCK (playsink); + } else if (strcmp (gst_structure_get_name (structure), + custom_flush_finish) == 0) { + GST_DEBUG_OBJECT (pad, "Custom %s flush finish event received", + sink_type); + GST_PLAY_SINK_LOCK (playsink); + *sink_pending_flush = TRUE; + *sink_custom_flush_finished = TRUE; + GST_PLAY_SINK_UNLOCK (playsink); + } + + g_free (custom_flush); + g_free (custom_flush_finish); + } else if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) { + GST_PLAY_SINK_LOCK (playsink); + GST_DEBUG_OBJECT (pad, "Resetting %s segment because of flush-stop event", + sink_type); + gst_segment_init (sink_segment, GST_FORMAT_UNDEFINED); + GST_PLAY_SINK_UNLOCK (playsink); + } + + GST_DEBUG_OBJECT (pad, "Forwarding event %" GST_PTR_FORMAT, event); + ret = gst_pad_event_default (pad, parent, gst_event_ref (event)); + + if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) { + const GstSegment *segment; + + gst_event_parse_segment (event, &segment); + GST_DEBUG_OBJECT (pad, "Segment event: %" GST_SEGMENT_FORMAT, segment); + + GST_PLAY_SINK_LOCK (playsink); + if (sink_segment->format != segment->format) { + GST_DEBUG_OBJECT (pad, "%s segment format changed: %s -> %s", + sink_type, + gst_format_get_name (sink_segment->format), + gst_format_get_name (segment->format)); + gst_segment_init (sink_segment, segment->format); + } + + GST_DEBUG_OBJECT (pad, "Old %s segment: %" GST_SEGMENT_FORMAT, + sink_type, sink_segment); + gst_segment_copy_into (&playsink->text_segment, sink_segment); + GST_DEBUG_OBJECT (pad, "New %s segment: %" GST_SEGMENT_FORMAT, + sink_type, sink_segment); + GST_PLAY_SINK_UNLOCK (playsink); + } + + gst_event_unref (event); + gst_object_unref (playsink); + return ret; +} + +static GstFlowReturn +gst_play_sink_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer, + const gchar * sink_type, + gboolean * sink_ignore_wrong_state, + gboolean * sink_custom_flush_finished, + gboolean * sink_pending_flush, GstSegment * sink_segment) +{ + GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad)); + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin)); + GstFlowReturn ret; + + GST_PLAY_SINK_LOCK (playsink); + + if (*sink_pending_flush) { + GstEvent *event; + GstStructure *structure; + + *sink_pending_flush = FALSE; + + GST_PLAY_SINK_UNLOCK (playsink); + + /* make the bin drop all cached data. + * This event will be dropped on the src pad, if any. */ + event = gst_event_new_flush_start (); + structure = gst_event_writable_structure (event); + gst_structure_id_set (structure, + _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL); + + GST_DEBUG_OBJECT (pad, + "Pushing %s flush-start event with reset segment marker set: %" + GST_PTR_FORMAT, sink_type, event); + gst_pad_send_event (pad, event); + + /* make queue drop all cached data. + * This event will be dropped on the src pad. */ + event = gst_event_new_flush_stop (TRUE); + structure = gst_event_writable_structure (event); + gst_structure_id_set (structure, + _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL); + + GST_DEBUG_OBJECT (pad, + "Pushing %s flush-stop event with reset segment marker set: %" + GST_PTR_FORMAT, sink_type, event); + gst_pad_send_event (pad, event); + + /* Re-sync queue segment info after flush-stop. + * This event will be dropped on the src pad. 
*/ + if (sink_segment->format != GST_FORMAT_UNDEFINED) { + GstEvent *event1; + + _generate_update_newsegment_event (pad, sink_segment, &event1); + GST_DEBUG_OBJECT (playsink, + "Pushing segment event with reset " + "segment marker set: %" GST_PTR_FORMAT, event1); + gst_pad_send_event (pad, event1); + } + } else { + GST_PLAY_SINK_UNLOCK (playsink); + } + + ret = gst_proxy_pad_chain_default (pad, parent, buffer); + + GST_PLAY_SINK_LOCK (playsink); + if (ret == GST_FLOW_FLUSHING && *sink_ignore_wrong_state) { + GST_DEBUG_OBJECT (pad, "Ignoring wrong state for %s during flush", + sink_type); + if (*sink_custom_flush_finished) { + GST_DEBUG_OBJECT (pad, "Custom flush finished, stop ignoring " + "wrong state for %s", sink_type); + *sink_ignore_wrong_state = FALSE; + } + + ret = GST_FLOW_OK; + } + GST_PLAY_SINK_UNLOCK (playsink); + + gst_object_unref (playsink); + gst_object_unref (tbin); + return ret; +} + +/* sending audio/video flushes break stream changes when the pipeline + * is paused and played again in 0.10 */ +#if 0 +static gboolean +gst_play_sink_video_sink_event (GstPad * pad, GstEvent * event) +{ + GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad)); + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin)); + gboolean ret; + + ret = gst_play_sink_sink_event (pad, event, "video", + &playsink->video_ignore_wrong_state, + &playsink->video_custom_flush_finished, + &playsink->video_pending_flush, &playsink->video_segment); + + gst_object_unref (playsink); + gst_object_unref (tbin); + return ret; +} + +static GstFlowReturn +gst_play_sink_video_sink_chain (GstPad * pad, GstBuffer * buffer) +{ + GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad)); + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin)); + gboolean ret; + + ret = gst_play_sink_sink_chain (pad, buffer, "video", + &playsink->video_ignore_wrong_state, + &playsink->video_custom_flush_finished, + &playsink->video_pending_flush, &playsink->video_segment); + + gst_object_unref (playsink); + gst_object_unref (tbin); + return ret; +} + +static gboolean +gst_play_sink_audio_sink_event (GstPad * pad, GstEvent * event) +{ + GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad)); + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin)); + gboolean ret; + + ret = gst_play_sink_sink_event (pad, event, "audio", + &playsink->audio_ignore_wrong_state, + &playsink->audio_custom_flush_finished, + &playsink->audio_pending_flush, &playsink->audio_segment); + + gst_object_unref (playsink); + gst_object_unref (tbin); + return ret; +} + +static GstFlowReturn +gst_play_sink_audio_sink_chain (GstPad * pad, GstBuffer * buffer) +{ + GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad)); + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin)); + gboolean ret; + + ret = gst_play_sink_sink_chain (pad, buffer, "audio", + &playsink->audio_ignore_wrong_state, + &playsink->audio_custom_flush_finished, + &playsink->audio_pending_flush, &playsink->audio_segment); + + gst_object_unref (playsink); + gst_object_unref (tbin); + return ret; +} +#endif + +static gboolean +gst_play_sink_text_sink_event (GstPad * pad, GstObject * parent, + GstEvent * event) +{ + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent)); + gboolean ret; + + ret = gst_play_sink_sink_event (pad, parent, event, "subtitle", + &playsink->text_ignore_wrong_state, + &playsink->text_custom_flush_finished, + &playsink->text_pending_flush, &playsink->text_segment); + + gst_object_unref (playsink); + + return 
ret; +} + +static GstFlowReturn +gst_play_sink_text_sink_chain (GstPad * pad, GstObject * parent, + GstBuffer * buffer) +{ + gboolean ret; + GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent)); + + ret = gst_play_sink_sink_chain (pad, parent, buffer, "subtitle", + &playsink->text_ignore_wrong_state, + &playsink->text_custom_flush_finished, + &playsink->text_pending_flush, &playsink->text_segment); + + gst_object_unref (playsink); + return ret; +} + +static gboolean +gst_play_sink_text_src_event (GstPad * pad, GstObject * parent, + GstEvent * event) +{ + gboolean ret; + const GstStructure *structure; + + GST_DEBUG_OBJECT (pad, "Got event %" GST_PTR_FORMAT, event); + + structure = gst_event_get_structure (event); + + if (structure && + gst_structure_id_has_field (structure, + _playsink_reset_segment_event_marker_id)) { + /* the events marked with a reset segment marker + * are sent internally to reset the queue and + * must be dropped here */ + GST_DEBUG_OBJECT (pad, "Dropping event with reset " + "segment marker set: %" GST_PTR_FORMAT, event); + ret = TRUE; + goto out; + } + + ret = gst_pad_event_default (pad, parent, gst_event_ref (event)); + +out: + gst_event_unref (event); + return ret; +} + /* make an element for playback of video with subtitles embedded. * Only used for *raw* video streams. * @@ -1506,7 +2281,7 @@ gen_text_chain (GstPlaySink * playsink) "queue"), ("rendering might be suboptimal")); } else { g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3, - "max-size-bytes", 0, "max-size-time", (gint64) 0, + "max-size-bytes", 0, "max-size-time", (gint64) GST_SECOND, "silent", TRUE, NULL); gst_bin_add (bin, chain->queue); } @@ -1525,7 +2300,7 @@ gen_text_chain (GstPlaySink * playsink) chain->queue = NULL; } /* try to set sync to true but it's no biggie when we can't */ - if ((elem = + if (chain->sink && (elem = gst_play_sink_find_property_sinks (playsink, chain->sink, "sync", G_TYPE_BOOLEAN))) g_object_set (elem, "sync", TRUE, NULL); @@ -1540,7 +2315,7 @@ gen_text_chain (GstPlaySink * playsink) if (textsinkpad == NULL) { GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN, (_("Custom text sink element is not usable.")), - ("fallback to default textoverlay")); + ("fallback to default subtitleoverlay")); } } @@ -1595,13 +2370,21 @@ gen_text_chain (GstPlaySink * playsink) "queue"), ("rendering might be suboptimal")); } else { g_object_set (G_OBJECT (element), "max-size-buffers", 3, - "max-size-bytes", 0, "max-size-time", (gint64) 0, + "max-size-bytes", 0, "max-size-time", (gint64) GST_SECOND, "silent", TRUE, NULL); gst_bin_add (bin, element); - gst_element_link_pads_full (element, "src", chain->overlay, - "subtitle_sink", GST_PAD_LINK_CHECK_TEMPLATE_CAPS); - textsinkpad = gst_element_get_static_pad (element, "sink"); - srcpad = gst_element_get_static_pad (chain->overlay, "src"); + if (gst_element_link_pads_full (element, "src", chain->overlay, + "subtitle_sink", GST_PAD_LINK_CHECK_TEMPLATE_CAPS)) { + textsinkpad = gst_element_get_static_pad (element, "sink"); + srcpad = gst_element_get_static_pad (chain->overlay, "src"); + } else { + gst_bin_remove (bin, chain->sink); + gst_bin_remove (bin, chain->overlay); + chain->sink = NULL; + chain->overlay = NULL; + gst_object_unref (videosinkpad); + videosinkpad = NULL; + } } } } @@ -1635,11 +2418,21 @@ gen_text_chain (GstPlaySink * playsink) if (textsinkpad) { chain->textsinkpad = gst_ghost_pad_new ("text_sink", textsinkpad); gst_object_unref (textsinkpad); + + gst_pad_set_event_function (chain->textsinkpad, + 
GST_DEBUG_FUNCPTR (gst_play_sink_text_sink_event)); + gst_pad_set_chain_function (chain->textsinkpad, + GST_DEBUG_FUNCPTR (gst_play_sink_text_sink_chain)); + gst_element_add_pad (chain->chain.bin, chain->textsinkpad); } if (srcpad) { chain->srcpad = gst_ghost_pad_new ("src", srcpad); gst_object_unref (srcpad); + + gst_pad_set_event_function (chain->srcpad, + GST_DEBUG_FUNCPTR (gst_play_sink_text_src_event)); + gst_element_add_pad (chain->chain.bin, chain->srcpad); } @@ -1797,10 +2590,10 @@ gen_audio_chain (GstPlaySink * playsink, gboolean raw) } if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO) || (!have_volume - && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) { + && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME))) { gboolean use_converters = !(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO); gboolean use_volume = - !have_volume && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME; + !have_volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME); GST_DEBUG_OBJECT (playsink, "creating audioconvert with use-converters %d, use-volume %d", use_converters, use_volume); @@ -1817,7 +2610,7 @@ gen_audio_chain (GstPlaySink * playsink, gboolean raw) } prev = chain->conv; - if (!have_volume && playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME) { + if (!have_volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) { GstPlaySinkAudioConvert *conv = GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv); @@ -1860,6 +2653,16 @@ gen_audio_chain (GstPlaySink * playsink, gboolean raw) GST_DEBUG_OBJECT (playsink, "ghosting sink pad"); pad = gst_element_get_static_pad (head, "sink"); chain->sinkpad = gst_ghost_pad_new ("sink", pad); + + /* sending audio/video flushes break stream changes when the pipeline + * is paused and played again in 0.10 */ +#if 0 + gst_pad_set_event_function (chain->sinkpad, + GST_DEBUG_FUNCPTR (gst_play_sink_audio_sink_event)); + gst_pad_set_chain_function (chain->sinkpad, + GST_DEBUG_FUNCPTR (gst_play_sink_audio_sink_chain)); +#endif + gst_object_unref (pad); gst_element_add_pad (chain->chain.bin, chain->sinkpad); @@ -1915,8 +2718,10 @@ setup_audio_chain (GstPlaySink * playsink, gboolean raw) GstElement *elem; GstPlayAudioChain *chain; GstStateChangeReturn ret; + GstPlaySinkAudioConvert *conv; chain = playsink->audiochain; + conv = GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv); chain->chain.raw = raw; @@ -1965,18 +2770,16 @@ setup_audio_chain (GstPlaySink * playsink, gboolean raw) } g_object_set (chain->conv, "use-volume", FALSE, NULL); - } else { - GstPlaySinkAudioConvert *conv = - GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv); - + } else if (conv) { /* no volume, we need to add a volume element when we can */ - g_object_set (chain->conv, "use-volume", TRUE, NULL); + g_object_set (chain->conv, "use-volume", + ! 
!(playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME), NULL); GST_DEBUG_OBJECT (playsink, "the sink has no volume property"); /* Disconnect signals */ disconnect_chain (chain, playsink); - if (conv->volume) { + if (conv->volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) { chain->volume = conv->volume; chain->mute = chain->volume; @@ -2044,8 +2847,12 @@ gen_vis_chain (GstPlaySink * playsink) gst_bin_add (bin, chain->resample); /* this pad will be used for blocking the dataflow and switching the vis + * plugin, we block right after the queue, this makes it possible for the + * resample and convert to convert to a format supported by the new vis * plugin */ - chain->blockpad = gst_element_get_static_pad (chain->resample, "src"); + chain->blockpad = gst_element_get_static_pad (chain->queue, "src"); + /* this is the pad where the vis is linked to */ + chain->vispeerpad = gst_element_get_static_pad (chain->resample, "src"); if (playsink->visualisation) { GST_DEBUG_OBJECT (playsink, "trying configure vis"); @@ -2100,7 +2907,7 @@ no_audioconvert: post_missing_element_message (playsink, "audioconvert"); GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN, (_("Missing element '%s' - check your GStreamer installation."), - "audioconvert"), ("possibly a liboil version mismatch?")); + "audioconvert"), ("make sure audioconvert isn't blacklisted")); free_chain ((GstPlayChain *) chain); return NULL; } @@ -2137,8 +2944,8 @@ link_failed: * have to construct the final pipeline. Based on the flags we construct the * final output pipelines. */ -gboolean -gst_play_sink_reconfigure (GstPlaySink * playsink) +static gboolean +gst_play_sink_do_reconfigure (GstPlaySink * playsink) { GstPlayFlags flags; gboolean need_audio, need_video, need_deinterlace, need_vis, need_text; @@ -2188,7 +2995,7 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) /* we have a text_pad and we need text rendering, in this case we need a * video_pad to combine the video with the text or visualizations */ - if (need_text && !need_video) { + if (need_text && !need_video && !playsink->text_sink) { if (playsink->video_pad) { need_video = TRUE; } else if (need_audio) { @@ -2245,6 +3052,19 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE); free_chain ((GstPlayChain *) playsink->videochain); playsink->videochain = NULL; + + GST_OBJECT_LOCK (playsink); + if (playsink->overlay_element) + gst_object_unref (playsink->overlay_element); + playsink->overlay_element = NULL; + + if (playsink->colorbalance_element) { + g_signal_handlers_disconnect_by_func (playsink->colorbalance_element, + G_CALLBACK (colorbalance_value_changed_cb), playsink); + gst_object_unref (playsink->colorbalance_element); + } + playsink->colorbalance_element = NULL; + GST_OBJECT_UNLOCK (playsink); } } @@ -2288,6 +3108,8 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) add_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), TRUE); activate_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), TRUE); + gst_pad_unlink (playsink->video_srcpad_stream_synchronizer, + playsink->videochain->sinkpad); gst_pad_link_full (playsink->video_srcpad_stream_synchronizer, playsink->videodeinterlacechain->sinkpad, GST_PAD_LINK_CHECK_NOTHING); } else { @@ -2305,6 +3127,12 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) if (!need_vis && !need_text && (!playsink->textchain || !playsink->text_pad)) { GST_DEBUG_OBJECT (playsink, "ghosting video sinkpad"); + gst_pad_unlink (playsink->video_srcpad_stream_synchronizer, + 
playsink->videochain->sinkpad); + if (playsink->videodeinterlacechain + && playsink->videodeinterlacechain->srcpad) + gst_pad_unlink (playsink->videodeinterlacechain->srcpad, + playsink->videochain->sinkpad); if (need_deinterlace) gst_pad_link_full (playsink->videodeinterlacechain->srcpad, playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING); @@ -2358,6 +3186,20 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) if (playsink->video_pad) gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), NULL); + + GST_OBJECT_LOCK (playsink); + if (playsink->overlay_element) + gst_object_unref (playsink->overlay_element); + playsink->overlay_element = NULL; + + if (playsink->colorbalance_element) { + g_signal_handlers_disconnect_by_func (playsink->colorbalance_element, + G_CALLBACK (colorbalance_value_changed_cb), playsink); + gst_object_unref (playsink->colorbalance_element); + } + playsink->colorbalance_element = NULL; + GST_OBJECT_UNLOCK (playsink); + } if (need_audio) { @@ -2559,25 +3401,27 @@ gst_play_sink_reconfigure (GstPlaySink * playsink) playsink->textchain->textsinkpad, GST_PAD_LINK_CHECK_NOTHING); } - if (need_vis) { - GstPad *srcpad; + if (need_vis || need_video) { + if (need_vis) { + GstPad *srcpad; - srcpad = - gst_element_get_static_pad (playsink->vischain->chain.bin, "src"); - gst_pad_unlink (srcpad, playsink->videochain->sinkpad); - gst_pad_link_full (srcpad, playsink->textchain->videosinkpad, - GST_PAD_LINK_CHECK_NOTHING); - gst_object_unref (srcpad); - } else { - if (need_deinterlace) - gst_pad_link_full (playsink->videodeinterlacechain->srcpad, - playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING); - else - gst_pad_link_full (playsink->video_srcpad_stream_synchronizer, - playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING); + srcpad = + gst_element_get_static_pad (playsink->vischain->chain.bin, "src"); + gst_pad_unlink (srcpad, playsink->videochain->sinkpad); + gst_pad_link_full (srcpad, playsink->textchain->videosinkpad, + GST_PAD_LINK_CHECK_NOTHING); + gst_object_unref (srcpad); + } else { + if (need_deinterlace) + gst_pad_link_full (playsink->videodeinterlacechain->srcpad, + playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING); + else + gst_pad_link_full (playsink->video_srcpad_stream_synchronizer, + playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING); + } + gst_pad_link_full (playsink->textchain->srcpad, + playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING); } - gst_pad_link_full (playsink->textchain->srcpad, - playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING); activate_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE); } @@ -2768,8 +3612,10 @@ update_av_offset (GstPlaySink * playsink) vchain = (GstPlayVideoChain *) playsink->videochain; if (achain && vchain && achain->ts_offset && vchain->ts_offset) { - g_object_set (achain->ts_offset, "ts-offset", MAX (0, -av_offset), NULL); - g_object_set (vchain->ts_offset, "ts-offset", MAX (0, av_offset), NULL); + g_object_set (achain->ts_offset, + "ts-offset", MAX (G_GINT64_CONSTANT (0), -av_offset), NULL); + g_object_set (vchain->ts_offset, + "ts-offset", MAX (G_GINT64_CONSTANT (0), av_offset), NULL); } else { GST_LOG_OBJECT (playsink, "no ts_offset elements"); } @@ -2891,8 +3737,7 @@ is_raw_structure (GstStructure * s) name = gst_structure_get_name (s); - if (g_str_has_prefix (name, "video/x-raw") || - g_str_has_prefix (name, "audio/x-raw")) + if (g_str_equal (name, "video/x-raw") || g_str_equal (name, "audio/x-raw")) return TRUE; return FALSE; } @@ -2949,8 
+3794,7 @@ video_set_blocked (GstPlaySink * playsink, gboolean blocked) if (blocked && playsink->video_block_id == 0) { playsink->video_block_id = gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, - sinkpad_blocked_cb, gst_object_ref (playsink), - (GDestroyNotify) gst_object_unref); + sinkpad_blocked_cb, playsink, NULL); } else if (!blocked && playsink->video_block_id) { gst_pad_remove_probe (opad, playsink->video_block_id); PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO_RAW); @@ -2972,8 +3816,7 @@ audio_set_blocked (GstPlaySink * playsink, gboolean blocked) if (blocked && playsink->audio_block_id == 0) { playsink->audio_block_id = gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, - sinkpad_blocked_cb, gst_object_ref (playsink), - (GDestroyNotify) gst_object_unref); + sinkpad_blocked_cb, playsink, NULL); } else if (!blocked && playsink->audio_block_id) { gst_pad_remove_probe (opad, playsink->audio_block_id); PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO_RAW); @@ -2995,8 +3838,7 @@ text_set_blocked (GstPlaySink * playsink, gboolean blocked) if (blocked && playsink->text_block_id == 0) { playsink->text_block_id = gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, - sinkpad_blocked_cb, gst_object_ref (playsink), - (GDestroyNotify) gst_object_unref); + sinkpad_blocked_cb, playsink, NULL); } else if (!blocked && playsink->text_block_id) { gst_pad_remove_probe (opad, playsink->text_block_id); PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_TEXT); @@ -3007,6 +3849,20 @@ text_set_blocked (GstPlaySink * playsink, gboolean blocked) } } +gboolean +gst_play_sink_reconfigure (GstPlaySink * playsink) +{ + GST_LOG_OBJECT (playsink, "Triggering reconfiguration"); + + GST_PLAY_SINK_LOCK (playsink); + video_set_blocked (playsink, TRUE); + audio_set_blocked (playsink, TRUE); + text_set_blocked (playsink, TRUE); + GST_PLAY_SINK_UNLOCK (playsink); + + return TRUE; +} + static GstPadProbeReturn sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info, gpointer user_data) @@ -3053,7 +3909,7 @@ sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info, playsink->audio_pad_raw); } - gst_play_sink_reconfigure (playsink); + gst_play_sink_do_reconfigure (playsink); video_set_blocked (playsink, FALSE); audio_set_blocked (playsink, FALSE); @@ -3096,12 +3952,56 @@ caps_notify_cb (GstPad * pad, GParamSpec * unused, GstPlaySink * playsink) gst_caps_unref (caps); - if (reconfigure) { - GST_PLAY_SINK_LOCK (playsink); - video_set_blocked (playsink, TRUE); - audio_set_blocked (playsink, TRUE); - text_set_blocked (playsink, TRUE); + if (reconfigure) + gst_play_sink_reconfigure (playsink); +} + +void +gst_play_sink_refresh_pad (GstPlaySink * playsink, GstPad * pad, + GstPlaySinkType type) +{ + gulong *block_id = NULL; + + GST_DEBUG_OBJECT (playsink, "refresh pad %" GST_PTR_FORMAT, pad); + + GST_PLAY_SINK_LOCK (playsink); + if (pad == playsink->video_pad) { + if (type != GST_PLAY_SINK_TYPE_VIDEO_RAW && + type != GST_PLAY_SINK_TYPE_VIDEO) + goto wrong_type; + block_id = &playsink->video_block_id; + } else if (pad == playsink->audio_pad) { + if (type != GST_PLAY_SINK_TYPE_AUDIO_RAW && + type != GST_PLAY_SINK_TYPE_AUDIO) + goto wrong_type; + block_id = &playsink->audio_block_id; + } else if (pad == playsink->text_pad) { + if (type != GST_PLAY_SINK_TYPE_TEXT) + goto wrong_type; + block_id = &playsink->text_block_id; + } + + if (type != GST_PLAY_SINK_TYPE_FLUSHING && (block_id && *block_id == 0)) { + GstPad *blockpad = + GST_PAD_CAST (gst_proxy_pad_get_internal 
(GST_PROXY_PAD (pad))); + + *block_id = + gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, + sinkpad_blocked_cb, playsink, NULL); + PENDING_FLAG_SET (playsink, type); + gst_object_unref (blockpad); + } + GST_PLAY_SINK_UNLOCK (playsink); + + return; + + /* ERRORS */ +wrong_type: + { + GST_WARNING_OBJECT (playsink, "wrong type %u for pad %" GST_PTR_FORMAT, + type, pad); GST_PLAY_SINK_UNLOCK (playsink); + return; } } @@ -3218,8 +4118,7 @@ gst_play_sink_request_pad (GstPlaySink * playsink, GstPlaySinkType type) *block_id = gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, - sinkpad_blocked_cb, gst_object_ref (playsink), - (GDestroyNotify) gst_object_unref); + sinkpad_blocked_cb, playsink, NULL); PENDING_FLAG_SET (playsink, type); gst_object_unref (blockpad); } @@ -3230,6 +4129,7 @@ gst_play_sink_request_pad (GstPlaySink * playsink, GstPlaySinkType type) return res; } + static GstPad * gst_play_sink_request_new_pad (GstElement * element, GstPadTemplate * templ, const gchar * name, const GstCaps * caps) @@ -3281,12 +4181,15 @@ gst_play_sink_release_pad (GstPlaySink * playsink, GstPad * pad) res = &playsink->video_pad; g_signal_handlers_disconnect_by_func (playsink->video_pad, caps_notify_cb, playsink); + video_set_blocked (playsink, FALSE); } else if (pad == playsink->audio_pad) { res = &playsink->audio_pad; g_signal_handlers_disconnect_by_func (playsink->audio_pad, caps_notify_cb, playsink); + audio_set_blocked (playsink, FALSE); } else if (pad == playsink->text_pad) { res = &playsink->text_pad; + text_set_blocked (playsink, FALSE); } else { /* try to release the given pad anyway, these could be the FLUSHING pads. */ res = &pad; @@ -3352,6 +4255,47 @@ gst_play_sink_handle_message (GstBin * bin, GstMessage * message) GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message); break; } + case GST_MESSAGE_ELEMENT:{ + if (gst_is_video_overlay_prepare_window_handle_message (message)) { + GstVideoOverlay *overlay; + + GST_OBJECT_LOCK (playsink); + if (playsink->overlay_element + && GST_OBJECT_CAST (playsink->overlay_element) != + GST_MESSAGE_SRC (message)) { + gst_object_unref (playsink->overlay_element); + playsink->overlay_element = NULL; + } + + if (!playsink->overlay_element) + playsink->overlay_element = + GST_VIDEO_OVERLAY (gst_object_ref (GST_MESSAGE_SRC (message))); + overlay = + GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element)); + GST_OBJECT_UNLOCK (playsink); + + GST_DEBUG_OBJECT (playsink, "Got prepare-xwindow-id message"); + + if (playsink->overlay_handle_set) + gst_video_overlay_set_window_handle (playsink->overlay_element, + playsink->overlay_handle); + if (playsink->overlay_handle_events_set) + gst_video_overlay_handle_events (playsink->overlay_element, + playsink->overlay_handle_events); + if (playsink->overlay_render_rectangle_set) + gst_video_overlay_set_render_rectangle (playsink->overlay_element, + playsink->overlay_x, playsink->overlay_y, + playsink->overlay_width, playsink->overlay_height); + + gst_object_unref (overlay); + gst_message_unref (message); + gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (playsink)); + } else { + GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, + message); + } + break; + } default: GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message); break; @@ -3367,31 +4311,41 @@ static gboolean gst_play_sink_send_event_to_sink (GstPlaySink * playsink, GstEvent * event) { gboolean res = TRUE; - - if (playsink->textchain && playsink->textchain->sink) { - 
-    if ((res = gst_element_send_event (playsink->textchain->chain.bin, event))) {
-      GST_DEBUG_OBJECT (playsink, "Sent event successfully to text sink");
-    } else {
-      GST_DEBUG_OBJECT (playsink, "Event failed when sent to text sink");
+  if (playsink->send_event_mode == MODE_FIRST) {
+    if (playsink->textchain && playsink->textchain->sink) {
+      gst_event_ref (event);
+      if ((res =
+              gst_element_send_event (playsink->textchain->chain.bin, event))) {
+        GST_DEBUG_OBJECT (playsink, "Sent event successfully to text sink");
+      } else {
+        GST_DEBUG_OBJECT (playsink, "Event failed when sent to text sink");
+      }
     }
-  }
-  if (playsink->videochain) {
-    gst_event_ref (event);
-    if ((res = gst_element_send_event (playsink->videochain->chain.bin, event))) {
-      GST_DEBUG_OBJECT (playsink, "Sent event successfully to video sink");
-      goto done;
+    if (playsink->videochain) {
+      gst_event_ref (event);
+      if ((res =
+              gst_element_send_event (playsink->videochain->chain.bin,
+                  event))) {
+        GST_DEBUG_OBJECT (playsink, "Sent event successfully to video sink");
+        goto done;
+      }
+      GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink");
     }
-    GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink");
-  }
-  if (playsink->audiochain) {
-    gst_event_ref (event);
-    if ((res = gst_element_send_event (playsink->audiochain->chain.bin, event))) {
-      GST_DEBUG_OBJECT (playsink, "Sent event successfully to audio sink");
-      goto done;
+    if (playsink->audiochain) {
+      gst_event_ref (event);
+      if ((res =
+              gst_element_send_event (playsink->audiochain->chain.bin,
+                  event))) {
+        GST_DEBUG_OBJECT (playsink, "Sent event successfully to audio sink");
+        goto done;
+      }
+      GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink");
     }
-    GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink");
+  } else {
+    return
+        GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event
+        (GST_ELEMENT_CAST (playsink), event);
   }
 
 done:
@@ -3408,9 +4362,7 @@ gst_play_sink_send_event (GstElement * element, GstEvent * event)
   gboolean res = FALSE;
   GstEventType event_type = GST_EVENT_TYPE (event);
   GstPlaySink *playsink;
-
   playsink = GST_PLAY_SINK_CAST (element);
-
   switch (event_type) {
     case GST_EVENT_SEEK:
      GST_DEBUG_OBJECT (element, "Sending event to a sink");
@@ -3422,10 +4374,8 @@ gst_play_sink_send_event (GstElement * element, GstEvent * event)
       guint64 amount;
       gdouble rate;
       gboolean flush, intermediate;
-
       gst_event_parse_step (event, &format, &amount, &rate, &flush,
           &intermediate);
-
       if (format == GST_FORMAT_BUFFERS) {
         /* for buffers, we will try to step video frames, for other formats we
          * send the step to all sinks */
@@ -3451,18 +4401,21 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
 {
   GstStateChangeReturn ret;
   GstStateChangeReturn bret;
-
   GstPlaySink *playsink;
-
   playsink = GST_PLAY_SINK (element);
-
   switch (transition) {
     case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_segment_init (&playsink->text_segment, GST_FORMAT_UNDEFINED);
+
       playsink->need_async_start = TRUE;
       /* we want to go async to PAUSED until we managed to configure and add the
        * sinks */
       do_async_start (playsink);
       ret = GST_STATE_CHANGE_ASYNC;
+
+      /* block all pads here */
+      if (!gst_play_sink_reconfigure (playsink))
+        ret = GST_STATE_CHANGE_FAILURE;
       break;
     case GST_STATE_CHANGE_PAUSED_TO_READY:
       /* unblock all pads here */
@@ -3490,6 +4443,20 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
         gst_object_unref (playsink->videochain->ts_offset);
         playsink->videochain->ts_offset = NULL;
       }
+
+      GST_OBJECT_LOCK (playsink);
+      if (playsink->overlay_element)
+        gst_object_unref (playsink->overlay_element);
+      playsink->overlay_element = NULL;
+
+      if (playsink->colorbalance_element) {
+        g_signal_handlers_disconnect_by_func (playsink->colorbalance_element,
+            G_CALLBACK (colorbalance_value_changed_cb), playsink);
+        gst_object_unref (playsink->colorbalance_element);
+      }
+      playsink->colorbalance_element = NULL;
+      GST_OBJECT_UNLOCK (playsink);
+
       ret = GST_STATE_CHANGE_SUCCESS;
       break;
     default:
@@ -3599,7 +4566,6 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
       if (playsink->textchain && playsink->textchain->sink)
         gst_bin_remove (GST_BIN_CAST (playsink->textchain->chain.bin),
             playsink->textchain->sink);
-
       if (playsink->audio_sink != NULL)
         gst_element_set_state (playsink->audio_sink, GST_STATE_NULL);
       if (playsink->video_sink != NULL)
@@ -3608,7 +4574,6 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
         gst_element_set_state (playsink->visualisation, GST_STATE_NULL);
       if (playsink->text_sink != NULL)
         gst_element_set_state (playsink->text_sink, GST_STATE_NULL);
-
       free_chain ((GstPlayChain *) playsink->videodeinterlacechain);
       playsink->videodeinterlacechain = NULL;
       free_chain ((GstPlayChain *) playsink->videochain);
@@ -3625,7 +4590,6 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
       break;
   }
   return ret;
-
   /* ERRORS */
 activate_failed:
   {
@@ -3640,7 +4604,6 @@ gst_play_sink_set_property (GObject * object, guint prop_id,
     const GValue * value, GParamSpec * spec)
 {
   GstPlaySink *playsink = GST_PLAY_SINK (object);
-
   switch (prop_id) {
     case PROP_FLAGS:
       gst_play_sink_set_flags (playsink, g_value_get_flags (value));
       break;
@@ -3676,6 +4639,32 @@ gst_play_sink_set_property (GObject * object, guint prop_id,
       gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_TEXT,
           g_value_get_object (value));
       break;
+    case PROP_SEND_EVENT_MODE:
+      playsink->send_event_mode = g_value_get_enum (value);
+      break;
+    case PROP_FORCE_ASPECT_RATIO:{
+      GstPlayVideoChain *chain;
+      GstElement *elem;
+
+      playsink->force_aspect_ratio = g_value_get_boolean (value);
+
+      GST_PLAY_SINK_LOCK (playsink);
+      if (playsink->videochain) {
+        chain = (GstPlayVideoChain *) playsink->videochain;
+
+        if (chain->sink) {
+          elem =
+              gst_play_sink_find_property_sinks (playsink, chain->sink,
+              "force-aspect-ratio", G_TYPE_BOOLEAN);
+
+          if (elem)
+            g_object_set (elem, "force-aspect-ratio",
+                playsink->force_aspect_ratio, NULL);
+        }
+      }
+      GST_PLAY_SINK_UNLOCK (playsink);
+      break;
+    }
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, spec);
       break;
@@ -3687,7 +4676,6 @@ gst_play_sink_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * spec)
 {
   GstPlaySink *playsink = GST_PLAY_SINK (object);
-
   switch (prop_id) {
     case PROP_FLAGS:
       g_value_set_flags (value, gst_play_sink_get_flags (playsink));
       break;
@@ -3726,18 +4714,289 @@ gst_play_sink_get_property (GObject * object, guint prop_id,
       g_value_take_object (value,
           gst_play_sink_get_sink (playsink, GST_PLAY_SINK_TYPE_TEXT));
       break;
+    case PROP_SEND_EVENT_MODE:
+      g_value_set_enum (value, playsink->send_event_mode);
+      break;
+    case PROP_FORCE_ASPECT_RATIO:
+      g_value_set_boolean (value, playsink->force_aspect_ratio);
+      break;
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, spec);
       break;
   }
 }
 
+static void
+gst_play_sink_overlay_expose (GstVideoOverlay * overlay)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *overlay_element;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element)
+    overlay_element =
+        GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  else
+    overlay_element = NULL;
+  GST_OBJECT_UNLOCK (playsink);
+
+  if (overlay_element) {
+    gst_video_overlay_expose (overlay_element);
+    gst_object_unref (overlay_element);
+  }
+}
+
+static void
+gst_play_sink_overlay_handle_events (GstVideoOverlay * overlay,
+    gboolean handle_events)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *overlay_element;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element)
+    overlay_element =
+        GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  else
+    overlay_element = NULL;
+  GST_OBJECT_UNLOCK (playsink);
+
+  playsink->overlay_handle_events_set = TRUE;
+  playsink->overlay_handle_events = handle_events;
+
+  if (overlay_element) {
+    gst_video_overlay_handle_events (overlay_element, handle_events);
+    gst_object_unref (overlay_element);
+  }
+}
+
+static void
+gst_play_sink_overlay_set_render_rectangle (GstVideoOverlay * overlay, gint x,
+    gint y, gint width, gint height)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *overlay_element;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element)
+    overlay_element =
+        GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  else
+    overlay_element = NULL;
+  GST_OBJECT_UNLOCK (playsink);
+
+  playsink->overlay_render_rectangle_set = TRUE;
+  playsink->overlay_x = x;
+  playsink->overlay_y = y;
+  playsink->overlay_width = width;
+  playsink->overlay_height = height;
+
+  if (overlay_element) {
+    gst_video_overlay_set_render_rectangle (overlay_element, x, y, width,
+        height);
+    gst_object_unref (overlay_element);
+  }
+}
+
+static void
+gst_play_sink_overlay_set_window_handle (GstVideoOverlay * overlay,
+    guintptr handle)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (overlay);
+  GstVideoOverlay *overlay_element;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->overlay_element)
+    overlay_element =
+        GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
+  else
+    overlay_element = NULL;
+  GST_OBJECT_UNLOCK (playsink);
+
+  playsink->overlay_handle_set = TRUE;
+  playsink->overlay_handle = handle;
+
+  if (overlay_element) {
+    gst_video_overlay_set_window_handle (overlay_element, handle);
+    gst_object_unref (overlay_element);
+  }
+}
+
+static void
+gst_play_sink_overlay_init (gpointer g_iface, gpointer g_iface_data)
+{
+  GstVideoOverlayInterface *iface = (GstVideoOverlayInterface *) g_iface;
+  iface->expose = gst_play_sink_overlay_expose;
+  iface->handle_events = gst_play_sink_overlay_handle_events;
+  iface->set_render_rectangle = gst_play_sink_overlay_set_render_rectangle;
+  iface->set_window_handle = gst_play_sink_overlay_set_window_handle;
+}
+
+static void
+gst_play_sink_navigation_send_event (GstNavigation * navigation,
+    GstStructure * structure)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (navigation);
+  GstBin *bin = NULL;
+
+  GST_PLAY_SINK_LOCK (playsink);
+  if (playsink->videochain && playsink->videochain->chain.bin)
+    bin = GST_BIN (gst_object_ref (playsink->videochain->chain.bin));
+  GST_PLAY_SINK_UNLOCK (playsink);
+
+  if (bin) {
+    GstElement *nav = gst_bin_get_by_interface (bin, GST_TYPE_NAVIGATION);
+
+    if (nav) {
+      gst_navigation_send_event (GST_NAVIGATION (nav), structure);
+      structure = NULL;
+      gst_object_unref (nav);
+    } else {
+      GstEvent *event = gst_event_new_navigation (structure);
+      structure = NULL;
+      gst_element_send_event (GST_ELEMENT (bin), event);
+    }
+
+    gst_object_unref (bin);
+  }
+
+  if (structure)
+    gst_structure_free (structure);
+}
+
+static void
+gst_play_sink_navigation_init (gpointer g_iface, gpointer g_iface_data)
+{
+  GstNavigationInterface *iface = (GstNavigationInterface *) g_iface;
+
+  iface->send_event = gst_play_sink_navigation_send_event;
+}
+
+static const GList *
+gst_play_sink_colorbalance_list_channels (GstColorBalance * balance)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+
+  return playsink->colorbalance_channels;
+}
+
+static void
+gst_play_sink_colorbalance_set_value (GstColorBalance * balance,
+    GstColorBalanceChannel * proxy, gint value)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+  GList *l;
+  gint i;
+  GstColorBalance *balance_element = NULL;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->colorbalance_element)
+    balance_element =
+        GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
+  GST_OBJECT_UNLOCK (playsink);
+
+  for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
+    GstColorBalanceChannel *proxy_tmp = l->data;
+    gdouble new_val;
+
+    if (proxy_tmp != proxy)
+      continue;
+
+    playsink->colorbalance_values[i] = value;
+
+    if (balance_element) {
+      GstColorBalanceChannel *channel = NULL;
+      const GList *channels, *k;
+
+      channels = gst_color_balance_list_channels (balance_element);
+      for (k = channels; k; k = k->next) {
+        GstColorBalanceChannel *tmp = k->data;
+
+        if (g_strrstr (tmp->label, proxy->label)) {
+          channel = tmp;
+          break;
+        }
+      }
+
+      g_assert (channel);
+
+      /* Convert to [0, 1] range */
+      new_val =
+          ((gdouble) value -
+          (gdouble) proxy->min_value) / ((gdouble) proxy->max_value -
+          (gdouble) proxy->min_value);
+      /* Convert to channel range */
+      new_val =
+          channel->min_value + new_val * ((gdouble) channel->max_value -
+          (gdouble) channel->min_value);
+
+      gst_color_balance_set_value (balance_element, channel,
+          (gint) (new_val + 0.5));
+
+      gst_object_unref (balance_element);
+    }
+
+    gst_color_balance_value_changed (balance, proxy, value);
+    break;
+  }
+}
+
+static gint
+gst_play_sink_colorbalance_get_value (GstColorBalance * balance,
+    GstColorBalanceChannel * proxy)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+  GList *l;
+  gint i;
+
+  for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
+    GstColorBalanceChannel *proxy_tmp = l->data;
+
+    if (proxy_tmp != proxy)
+      continue;
+
+    return playsink->colorbalance_values[i];
+  }
+
+  g_return_val_if_reached (0);
+}
+
+static GstColorBalanceType
+gst_play_sink_colorbalance_get_balance_type (GstColorBalance * balance)
+{
+  GstPlaySink *playsink = GST_PLAY_SINK (balance);
+  GstColorBalance *balance_element = NULL;
+  GstColorBalanceType t = GST_COLOR_BALANCE_SOFTWARE;
+
+  GST_OBJECT_LOCK (playsink);
+  if (playsink->colorbalance_element)
+    balance_element =
+        GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
+  GST_OBJECT_UNLOCK (playsink);
+
+  if (balance_element) {
+    t = gst_color_balance_get_balance_type (balance_element);
+    gst_object_unref (balance_element);
+  }
+
+  return t;
+}
+
+static void
+gst_play_sink_colorbalance_init (gpointer g_iface, gpointer g_iface_data)
+{
+  GstColorBalanceInterface *iface = (GstColorBalanceInterface *) g_iface;
+
+  iface->list_channels = gst_play_sink_colorbalance_list_channels;
+  iface->set_value = gst_play_sink_colorbalance_set_value;
+  iface->get_value = gst_play_sink_colorbalance_get_value;
+  iface->get_balance_type = gst_play_sink_colorbalance_get_balance_type;
+}
 
 gboolean
 gst_play_sink_plugin_init (GstPlugin * plugin)
 {
   GST_DEBUG_CATEGORY_INIT (gst_play_sink_debug, "playsink", 0, "play bin");
-
   return gst_element_register (plugin, "playsink", GST_RANK_NONE,
       GST_TYPE_PLAY_SINK);
 }
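
Usage note (not part of the patch): a minimal sketch of how application code might drive two of the features this diff adds to playsink, the send-event-mode property and the GstVideoOverlay proxying. The element name "playsink" comes from gst_play_sink_plugin_init above; the property string "send-event-mode" and the idea of configuring playsink directly (it is normally created and managed by playbin) are assumptions for illustration only.

/* Sketch under the assumptions stated above. */
#include <gst/gst.h>
#include <gst/video/videooverlay.h>

static void
configure_playsink (GstElement * playsink, guintptr native_window)
{
  /* 1 == MODE_FIRST: send events to the sinks one by one and stop at the
   * first sink that handles them, instead of GstBin's default behaviour. */
  g_object_set (playsink, "send-event-mode", 1, NULL);

  /* playsink itself now implements GstVideoOverlay; the handle is cached and
   * forwarded to the real video sink once the prepare-window-handle element
   * message is seen in gst_play_sink_handle_message(). */
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (playsink),
      native_window);
}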