* @short_description: outputs the video stream from a media file as a still
* image.
*
- * Outputs the video stream from a given file as a still frame. The frame
- * chosen will be determined by the in-point property on the track element. For
- * image files, do not set the in-point property.
+ * Outputs the video stream from a given file as a still frame. The frame chosen
+ * will be determined by the in-point property on the track element. For image
+ * files, do not set the in-point property.
+ *
+ * Deprecated: 1.18: This is not used anymore; it has been replaced by
+ * #GESUriSource, which now plugs an `imagefreeze` element when
+ * ges_uri_source_asset_is_image() returns %TRUE.
*/
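For reference, a minimal sketch (not part of this patch, assuming `#include <ges/ges.h>` and an existing `layer`) of the replacement path: applications keep requesting a plain #GESUriClipAsset for the image URI, and the extracted video track element is now a #GESVideoUriSource whose asset reports being an image, so an `imagefreeze` element gets plugged automatically.

static GESClip *
add_still_image (GESLayer * layer, const gchar * image_uri)
{
  GError *error = NULL;
  /* No GESImageSource involved anymore, just a regular uri clip asset */
  GESUriClipAsset *asset = ges_uri_clip_asset_request_sync (image_uri, &error);

  if (!asset) {
    GST_ERROR ("Could not create asset: %s",
        error ? error->message : "unknown error");
    g_clear_error (&error);
    return NULL;
  }

  /* Keep the in-point at 0 for image files; only start/duration matter */
  return ges_layer_add_asset (layer, GES_ASSET (asset), 0, 0, GST_SECOND,
      GES_TRACK_TYPE_UNKNOWN);
}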
#ifdef HAVE_CONFIG_H
#include "config.h"
self->priv = ges_image_source_get_instance_private (self);
}
-/**
- * ges_image_source_new:
- * @uri: the URI the source should control
+/* @uri: the URI the source should control
*
* Creates a new #GESImageSource for the provided @uri.
*
GESImageSource *
ges_image_source_new (gchar * uri)
{
- return g_object_new (GES_TYPE_IMAGE_SOURCE, "uri", uri, "track-type",
- GES_TRACK_TYPE_VIDEO, NULL);
+ GESImageSource *res;
+ GESAsset *asset = ges_asset_request (GES_TYPE_IMAGE_SOURCE, uri, NULL);
+
+ res = GES_IMAGE_SOURCE (ges_asset_extract (asset, NULL));
+ res->uri = g_strdup (uri);
+ gst_object_unref (asset);
+
+ return res;
}
* @title: GESMultiFileSource
* @short_description: outputs the video stream from a sequence of images.
*
- * Outputs the video stream from a given image sequence. The start frame
- * chosen will be determined by the in-point property on the track element.
+ * Outputs the video stream from a given image sequence. The start frame chosen
+ * will be determined by the in-point property on the track element.
+ *
+ * This should not be used anymore; the `imagesequence://` protocol should be
+ * used instead. Check the #imagesequencesrc GStreamer element for more
+ * information.
+ *
+ * Deprecated: 1.18: Use #GESUriSource instead
*/
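A hedged sketch of the suggested migration, assuming an existing `layer`: request the asset through an `imagesequence://` URI instead of `multifile://`. The exact URI syntax below (printf-style frame pattern and `framerate` query parameter) is an assumption; check the imagesequencesrc element documentation for the authoritative format.

GError *error = NULL;
/* Hypothetical URI; the pattern and query parameters are assumptions */
GESUriClipAsset *asset = ges_uri_clip_asset_request_sync (
    "imagesequence:///path/to/frames/%03d.png?framerate=30/1", &error);

if (asset)
  ges_layer_add_asset (layer, GES_ASSET (asset), 0, 0, 5 * GST_SECOND,
      GES_TRACK_TYPE_VIDEO);
else
  g_clear_error (&error);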
#ifdef HAVE_CONFIG_H
#include "config.h"
self->priv = ges_multi_file_source_get_instance_private (self);
}
-/**
- * ges_multi_file_source_new:
- * @uri: the URI the source should control
+/* @uri: the URI the source should control
*
* Creates a new #GESMultiFileSource for the provided @uri.
*
GESMultiFileSource *
ges_multi_file_source_new (gchar * uri)
{
- return g_object_new (GES_TYPE_MULTI_FILE_SOURCE, "uri", uri,
- "track-type", GES_TRACK_TYPE_VIDEO, NULL);
+ GESMultiFileSource *res;
+ GESAsset *asset = ges_asset_request (GES_TYPE_MULTI_FILE_SOURCE, uri, NULL);
+
+ res = GES_MULTI_FILE_SOURCE (ges_asset_extract (asset, NULL));
+ res->uri = g_strdup (uri);
+ gst_object_unref (asset);
+
+ return res;
}
_create_uri_source_asset (GESUriClipAsset * asset,
GstDiscovererStreamInfo * sinfo, GESTrackType type)
{
- GESAsset *tck_filesource_asset;
- GESUriSourceAssetPrivate *priv_tckasset;
+ GESAsset *src_asset;
+ GESUriSourceAssetPrivate *src_priv;
GESUriClipAssetPrivate *priv = asset->priv;
gchar *stream_id =
g_strdup (gst_discoverer_stream_info_get_stream_id (sinfo));
}
if (type == GES_TRACK_TYPE_VIDEO)
- tck_filesource_asset = ges_asset_request (GES_TYPE_VIDEO_URI_SOURCE,
- stream_id, NULL);
+ src_asset = ges_asset_request (GES_TYPE_VIDEO_URI_SOURCE, stream_id, NULL);
else
- tck_filesource_asset = ges_asset_request (GES_TYPE_AUDIO_URI_SOURCE,
- stream_id, NULL);
+ src_asset = ges_asset_request (GES_TYPE_AUDIO_URI_SOURCE, stream_id, NULL);
g_free (stream_id);
- priv_tckasset = GES_URI_SOURCE_ASSET (tck_filesource_asset)->priv;
- priv_tckasset->uri = ges_asset_get_id (GES_ASSET (asset));
- priv_tckasset->sinfo = gst_object_ref (sinfo);
- priv_tckasset->parent_asset = asset;
+ src_priv = GES_URI_SOURCE_ASSET (src_asset)->priv;
+ src_priv->uri = ges_asset_get_id (GES_ASSET (asset));
+ src_priv->sinfo = gst_object_ref (sinfo);
+ src_priv->parent_asset = asset;
ges_track_element_asset_set_track_type (GES_TRACK_ELEMENT_ASSET
- (tck_filesource_asset), type);
+ (src_asset), type);
- priv->asset_trackfilesources = g_list_append (priv->asset_trackfilesources,
- tck_filesource_asset);
+ priv->is_image |=
+ ges_uri_source_asset_is_image (GES_URI_SOURCE_ASSET (src_asset));
+ priv->asset_trackfilesources =
+ g_list_append (priv->asset_trackfilesources, src_asset);
}
static void
supportedformats = GES_TRACK_TYPE_VIDEO;
else
supportedformats |= GES_TRACK_TYPE_VIDEO;
- if (gst_discoverer_video_info_is_image ((GstDiscovererVideoInfo *)
- sinf))
- priv->is_image = TRUE;
type = GES_TRACK_TYPE_VIDEO;
}
uri = g_strdup (priv->uri);
- if (g_str_has_prefix (priv->uri, GES_MULTI_FILE_URI_PREFIX)) {
+ if (g_str_has_prefix (priv->uri, GES_MULTI_FILE_URI_PREFIX))
trackelement = GES_TRACK_ELEMENT (ges_multi_file_source_new (uri));
- } else if (GST_IS_DISCOVERER_VIDEO_INFO (priv->sinfo)
- && gst_discoverer_video_info_is_image ((GstDiscovererVideoInfo *)
- priv->sinfo))
- trackelement = GES_TRACK_ELEMENT (ges_image_source_new (uri));
else if (GST_IS_DISCOVERER_VIDEO_INFO (priv->sinfo))
trackelement = GES_TRACK_ELEMENT (ges_video_uri_source_new (uri));
else
return asset->priv->parent_asset;
}
+/**
+ * ges_uri_source_asset_is_image:
+ * @asset: A #GESUriSourceAsset
+ *
+ * Checks whether @asset contains a single image.
+ *
+ * Returns: %TRUE if the video stream corresponds to an image (i.e. it only
+ * contains one frame)
+ */
+gboolean
+ges_uri_source_asset_is_image (GESUriSourceAsset * asset)
+{
+ g_return_val_if_fail (GES_IS_URI_SOURCE_ASSET (asset), FALSE);
+
+ if (!GST_IS_DISCOVERER_VIDEO_INFO (asset->priv->sinfo))
+ return FALSE;
+
+ return gst_discoverer_video_info_is_image ((GstDiscovererVideoInfo *)
+ asset->priv->sinfo);
+}
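A short usage sketch for the new API, assuming `track_element` is the video source extracted from a #GESUriClip (the Python test added below exercises the same path through the bindings):

GESAsset *asset = ges_extractable_get_asset (GES_EXTRACTABLE (track_element));

if (GES_IS_URI_SOURCE_ASSET (asset) &&
    ges_uri_source_asset_is_image (GES_URI_SOURCE_ASSET (asset)))
  GST_INFO ("%s is a still image, an imagefreeze will be plugged for it",
      ges_asset_get_id (asset));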
+
void
_ges_uri_asset_cleanup (void)
{
const gchar * ges_uri_source_asset_get_stream_uri (GESUriSourceAsset *asset);
GES_API
const GESUriClipAsset *ges_uri_source_asset_get_filesource_asset (GESUriSourceAsset *asset);
+GES_API
+gboolean ges_uri_source_asset_is_image (GESUriSourceAsset *asset);
G_END_DECLS
\ No newline at end of file
gst_element_post_message (element, msg);
}
-static GstElement *
-ges_video_source_create_element (GESTrackElement * trksrc)
+static gboolean
+ges_video_source_create_filters (GESVideoSource * self, GPtrArray * elements,
+ gboolean needs_converters)
{
- GstElement *topbin;
- GstElement *sub_element;
- GESVideoSourceClass *source_class = GES_VIDEO_SOURCE_GET_CLASS (trksrc);
- GESVideoSource *self;
+ GESTrackElement *trksrc = GES_TRACK_ELEMENT (self);
GstElement *positioner, *videoflip, *capsfilter, *deinterlace;
const gchar *positioner_props[] =
{ "alpha", "posx", "posy", "width", "height", NULL };
const gchar *deinterlace_props[] = { "mode", "fields", "tff", NULL };
const gchar *videoflip_props[] = { "video-direction", NULL };
- gboolean needs_converters = TRUE;
- GPtrArray *elements;
-
- if (!source_class->create_source)
- return NULL;
- sub_element = source_class->create_source (trksrc);
-
- self = (GESVideoSource *) trksrc;
- if (source_class->ABI.abi.needs_converters)
- needs_converters = source_class->ABI.abi.needs_converters (self);
-
- elements = g_ptr_array_new ();
g_ptr_array_add (elements, gst_element_factory_make ("queue", NULL));
/* That positioner will add metadata to buffers according to its
deinterlace = gst_element_factory_make ("deinterlace", "deinterlace");
if (deinterlace == NULL) {
- post_missing_element_message (sub_element, "deinterlace");
+ post_missing_element_message (ges_track_element_get_nleobject (trksrc),
+ "deinterlace");
- GST_ELEMENT_WARNING (sub_element, CORE, MISSING_PLUGIN,
+ GST_ELEMENT_WARNING (ges_track_element_get_nleobject (trksrc), CORE,
+ MISSING_PLUGIN,
("Missing element '%s' - check your GStreamer installation.",
"deinterlace"), ("deinterlacing won't work"));
} else {
ges_track_element_add_children_props (trksrc, deinterlace, NULL, NULL,
deinterlace_props);
}
- topbin = ges_source_create_topbin ("videosrcbin", sub_element, elements);
- g_ptr_array_free (elements, TRUE);
self->priv->positioner = GST_FRAME_POSITIONNER (positioner);
self->priv->positioner->scale_in_compositor =
self->priv->capsfilter = capsfilter;
+ return TRUE;
+}
+
+static GstElement *
+ges_video_source_create_element (GESTrackElement * trksrc)
+{
+ GstElement *topbin;
+ GstElement *sub_element;
+ GESVideoSourceClass *source_class = GES_VIDEO_SOURCE_GET_CLASS (trksrc);
+ GESVideoSource *self;
+ gboolean needs_converters = TRUE;
+ GPtrArray *elements;
+
+ if (!source_class->create_source)
+ return NULL;
+
+ sub_element = source_class->create_source (trksrc);
+
+ self = (GESVideoSource *) trksrc;
+ if (source_class->ABI.abi.needs_converters)
+ needs_converters = source_class->ABI.abi.needs_converters (self);
+
+ elements = g_ptr_array_new ();
+ g_assert (source_class->ABI.abi.create_filters);
+ if (!source_class->ABI.abi.create_filters (self, elements, needs_converters)) {
+ g_ptr_array_free (elements, TRUE);
+
+ return NULL;
+ }
+
+ topbin = ges_source_create_topbin ("videosrcbin", sub_element, elements);
+ g_ptr_array_free (elements, TRUE);
+
return topbin;
}
track_element_class->ABI.abi.default_track_type = GES_TRACK_TYPE_VIDEO;
video_source_class->create_source = NULL;
+ video_source_class->ABI.abi.create_filters = ges_video_source_create_filters;
}
static void
gboolean disable_scale_in_compositor;
gboolean (*needs_converters)(GESVideoSource *self);
gboolean (*get_natural_size)(GESVideoSource* self, gint* width, gint* height);
+ gboolean (*create_filters)(GESVideoSource *self, GPtrArray *filters, gboolean needs_converters);
} abi;
} ABI;
};
/* GObject VMethods */
+static gboolean
+ges_video_uri_source_create_filters (GESVideoSource * source,
+ GPtrArray * elements, gboolean needs_converters)
+{
+ GESAsset *asset = ges_extractable_get_asset (GES_EXTRACTABLE (source));
+
+ g_assert (GES_IS_URI_SOURCE_ASSET (asset));
+ if (!GES_VIDEO_SOURCE_CLASS (ges_video_uri_source_parent_class)
+ ->ABI.abi.create_filters (source, elements, needs_converters))
+ return FALSE;
+
+ if (ges_uri_source_asset_is_image (GES_URI_SOURCE_ASSET (asset)))
+ g_ptr_array_add (elements, gst_element_factory_make ("imagefreeze", NULL));
+
+ return TRUE;
+}
static void
ges_video_uri_source_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
- GESVideoUriSource *uriclip = GES_VIDEO_URI_SOURCE (object);
+ GESVideoUriSource *urisource = GES_VIDEO_URI_SOURCE (object);
switch (property_id) {
case PROP_URI:
- g_value_set_string (value, uriclip->uri);
+ g_value_set_string (value, urisource->uri);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
ges_video_uri_source_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
- GESVideoUriSource *uriclip = GES_VIDEO_URI_SOURCE (object);
+ GESVideoUriSource *urisource = GES_VIDEO_URI_SOURCE (object);
switch (property_id) {
case PROP_URI:
- if (uriclip->uri) {
- GST_WARNING_OBJECT (object, "Uri already set to %s", uriclip->uri);
+ if (urisource->uri) {
+ GST_WARNING_OBJECT (object, "Uri already set to %s", urisource->uri);
return;
}
- uriclip->uri = g_value_dup_string (value);
+ urisource->uri = g_value_dup_string (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
static void
ges_video_uri_source_dispose (GObject * object)
{
- GESVideoUriSource *uriclip = GES_VIDEO_URI_SOURCE (object);
+ GESVideoUriSource *urisource = GES_VIDEO_URI_SOURCE (object);
- if (uriclip->uri)
- g_free (uriclip->uri);
+ g_free (urisource->uri);
G_OBJECT_CLASS (ges_video_uri_source_parent_class)->dispose (object);
}
ges_video_uri_source_needs_converters;
source_class->ABI.abi.get_natural_size =
ges_video_uri_source_get_natural_size;
+ source_class->ABI.abi.create_filters = ges_video_uri_source_create_filters;
}
static void
fail_unless (GES_TIMELINE_ELEMENT_PARENT (track_element) ==
GES_TIMELINE_ELEMENT (clip));
fail_unless (ges_track_element_get_track (track_element) == v);
- fail_unless (GES_IS_IMAGE_SOURCE (track_element));
+ fail_unless (GES_IS_VIDEO_URI_SOURCE (track_element));
ASSERT_OBJECT_REFCOUNT (track_element, "1 in track, 1 in clip 2 in timeline",
3);
self.assertTrue(clip2.remove(clip2_child))
self.assertTrue(self.layer.remove_clip(clip2))
- self.assertTrue(clip1.add(clip2_child))
\ No newline at end of file
+ self.assertTrue(clip1.add(clip2_child))
+
+ def test_image_source_asset(self):
+ asset = GES.UriClipAsset.request_sync(common.get_asset_uri("png.png"))
+ clip = self.layer.add_asset(asset, 0, 0, Gst.SECOND, GES.TrackType.UNKNOWN)
+
+ image_src, = clip.get_children(True)
+
+ self.assertTrue(image_src.get_asset().is_image())
+ self.assertTrue(isinstance(image_src, GES.VideoUriSource))
+ imagefreeze, = [e for e in image_src.get_nleobject().iterate_recurse()
+ if e.get_factory().get_name() == "imagefreeze"]
+
+ asset = GES.UriClipAsset.request_sync(common.get_asset_uri("audio_video.ogg"))
+ clip = self.layer.add_asset(asset, Gst.SECOND, 0, Gst.SECOND, GES.TrackType.VIDEO)
+ video_src, = clip.get_children(True)
+ self.assertEqual([e for e in video_src.get_nleobject().iterate_recurse()
+ if e.get_factory().get_name() == "imagefreeze"], [])
\ No newline at end of file