_track_changed_cb (GESTrackElement * track_element,
GParamSpec * arg G_GNUC_UNUSED, GESAutoTransition * self)
{
+ if (self->frozen) {
+ GST_LOG_OBJECT (self, "Not updating because frozen");
+ return;
+ }
+
if (ges_track_element_get_track (track_element) == NULL) {
GST_DEBUG_OBJECT (self, "Neighboor %" GST_PTR_FORMAT
" removed from track ... auto destructing", track_element);
}
void
-ges_auto_transition_set_previous_source (GESAutoTransition * self,
- GESTrackElement * source)
+ges_auto_transition_set_source (GESAutoTransition * self,
+ GESTrackElement * source, GESEdge edge)
{
_disconnect_from_source (self, self->previous_source);
_connect_to_source (self, source);
- self->previous_source = source;
+
+ if (edge == GES_EDGE_END)
+ self->next_source = source;
+ else
+ self->previous_source = source;
}
static void
gst_object_ref (track));
trans = timeline ?
- ges_timeline_get_auto_transition_at_end (timeline, orig) : NULL;
+ ges_timeline_get_auto_transition_at_edge (timeline, orig,
+ GES_EDGE_END) : NULL;
if (trans) {
trans->frozen = TRUE;
- ges_auto_transition_set_previous_source (trans, copy);
+ ges_auto_transition_set_source (trans, copy, GES_EDGE_START);
transitions = g_list_append (transitions, trans);
}
}
ges_timeline_freeze_auto_transitions (GESTimeline * timeline, gboolean freeze);
G_GNUC_INTERNAL GESAutoTransition *
-ges_timeline_get_auto_transition_at_end (GESTimeline * timeline, GESTrackElement * source);
+ges_timeline_get_auto_transition_at_edge (GESTimeline * timeline, GESTrackElement * source,
+ GESEdge edge);
G_GNUC_INTERNAL gboolean ges_timeline_is_disposed (GESTimeline* timeline);
ges_timeline_get_smart_rendering (GESTimeline *timeline);
G_GNUC_INTERNAL void
-ges_auto_transition_set_previous_source (GESAutoTransition * self, GESTrackElement * source);
+ges_auto_transition_set_source (GESAutoTransition * self, GESTrackElement * source, GESEdge edge);
ges_timeline_create_transition (timeline, prev, next, NULL, layer,
_START (next), duration);
} else {
- GST_INFO ("Already have transition %" GES_FORMAT " between %" GES_FORMAT
- " and %" GES_FORMAT, GES_ARGS (trans), GES_ARGS (prev),
- GES_ARGS (next));
+ GST_INFO ("Already have transition %" GST_PTR_FORMAT " between %" GES_FORMAT
+ " and %" GES_FORMAT, trans, GES_ARGS (prev), GES_ARGS (next));
}
}
}
GESAutoTransition *
-ges_timeline_get_auto_transition_at_end (GESTimeline * timeline,
- GESTrackElement * source)
+ges_timeline_get_auto_transition_at_edge (GESTimeline * timeline,
+ GESTrackElement * source, GESEdge edge)
{
GList *tmp, *auto_transitions;
GESAutoTransition *ret = NULL;
/* We already have a transition linked to one of the elements we want to
* find a transition for */
- if (auto_trans->previous_source == source) {
+ if (edge == GES_EDGE_END && auto_trans->previous_source == source) {
+ ret = gst_object_ref (auto_trans);
+ break;
+ } else if (edge == GES_EDGE_START && auto_trans->next_source == source) {
ret = gst_object_ref (auto_trans);
break;
}
return g_strdup (GES_URI_CLIP (self)->priv->uri);
}
+static GList *
+get_auto_transitions_around_source (GESTrackElement * child)
+{
+ GList *transitions = NULL;
+ GESTimeline *timeline = GES_TIMELINE_ELEMENT_TIMELINE (child);
+ gint i;
+ GESEdge edges[] = { GES_EDGE_START, GES_EDGE_END };
+
+ if (!timeline)
+ return NULL;
+
+ for (i = 0; i < G_N_ELEMENTS (edges); i++) {
+ GESAutoTransition *transition =
+ ges_timeline_get_auto_transition_at_edge (timeline, child, edges[i]);
+ if (transition)
+ transitions = g_list_prepend (transitions, transition);
+ }
+
+ return transitions;
+}
+
static gboolean
extractable_set_asset (GESExtractable * self, GESAsset * asset)
{
GESTimelineElement *element = GES_TIMELINE_ELEMENT (self);
GESLayer *layer = ges_clip_get_layer (clip);
GList *tmp, *children;
- GHashTable *source_by_track;
+ GHashTable *source_by_track, *auto_transitions_on_sources;
GstClockTime max_duration;
GESAsset *prev_asset;
+ GList *transitions = NULL;
+ GESTimeline *timeline = GES_TIMELINE_ELEMENT_TIMELINE (self);
g_return_val_if_fail (GES_IS_URI_CLIP_ASSET (asset), FALSE);
source_by_track = g_hash_table_new_full (NULL, NULL,
gst_object_unref, gst_object_unref);
- children = ges_container_get_children (container, FALSE);
+ auto_transitions_on_sources = g_hash_table_new_full (NULL, NULL,
+ gst_object_unref, (GDestroyNotify) g_list_free);
+ if (timeline)
+ ges_timeline_freeze_auto_transitions (timeline, TRUE);
+
+ children = ges_container_get_children (container, FALSE);
for (tmp = children; tmp; tmp = tmp->next) {
GESTrackElement *child = tmp->data;
- GESTrack *track = ges_track_element_get_track (child);
+ GESTrack *track;
+
/* remove our core children */
- if (ges_track_element_is_core (child)) {
- if (track)
- g_hash_table_insert (source_by_track, gst_object_ref (track),
- gst_object_ref (child));
-
- /* removing the track element from its clip whilst it is in a
- * timeline will remove it from its track */
- /* removing the core element will also empty its non-core siblings
- * from the same track */
- ges_container_remove (container, GES_TIMELINE_ELEMENT (child));
- }
+ if (!ges_track_element_is_core (child))
+ continue;
+
+ track = ges_track_element_get_track (child);
+ if (track)
+ g_hash_table_insert (source_by_track, gst_object_ref (track),
+ gst_object_ref (child));
+
+ transitions = get_auto_transitions_around_source (child);
+ if (transitions)
+ g_hash_table_insert (auto_transitions_on_sources, gst_object_ref (child),
+ transitions);
+
+ /* removing the track element from its clip whilst it is in a
+ * timeline will remove it from its track */
+ /* removing the core element will also empty its non-core siblings
+ * from the same track */
+ ges_container_remove (container, GES_TIMELINE_ELEMENT (child));
}
g_list_free_full (children, g_object_unref);
* the same source! */
for (tmp = container->children; tmp; tmp = tmp->next) {
GESTrackElement *child = tmp->data;
- if (ges_track_element_is_core (child)) {
- GESTrackElement *orig_source = g_hash_table_lookup (source_by_track,
- ges_track_element_get_track (child));
- contains_core = TRUE;
-
- if (orig_source) {
- ges_track_element_copy_properties (GES_TIMELINE_ELEMENT
- (orig_source), GES_TIMELINE_ELEMENT (child));
- ges_track_element_copy_bindings (orig_source, child,
- GST_CLOCK_TIME_NONE);
- }
+ GESTrackElement *orig_source;
+
+ if (!ges_track_element_is_core (child))
+ continue;
+
+ contains_core = TRUE;
+ orig_source = g_hash_table_lookup (source_by_track,
+ ges_track_element_get_track (child));
+
+ if (!orig_source)
+ continue;
+
+ ges_track_element_copy_properties (GES_TIMELINE_ELEMENT
+ (orig_source), GES_TIMELINE_ELEMENT (child));
+ ges_track_element_copy_bindings (orig_source, child,
+ GST_CLOCK_TIME_NONE);
+
+ transitions =
+ g_hash_table_lookup (auto_transitions_on_sources, orig_source);
+ for (; transitions; transitions = transitions->next) {
+ GESAutoTransition *transition = transitions->data;
+
+ if (transition->previous_source == orig_source)
+ ges_auto_transition_set_source (transition, child, GES_EDGE_START);
+ else if (transition->next_source == orig_source)
+ ges_auto_transition_set_source (transition, child, GES_EDGE_END);
}
}
} else {
gst_object_unref (layer);
}
g_hash_table_unref (source_by_track);
+ g_hash_table_unref (auto_transitions_on_sources);
+
+ if (timeline)
+ ges_timeline_freeze_auto_transitions (timeline, FALSE);
if (res) {
g_free (uriclip->priv->uri);
import unittest # noqa
import tempfile # noqa
+try:
+ gi.require_version("GstTranscoder", "1.0")
+ from gi.repository import GstTranscoder
+except ValueError:
+ GstTranscoder = None
+
Gst.init(None)
GES.init()
os.remove(xges_path)
+def can_generate_assets():
+ if GstTranscoder is None:
+ return False, "GstTranscoder is not available"
+
+ if not Gst.ElementFactory.make("testsrcbin"):
+        return False, "testsrcbin is not available"
+
+ return True, None
+
+
+@contextlib.contextmanager
+def created_video_asset(uri=None, num_bufs=30):
+ with tempfile.NamedTemporaryFile(suffix=".ogg") as f:
+ if not uri:
+ uri = Gst.filename_to_uri(f.name)
+ transcoder = GstTranscoder.Transcoder.new("testbin://video,num-buffers=%s" % num_bufs,
+ uri, "application/ogg:video/x-theora:audio/x-vorbis")
+ transcoder.run()
+
+ yield uri
+
+
def get_asset_uri(name):
python_tests_dir = os.path.dirname(os.path.abspath(__file__))
assets_dir = os.path.join(python_tests_dir, "..", "assets")
while len(self.timeline.get_layers()) < layer + 1:
self.timeline.append_layer()
layer = self.timeline.get_layers()[layer]
- clip = GES.Asset.request(asset_type, asset_id).extract()
+ if asset_type == GES.UriClip:
+ asset = GES.UriClipAsset.request_sync(asset_id)
+ else:
+ asset = GES.Asset.request(asset_type, asset_id)
+ clip = asset.extract()
clip.props.start = layer.get_duration()
clip.props.duration = 10
self.assertTrue(layer.add_clip(clip))
import unittest # noqa
from unittest import mock
-try:
- gi.require_version("GstTranscoder", "1.0")
- from gi.repository import GstTranscoder
-except ValueError:
- GstTranscoder = None
-
from . import common
from .common import GESSimpleTimelineTest # noqa
asset = proj.create_asset_sync("file:///png.png", GES.UriClip)
self.assertIsNotNone(asset)
- @unittest.skipIf(GstTranscoder is None, "GstTranscoder is not available")
- @unittest.skipIf(Gst.ElementFactory.make("testsrcbin") is None, "testbinsrc is not available")
+ @unittest.skipUnless(*common.can_generate_assets())
def test_reload_asset(self):
- with tempfile.NamedTemporaryFile(suffix=".ogg") as f:
- uri = Gst.filename_to_uri(f.name)
- transcoder = GstTranscoder.Transcoder.new("testbin://video,num-buffers=30",
- uri, "application/ogg:video/x-theora:audio/x-vorbis")
- transcoder.run()
-
+ with common.created_video_asset() as uri:
asset0 = GES.UriClipAsset.request_sync(uri)
self.assertEqual(asset0.props.duration, Gst.SECOND)
- transcoder = GstTranscoder.Transcoder.new("testbin://video,num-buffers=60",
- uri, "application/ogg:video/x-theora:audio/x-vorbis")
- transcoder.run()
-
- GES.Asset.needs_reload(GES.UriClip, uri)
- asset1 = GES.UriClipAsset.request_sync(uri)
- self.assertEqual(asset1.props.duration, 2 * Gst.SECOND)
- self.assertEqual(asset1, asset0)
-
- transcoder = GstTranscoder.Transcoder.new("testbin://video,num-buffers=90",
- uri, "application/ogg:video/x-theora:audio/x-vorbis")
- transcoder.run()
- mainloop = common.create_main_loop()
- def asset_loaded_cb(_, res, mainloop):
- asset2 = GES.Asset.request_finish(res)
- self.assertEqual(asset2.props.duration, 3 * Gst.SECOND)
- self.assertEqual(asset2, asset0)
- mainloop.quit()
-
- GES.Asset.needs_reload(GES.UriClip, uri)
- GES.Asset.request_async(GES.UriClip, uri, None, asset_loaded_cb, mainloop)
- mainloop.run()
+ with common.created_video_asset(uri, 60) as uri:
+ GES.Asset.needs_reload(GES.UriClip, uri)
+ asset1 = GES.UriClipAsset.request_sync(uri)
+ self.assertEqual(asset1.props.duration, 2 * Gst.SECOND)
+ self.assertEqual(asset1, asset0)
+
+ with common.created_video_asset(uri, 90) as uri:
+ mainloop = common.create_main_loop()
+ def asset_loaded_cb(_, res, mainloop):
+ asset2 = GES.Asset.request_finish(res)
+ self.assertEqual(asset2.props.duration, 3 * Gst.SECOND)
+ self.assertEqual(asset2, asset0)
+ mainloop.quit()
+
+ GES.Asset.needs_reload(GES.UriClip, uri)
+ GES.Asset.request_async(GES.UriClip, uri, None, asset_loaded_cb, mainloop)
+ mainloop.run()
def test_asset_metadata_on_reload(self):
mainloop = GLib.MainLoop()
]
])
+ @unittest.skipUnless(*common.can_generate_assets())
+ def test_auto_transition_type_after_setting_proxy_asset(self):
+ self.track_types = [GES.TrackType.VIDEO]
+ super().setUp()
+
+ self.timeline.props.auto_transition = True
+ with common.created_video_asset() as uri:
+ self.append_clip(asset_type=GES.UriClip, asset_id=uri)
+ self.append_clip(asset_type=GES.UriClip, asset_id=uri).props.start = 5
+ clip1, transition, clip2 = self.layer.get_clips()
+ video_transition, = transition.get_children(True)
+ video_transition.set_transition_type(GES.VideoStandardTransitionType.BAR_WIPE_LR)
+ self.assertEqual(video_transition.get_transition_type(), GES.VideoStandardTransitionType.BAR_WIPE_LR)
+
+ with common.created_video_asset() as uri2:
+ proxy_asset = GES.UriClipAsset.request_sync(uri2)
+ clip1.set_asset(proxy_asset)
+ clip1, transition1, clip2 = self.layer.get_clips()
+
+ video_transition1, = transition1.get_children(True)
+ self.assertEqual(video_transition, video_transition1)
+ self.assertEqual(video_transition.get_transition_type(), GES.VideoStandardTransitionType.BAR_WIPE_LR)
+
def test_frame_info(self):
self.track_types = [GES.TrackType.VIDEO]
super().setUp()
def _snapped_cb(timeline, elem1, elem2, position):
self.snapped_at.append(position)
- Gst.error('%s' % position)
def _snapped_end_cb(timeline, elem1, elem2, position):
if self.snapped_at: # Ignoring first snap end.
self.snapped_at.append(Gst.CLOCK_TIME_NONE)
- Gst.error('%s' % position)
self.timeline.connect("snapping-started", _snapped_cb)
self.timeline.connect("snapping-ended", _snapped_end_cb)