return FALSE;
}
+
+/**
+ * ges_clip_asset_get_frame_time:
+ * @self: The #GESClipAsset for which to compute the timestamp of the
+ * specified frame
+ * @frame_number: The frame number for which to get the timestamp, in the time
+ * scale of the media of @self
+ *
+ * Converts the given frame number into a timestamp, using the "natural" frame
+ * rate of the asset.
+ *
+ * You can use this to reference a specific frame in a media file and use the
+ * resulting timestamp as, for example, the `in-point` or `max-duration` of a
+ * #GESClip.
+ *
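+ * For example, a minimal sketch (assuming `asset` is the #GESClipAsset used
+ * by an existing #GESClip `clip`) that uses the timestamp of the asset's 25th
+ * frame as the clip's in-point:
+ * |[<!-- language="C" -->
+ *   GstClockTime in_point = ges_clip_asset_get_frame_time (asset, 25);
+ *
+ *   if (GST_CLOCK_TIME_IS_VALID (in_point))
+ *     ges_timeline_element_set_inpoint (GES_TIMELINE_ELEMENT (clip), in_point);
+ * ]|
+ *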
+ * Returns: The timestamp corresponding to @frame_number in the source of
+ * @self, in the media's own time scale, or #GST_CLOCK_TIME_NONE if the clip
+ * asset does not have a natural frame rate.
+ */
+GstClockTime
+ges_clip_asset_get_frame_time (GESClipAsset * self, GESFrameNumber frame_number)
+{
+ gint fps_n, fps_d;
+
+ g_return_val_if_fail (GES_IS_CLIP_ASSET (self), GST_CLOCK_TIME_NONE);
+ g_return_val_if_fail (GES_FRAME_NUMBER_IS_VALID (frame_number),
+ GST_CLOCK_TIME_NONE);
+
+  if (!ges_clip_asset_get_natural_framerate (self, &fps_n, &fps_d))
+    return GST_CLOCK_TIME_NONE;
+
+  return gst_util_uint64_scale_ceil (frame_number, fps_d * GST_SECOND, fps_n);
+}
GESTrackType ges_clip_asset_get_supported_formats (GESClipAsset *self);
GES_API
gboolean ges_clip_asset_get_natural_framerate (GESClipAsset* self, gint* framerate_n, gint* framerate_d);
+GES_API
+GstClockTime ges_clip_asset_get_frame_time (GESClipAsset* self, GESFrameNumber frame_number);
G_END_DECLS
return ret;
}
+
+/**
+ * ges_clip_get_timeline_time_from_source_frame:
+ * @clip: A #GESClip
+ * @frame_number: The frame number, in the source of @clip, to get the
+ * corresponding timestamp of in timeline coordinates
+ * @err: A #GError to be set on errors, or %NULL to ignore
+ *
+ * Converts a source frame number into a timeline #GstClockTime. The resulting
+ * timestamp can be used, for example, to seek to a particular frame in the
+ * timeline, or to later edit @clip at that position.
+ *
+ * This method is typically useful when you want to trim the clip to a
+ * particular source frame.
+ *
+ * The returned timestamp is in the global #GESTimeline time coordinates of
+ * @clip, not in its internal time coordinates. In practice, this means that
+ * you cannot use it to set the clip's #GESTimelineElement:in-point, but you
+ * can use it with the timeline editing API, for example as the @position
+ * argument of ges_timeline_element_edit().
+ *
+ * Note that you can get the frame timestamp of a particular clip asset with
+ * ges_clip_asset_get_frame_time().
+ *
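+ * For example, a minimal sketch (assuming `clip` is part of a timeline played
+ * by the #GESPipeline `pipeline`) that seeks the output to the clip's source
+ * frame 120:
+ * |[<!-- language="C" -->
+ *   GError *error = NULL;
+ *   GstClockTime ts =
+ *       ges_clip_get_timeline_time_from_source_frame (clip, 120, &error);
+ *
+ *   if (GST_CLOCK_TIME_IS_VALID (ts))
+ *     gst_element_seek_simple (GST_ELEMENT (pipeline), GST_FORMAT_TIME,
+ *         GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, ts);
+ *   g_clear_error (&error);
+ * ]|
+ *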
+ * Returns: The timestamp, in timeline coordinates, corresponding to
+ * @frame_number in the source of @clip, or #GST_CLOCK_TIME_NONE if an error
+ * occurred.
+ */
+GstClockTime
+ges_clip_get_timeline_time_from_source_frame (GESClip * clip,
+ GESFrameNumber frame_number, GError ** err)
+{
+ GstClockTime frame_ts;
+ GESClipAsset *asset;
+ GstClockTimeDiff inpoint_diff;
+
+ g_return_val_if_fail (GES_IS_CLIP (clip), GST_CLOCK_TIME_NONE);
+ g_return_val_if_fail (!err || !*err, GST_CLOCK_TIME_NONE);
+
+ if (!GES_FRAME_NUMBER_IS_VALID (frame_number))
+ return GST_CLOCK_TIME_NONE;
+
+ asset = GES_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE (clip)));
+ frame_ts = ges_clip_asset_get_frame_time (asset, frame_number);
+ if (!GST_CLOCK_TIME_IS_VALID (frame_ts))
+ return GST_CLOCK_TIME_NONE;
+
+ inpoint_diff = GST_CLOCK_DIFF (frame_ts, GES_TIMELINE_ELEMENT_INPOINT (clip));
+ if (GST_CLOCK_DIFF (inpoint_diff, _START (clip)) < 0) {
+ g_set_error (err, GES_ERROR, GES_ERROR_INVALID_FRAME_NUMBER,
+ "Requested frame %" G_GINT64_FORMAT
+ " would be outside the timeline.", frame_number);
+ return GST_CLOCK_TIME_NONE;
+ }
+
+ return GST_CLOCK_DIFF (inpoint_diff, _START (clip));
+}
GES_API
GESClip* ges_clip_split (GESClip *clip, guint64 position);
-G_END_DECLS
\ No newline at end of file
+GES_API
+GstClockTime ges_clip_get_timeline_time_from_source_frame (GESClip * clip,
+ GESFrameNumber frame_number,
+ GError ** err);
+
+G_END_DECLS
goto done;
}
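+  /* A string of the form "fN" denotes a frame number and is accepted as a
+   * time value */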
+ if (G_VALUE_TYPE (gvalue) == G_TYPE_STRING) {
+ const gchar *v = g_value_get_string (gvalue);
+ return v && v[0] == 'f';
+ }
+
if (G_VALUE_TYPE (gvalue) == GST_TYPE_CLOCK_TIME)
return 1;
GES_ERROR_ASSET_WRONG_ID,
GES_ERROR_ASSET_LOADING,
GES_ERROR_FORMATTER_MALFORMED_INPUT_FILE,
+ GES_ERROR_INVALID_FRAME_NUMBER,
} GESError;
G_END_DECLS
\ No newline at end of file
G_GNUC_INTERNAL void
timeline_create_transitions (GESTimeline * timeline, GESTrackElement * track_element);
+G_GNUC_INTERNAL void timeline_get_framerate (GESTimeline *self, gint *fps_n,
+ gint *fps_d);
+
G_GNUC_INTERNAL
void
track_resort_and_fill_gaps (GESTrack *track);
G_GNUC_INTERNAL void
ges_idle_add (GSourceFunc func, gpointer udata, GDestroyNotify notify);
+G_GNUC_INTERNAL gboolean
+ges_util_structure_get_clocktime (GstStructure *structure, const gchar *name,
+ GstClockTime *val, GESFrameNumber *frames);
+
/****************************************************
* GESContainer *
#endif
#include "ges-structured-interface.h"
+#include "ges-internal.h"
#include <string.h>
#define LAST_CONTAINER_QDATA g_quark_from_string("ges-structured-last-container")
#define LAST_CHILD_QDATA g_quark_from_string("ges-structured-last-child")
-static gboolean
-_get_clocktime (GstStructure * structure, const gchar * name, gpointer var)
-{
- gboolean found = FALSE;
- GstClockTime *val = (GstClockTime *) var;
-
- const GValue *gvalue = gst_structure_get_value (structure, name);
-
- if (gvalue) {
- if (G_VALUE_TYPE (gvalue) == GST_TYPE_CLOCK_TIME) {
- *val = (GstClockTime) g_value_get_uint64 (gvalue);
- found = TRUE;
- } else if (G_VALUE_TYPE (gvalue) == G_TYPE_UINT64) {
- *val = (GstClockTime) g_value_get_uint64 (gvalue);
- found = TRUE;
- } else if (G_VALUE_TYPE (gvalue) == G_TYPE_UINT) {
- *val = (GstClockTime) g_value_get_uint (gvalue);
- found = TRUE;
- } else if (G_VALUE_TYPE (gvalue) == G_TYPE_INT) {
- *val = (GstClockTime) g_value_get_int (gvalue);
- found = TRUE;
- } else if (G_VALUE_TYPE (gvalue) == G_TYPE_INT64) {
- *val = (GstClockTime) g_value_get_int64 (gvalue);
- found = TRUE;
- } else if (G_VALUE_TYPE (gvalue) == G_TYPE_DOUBLE) {
- gdouble d = g_value_get_double (gvalue);
-
- found = TRUE;
- if (d == -1.0)
- *val = GST_CLOCK_TIME_NONE;
- else {
- *val = d * GST_SECOND;
- *val = GST_ROUND_UP_4 (*val);
- }
- }
- }
-
- return found;
-}
-
#define GET_AND_CHECK(name,type,var,label) G_STMT_START {\
gboolean found = FALSE; \
\
if (type == GST_TYPE_CLOCK_TIME) {\
- found = _get_clocktime(structure,name,var);\
+    found = ges_util_structure_get_clocktime (structure, name, (GstClockTime *) var, NULL);\
}\
else { \
found = gst_structure_get (structure, name, type, var, NULL); \
*var = def; \
} G_STMT_END
-#define TRY_GET(name,type,var,def) G_STMT_START {\
- if (type == GST_TYPE_CLOCK_TIME) {\
- if (!_get_clocktime(structure,name,var))\
- *var = def; \
- } else if (!gst_structure_get (structure, name, type, var, NULL)) {\
- *var = def; \
- } \
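+/* Fetch a field that may hold either a timestamp (stored in @var) or a frame
+ * number given as an "fN" string (stored in @var_frames) */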
+#define TRY_GET_TIME(name, var, var_frames, def) G_STMT_START { \
+ if (!ges_util_structure_get_clocktime (structure, name, var, var_frames)) { \
+ *var = def; \
+ *var_frames = GES_FRAME_NUMBER_NONE; \
+ } \
+} G_STMT_END
+
+#define TRY_GET(name, type, var, def) G_STMT_START {\
+ g_assert (type != GST_TYPE_CLOCK_TIME); \
+ if (!gst_structure_get (structure, name, type, var, NULL))\
+ *var = def; \
} G_STMT_END
typedef struct
gboolean res = FALSE;
GESTrackType track_types = GES_TRACK_TYPE_UNKNOWN;
+ GESFrameNumber start_frame = GES_FRAME_NUMBER_NONE, inpoint_frame =
+ GES_FRAME_NUMBER_NONE, duration_frame = GES_FRAME_NUMBER_NONE;
GstClockTime duration = 1 * GST_SECOND, inpoint = 0, start =
GST_CLOCK_TIME_NONE;
if (layer_priority == -1)
TRY_GET ("layer", G_TYPE_INT, &layer_priority, -1);
TRY_GET_STRING ("type", &type_string, "GESUriClip");
- TRY_GET ("start", GST_TYPE_CLOCK_TIME, &start, GST_CLOCK_TIME_NONE);
- TRY_GET ("inpoint", GST_TYPE_CLOCK_TIME, &inpoint, 0);
- TRY_GET ("duration", GST_TYPE_CLOCK_TIME, &duration, GST_CLOCK_TIME_NONE);
+ TRY_GET_TIME ("start", &start, &start_frame, GST_CLOCK_TIME_NONE);
+ TRY_GET_TIME ("inpoint", &inpoint, &inpoint_frame, 0);
+ TRY_GET_TIME ("duration", &duration, &duration_frame, GST_CLOCK_TIME_NONE);
TRY_GET_STRING ("track-types", &track_types_str, NULL);
TRY_GET_STRING ("project-uri", &nested_timeline_id, NULL);
goto beach;
}
+ if (GES_FRAME_NUMBER_IS_VALID (start_frame))
+ start = ges_timeline_get_frame_time (timeline, start_frame);
+
+ if (GES_FRAME_NUMBER_IS_VALID (inpoint_frame)) {
+ inpoint =
+ ges_clip_asset_get_frame_time (GES_CLIP_ASSET (asset), inpoint_frame);
+ if (!GST_CLOCK_TIME_IS_VALID (inpoint)) {
+ *error =
+ g_error_new (GES_ERROR, 0, "Could not get inpoint from frame %"
+ G_GINT64_FORMAT, inpoint_frame);
+ goto beach;
+ }
+ }
+
+ if (GES_FRAME_NUMBER_IS_VALID (duration_frame)) {
+ duration = ges_timeline_get_frame_time (timeline, duration_frame);
+ }
+
if (GES_IS_URI_CLIP_ASSET (asset) && !GST_CLOCK_TIME_IS_VALID (duration)) {
duration = GST_CLOCK_DIFF (inpoint,
ges_uri_clip_asset_get_duration (GES_URI_CLIP_ASSET (asset)));
}
+/* Accept @self == NULL, making it use default framerate */
+void
+timeline_get_framerate (GESTimeline * self, gint * fps_n, gint * fps_d)
+{
+  GList *tmp;
+
+  *fps_n = *fps_d = -1;
+
+  if (!self)
+    goto done;
+
+  LOCK_DYN (self);
+ for (tmp = self->tracks; tmp; tmp = tmp->next) {
+ if (GES_IS_VIDEO_TRACK (tmp->data)) {
+ GstCaps *restriction = ges_track_get_restriction_caps (tmp->data);
+ gint i;
+
+ for (i = 0; i < gst_caps_get_size (restriction); i++) {
+ gint n, d;
+
+ if (!gst_structure_get_fraction (gst_caps_get_structure (restriction,
+ i), "framerate", &n, &d))
+ continue;
+
+ if (*fps_n != -1 && *fps_d != -1 && !(n == *fps_n && d == *fps_d)) {
+ GST_WARNING_OBJECT (self,
+ "Various framerates specified, this is not supported"
+ " First one will be used.");
+ continue;
+ }
+
+ *fps_n = n;
+ *fps_d = d;
+ }
+ gst_caps_unref (restriction);
+ }
+ }
+ UNLOCK_DYN (self);
+
+done:
+ if (*fps_n == -1 && *fps_d == -1) {
+ GST_INFO_OBJECT (self,
+ "No framerate found, using default " G_STRINGIFY (FRAMERATE_N) "/ "
+ G_STRINGIFY (FRAMERATE_D));
+ *fps_n = DEFAULT_FRAMERATE_N;
+ *fps_d = DEFAULT_FRAMERATE_D;
+ }
+}
+
gboolean
ges_timeline_trim_object_simple (GESTimeline * timeline,
GESTimelineElement * element, guint32 new_layer_priority,
return TRUE;
}
+
+/**
+ * ges_timeline_get_frame_time:
+ * @self: The #GESTimeline on which to retrieve the timestamp for @frame_number
+ * @frame_number: The frame number to get the corresponding timestamp of in the
+ * timeline coordinates
+ *
+ * This method allows you to convert a timeline output frame number into a
+ * timeline #GstClockTime. For example, this time could be used to seek to a
+ * particular frame in the timeline's output, or as the edit position for
+ * an element within the timeline.
+ *
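+ * For example, a minimal sketch (assuming `timeline` is a #GESTimeline played
+ * by the #GESPipeline `pipeline`) that seeks to the timeline's 300th output
+ * frame:
+ * |[<!-- language="C" -->
+ *   GstClockTime ts = ges_timeline_get_frame_time (timeline, 300);
+ *
+ *   gst_element_seek_simple (GST_ELEMENT (pipeline), GST_FORMAT_TIME,
+ *       GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, ts);
+ * ]|
+ *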
+ * Returns: The timestamp corresponding to @frame_number in the output of @self.
+ */
+GstClockTime
+ges_timeline_get_frame_time (GESTimeline * self, GESFrameNumber frame_number)
+{
+ gint fps_n, fps_d;
+
+ g_return_val_if_fail (GES_IS_TIMELINE (self), GST_CLOCK_TIME_NONE);
+ g_return_val_if_fail (GES_FRAME_NUMBER_IS_VALID (frame_number),
+ GST_CLOCK_TIME_NONE);
+
+ timeline_get_framerate (self, &fps_n, &fps_d);
+
+  return gst_util_uint64_scale_ceil (frame_number, fps_d * GST_SECOND, fps_n);
+}
+
+/**
+ * ges_timeline_get_frame_at:
+ * @self: A #GESTimeline
+ * @timestamp: The timestamp to get the corresponding frame number of
+ *
+ * This method allows you to convert a timeline #GstClockTime into its
+ * corresponding #GESFrameNumber in the timeline's output.
+ *
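+ * For example, a minimal sketch (assuming `timeline` is a #GESTimeline and
+ * `position` a timestamp previously queried from the pipeline) that prints
+ * the output frame currently reached:
+ * |[<!-- language="C" -->
+ *   GESFrameNumber frame = ges_timeline_get_frame_at (timeline, position);
+ *
+ *   g_print ("Output frame: %" G_GINT64_FORMAT "\n", frame);
+ * ]|
+ *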
+ * Returns: The frame number @timestamp corresponds to.
+ */
+GESFrameNumber
+ges_timeline_get_frame_at (GESTimeline * self, GstClockTime timestamp)
+{
+ gint fps_n, fps_d;
+
+ g_return_val_if_fail (GES_IS_TIMELINE (self), GES_FRAME_NUMBER_NONE);
+ g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (timestamp),
+ GES_FRAME_NUMBER_NONE);
+
+ timeline_get_framerate (self, &fps_n, &fps_d);
+
+ return gst_util_uint64_scale (timestamp, fps_n, fps_d * GST_SECOND);
+}
GES_API
gboolean ges_timeline_move_layer (GESTimeline *timeline, GESLayer *layer, guint new_layer_priority);
+GES_API
+GstClockTime ges_timeline_get_frame_time (GESTimeline *self,
+ GESFrameNumber frame_number);
+
+GES_API
+GESFrameNumber ges_timeline_get_frame_at (GESTimeline *self,
+ GstClockTime timestamp);
+
G_END_DECLS
*/
#define GES_PADDING_LARGE 20
+/**
+ * GESFrameNumber:
+ *
+ * A datatype to hold a frame number.
+ */
+typedef gint64 GESFrameNumber;
+
+/**
+ * GES_FRAME_NUMBER_NONE: (value 9223372036854775807) (type GESFrameNumber)
+ *
+ * Constant to define an undefined frame number
+ */
+#define GES_FRAME_NUMBER_NONE ((gint64) 9223372036854775807)
+
+/**
+ * GES_FRAME_NUMBER_IS_VALID:
+ * @frames: The #GESFrameNumber to check
+ *
+ * Tests if a given #GESFrameNumber represents a valid frame number.
+#define GES_FRAME_NUMBER_IS_VALID(frames) (((GESFrameNumber) frames) != GES_FRAME_NUMBER_NONE)
+
+/**
+ * GES_TYPE_FRAME_NUMBER:
+ *
+ * The #GType of a #GESFrameNumber.
+ */
+#define GES_TYPE_FRAME_NUMBER G_TYPE_UINT64
+
/* Type definitions */
typedef struct _GESTimeline GESTimeline;
return (strstr (klass, "Compositor") != NULL);
}
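+/* Fetch a time value from @structure. Besides the usual numeric
+ * representations of a GstClockTime, a string of the form "fN" (e.g. "f30")
+ * is accepted: the frame number is returned through @frames and @val is set
+ * to GST_CLOCK_TIME_NONE. */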
+gboolean
+ges_util_structure_get_clocktime (GstStructure * structure, const gchar * name,
+ GstClockTime * val, GESFrameNumber * frames)
+{
+ gboolean found = FALSE;
+
+ const GValue *gvalue;
+
+ if (!val && !frames)
+ return FALSE;
+
+ gvalue = gst_structure_get_value (structure, name);
+ if (!gvalue)
+ return FALSE;
+
+ if (frames)
+ *frames = GES_FRAME_NUMBER_NONE;
+
+ found = TRUE;
+ if (val && G_VALUE_TYPE (gvalue) == GST_TYPE_CLOCK_TIME) {
+ *val = (GstClockTime) g_value_get_uint64 (gvalue);
+ } else if (val && G_VALUE_TYPE (gvalue) == G_TYPE_UINT64) {
+ *val = (GstClockTime) g_value_get_uint64 (gvalue);
+ } else if (val && G_VALUE_TYPE (gvalue) == G_TYPE_UINT) {
+ *val = (GstClockTime) g_value_get_uint (gvalue);
+ } else if (val && G_VALUE_TYPE (gvalue) == G_TYPE_INT) {
+ *val = (GstClockTime) g_value_get_int (gvalue);
+ } else if (val && G_VALUE_TYPE (gvalue) == G_TYPE_INT64) {
+ *val = (GstClockTime) g_value_get_int64 (gvalue);
+ } else if (val && G_VALUE_TYPE (gvalue) == G_TYPE_DOUBLE) {
+ gdouble d = g_value_get_double (gvalue);
+
+ if (d == -1.0)
+ *val = GST_CLOCK_TIME_NONE;
+ else
+ *val = d * GST_SECOND;
+ } else if (frames && G_VALUE_TYPE (gvalue) == G_TYPE_STRING) {
+ const gchar *str = g_value_get_string (gvalue);
+
+ found = FALSE;
+ if (str && str[0] == 'f') {
+ GValue v = G_VALUE_INIT;
+
+ g_value_init (&v, G_TYPE_UINT64);
+ if (gst_value_deserialize (&v, &str[1])) {
+ *frames = g_value_get_uint64 (&v);
+ if (val)
+ *val = GST_CLOCK_TIME_NONE;
+ found = TRUE;
+ }
+ g_value_reset (&v);
+ }
+  } else {
+    found = FALSE;
+  }
+
+ return found;
+}
GstElementFactory *
GError *error;
} LoadTimelineData;
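+/* Get a clock time from @structure: accepts a frame number given as an "fN"
+ * string (returned through @frames) in addition to anything
+ * gst_validate_utils_get_clocktime() understands. */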
+static gboolean
+_get_clocktime (GstStructure * structure, const gchar * name,
+ GstClockTime * val, GESFrameNumber * frames)
+{
+ const GValue *gvalue = gst_structure_get_value (structure, name);
+
+ if (!gvalue)
+ return FALSE;
+
+ if (frames && G_VALUE_TYPE (gvalue) == G_TYPE_STRING) {
+ const gchar *str = g_value_get_string (gvalue);
+
+ if (str && str[0] == 'f') {
+ GValue v = G_VALUE_INIT;
+
+ g_value_init (&v, G_TYPE_UINT64);
+ if (gst_value_deserialize (&v, &str[1])) {
+ *frames = g_value_get_uint64 (&v);
+ if (val)
+ *val = GST_CLOCK_TIME_NONE;
+ g_value_reset (&v);
+
+ return TRUE;
+ }
+ g_value_reset (&v);
+ }
+ }
+
+ if (!val)
+ return FALSE;
+
+ return gst_validate_utils_get_clocktime (structure, name, val);
+}
+
static void
project_loaded_cb (GESProject * project, GESTimeline * timeline,
LoadTimelineData * data)
{
GList *layers = NULL;
GESTimelineElement *element;
+ GESFrameNumber fposition = GES_FRAME_NUMBER_NONE;
GstClockTime position;
gboolean res = FALSE;
+ GError *err = NULL;
+ gboolean source_position = FALSE;
gint new_layer_priority = -1;
guint edge = GES_EDGE_NONE;
return GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
}
- if (!gst_validate_action_get_clocktime (scenario, action,
- "position", &position)) {
- GST_WARNING ("Could not get position");
- goto beach;
+  if (!_get_clocktime (action->structure, "position", &position, &fposition)) {
+    gint source_frame;
+
+    /* `source-frame` is an integer frame number in the clip's source */
+    if (gst_structure_get_int (action->structure, "source-frame",
+            &source_frame)) {
+      fposition = source_frame;
+    } else if (!gst_structure_get_int64 (action->structure, "source-frame",
+            &fposition)) {
+ gchar *structstr = gst_structure_to_string (action->structure);
+
+ GST_VALIDATE_REPORT_ACTION (scenario, action,
+ SCENARIO_ACTION_EXECUTION_ERROR,
+ "could not find `position` or `source-frame` in %s", structstr);
+ g_free (structstr);
+ res = GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
+ goto beach;
+ }
+
+ source_position = TRUE;
+ position = GST_CLOCK_TIME_NONE;
}
if ((edit_mode_str =
- gst_structure_get_string (action->structure, "edit-mode")))
- g_return_val_if_fail (gst_validate_utils_enum_from_str (GES_TYPE_EDIT_MODE,
- edit_mode_str, &mode), FALSE);
+ gst_structure_get_string (action->structure, "edit-mode"))) {
+ if (!gst_validate_utils_enum_from_str (GES_TYPE_EDIT_MODE, edit_mode_str,
+ &mode)) {
+ GST_VALIDATE_REPORT_ACTION (scenario, action,
+ SCENARIO_ACTION_EXECUTION_ERROR, "Could not get enum from %s",
+ edit_mode_str);
- if ((edge_str = gst_structure_get_string (action->structure, "edge")))
- g_return_val_if_fail (gst_validate_utils_enum_from_str (GES_TYPE_EDGE,
- edge_str, &edge), FALSE);
+ res = GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
+ goto beach;
+ }
+ }
+
+ if ((edge_str = gst_structure_get_string (action->structure, "edge"))) {
+ if (!gst_validate_utils_enum_from_str (GES_TYPE_EDGE, edge_str, &edge)) {
+ GST_VALIDATE_REPORT_ACTION (scenario, action,
+ SCENARIO_ACTION_EXECUTION_ERROR,
+ "Could not get enum from %s", edge_str);
+
+ res = GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
+ goto beach;
+ }
+ }
+
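+  /* Convert the frame number to a timestamp: `source-frame` is expressed in
+   * the clip's source, while `position=fN` is expressed in timeline output
+   * frames */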
+ if (GES_FRAME_NUMBER_IS_VALID (fposition)) {
+ if (source_position) {
+ GESClip *clip = NULL;
+
+ if (GES_IS_CLIP (element))
+ clip = GES_CLIP (element);
+ else if (GES_IS_TRACK_ELEMENT (element))
+ clip = GES_CLIP (element->parent);
+
+ if (!clip) {
+ GST_VALIDATE_REPORT_ACTION (scenario, action,
+ SCENARIO_ACTION_EXECUTION_ERROR,
+ "Could not get find element to edit using source frame for %"
+ GST_PTR_FORMAT, action->structure);
+
+ res = GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
+ goto beach;
+ }
+
+ position = ges_clip_get_timeline_time_from_source_frame (clip, fposition,
+ &err);
+ } else {
+ position = ges_timeline_get_frame_time (timeline, fposition);
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (position)) {
+ GST_VALIDATE_REPORT_ACTION (scenario, action,
+ SCENARIO_ACTION_EXECUTION_ERROR,
+ "Invalid frame number '%" G_GINT64_FORMAT "': %s", fposition,
+ err->message);
+
+ res = GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
+ goto beach;
+ }
+ }
gst_structure_get_int (action->structure, "new-layer-priority",
&new_layer_priority);
if (!(res = ges_timeline_element_edit (element, layers,
new_layer_priority, mode, edge, position))) {
- gst_object_unref (element);
+
+ gchar *fpositionstr = GES_FRAME_NUMBER_IS_VALID (fposition)
+ ? g_strdup_printf ("(%" G_GINT64_FORMAT ")", fposition)
+ : NULL;
+
+ GST_VALIDATE_REPORT_ACTION (scenario, action,
+ SCENARIO_ACTION_EXECUTION_ERROR,
+ "Could not edit '%s' to %" GST_TIME_FORMAT
+ "%s in %s mode, edge: %s "
+ "with new layer prio: %d",
+ element_name, GST_TIME_ARGS (position),
+ fpositionstr ? fpositionstr : "",
+ edit_mode_str ? edit_mode_str : "normal",
+ edge_str ? edge_str : "None", new_layer_priority);
+ g_free (fpositionstr);
+ res = GST_VALIDATE_EXECUTE_ACTION_ERROR_REPORTED;
goto beach;
}
- gst_object_unref (element);
SAVE_TIMELINE_IF_NEEDED (scenario, timeline, action);
+
beach:
+ gst_clear_object (&element);
+ g_clear_error (&err);
g_object_unref (timeline);
return res;
}
{
.name = "position",
.description = "The new position of the GESContainer",
- .mandatory = TRUE,
+ .mandatory = FALSE,
.types = "double or string",
.possible_variables = "position: The current position in the stream\n"
"duration: The duration of the stream",
{
.name = "position",
.description = "The new position of the element",
- .mandatory = TRUE,
+ .mandatory = FALSE,
.types = "double or string",
.possible_variables = "position: The current position in the stream\n"
"duration: The duration of the stream",
NULL
},
+ {
+ .name = "source-frame",
+ .description = "The new frame of the element, computed from the @element-name"
+ "clip's source frame.",
+ .mandatory = FALSE,
+ .types = "double or string",
+ NULL
+ },
{
.name = "edit-mode",
.description = "The GESEditMode to use to edit @element-name",
endforeach
if gstvalidate_dep.found()
- scenarios = ['check_video_track_restriction_scale', 'check_video_track_restriction_scale_with_keyframes']
+ scenarios = [
+ 'check_video_track_restriction_scale',
+ 'check_video_track_restriction_scale_with_keyframes',
+ 'check_edit_in_frames',
+ 'check_edit_in_frames_with_framerate_mismatch',
+ ]
foreach scenario: scenarios
scenario_file = join_paths(meson.current_source_dir(), 'scenarios', scenario + '.scenario')
--- /dev/null
+description, handles-states=true,
+ ges-options={\
+ --track-types, video\
+ }
+
+add-clip, name=clip, asset-id=GESTestClip, layer-priority=0, type=GESTestClip, start=f0, inpoint=f30, duration=f60
+
+check-ges-properties, element-name=clip, start=0, in-point=1.0, duration=2.0
+edit, element-name=clip, position=f30
+
+check-ges-properties, element-name=clip, start=1.0, in-point=1.0, duration=2.0
+
+# Trimming the end to timeline frame f60 (2.0s) ends the clip at media frame f60 (inpoint f30 + f30)
+edit, element-name=clip, position=f60, edit-mode=edit_trim, edge=edge_end
+check-ges-properties, element-name=clip, start=1.0, in-point=1.0, duration=1.0
+
+set-track-restriction-caps, track-type=video, caps="video/x-raw,width=1280,height=720,framerate=60/1"
+
+# Source frame 90 in media time: (90 - inpoint (f30)) / 30fps = 2 seconds of duration
+edit, element-name=clip, source-frame=90, edit-mode=edit_trim, edge=edge_end
+check-ges-properties, element-name=clip, start=1.0, in-point=1.0, duration=2.0
+
+# 60 frames in timeline time, meaning 60/60 = 1 second
+edit, element-name=clip, position=f60
+check-ges-properties, element-name=clip, start=1.0, in-point=1.0, duration=2.0
+
+# Source frame 75 in media time: 75 / 30fps = 2.5 seconds, trimming the start there
+edit, element-name=clip, source-frame=75, edit-mode=edit_trim, edge=edge_start
+check-ges-properties, element-name=clip, start=2.5, in-point=2.5, duration=0.5
+
+stop
\ No newline at end of file
--- /dev/null
+description, handles-states=true,
+ ges-options={\
+ --track-types, video,
+ --disable-mixing,
+ "--videosink=fakevideosink"\
+ }
+
+add-clip, name=clip, asset-id="framerate=120/1", layer-priority=0, type=GESTestClip, pattern=blue, duration=f240, inpoint=f100
+set-child-properties, element-name=clip, time-mode=time-code
+pause
+
+check-last-sample, sinkpad-caps="video/x-raw", timecode-frame-number=100
+
+edit, element-name=clip, edit-mode=normal, position=1.0
+
+edit, element-name=clip, edit-mode=edit_trim, source-frame=60
+edit, element-name=clip, position=0
+commit;
+check-last-sample, sinkpad-caps="video/x-raw", timecode-frame-number=60
+
+edit, element-name=clip, edit-mode=edit_trim, source-frame=120
+check-ges-properties, element-name=clip, start=0.5
+stop
\ No newline at end of file