#include <gst/math-compat.h>
#ifdef HAVE_ZLIB
-# include <zlib.h>
+#include <zlib.h>
#endif
/* max. size considered 'sane' for non-mdat atoms */
gboolean keyframe; /* TRUE when this packet is a keyframe */
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+/* Spherical (360-degree) video metadata parsed from the GSpherical XML
+ * stored in a 'uuid' atom, plus ambisonic audio properties parsed from the
+ * SA3D box.  One instance hangs off GstQTDemux and is posted to the bus as
+ * a tag message once the headers are digested. */
+typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
+
+struct _QtDemuxSphericalMetadata
+{
+  gboolean is_spherical;        /* <GSpherical:Spherical> */
+  gboolean is_stitched;         /* <GSpherical:Stitched> */
+  char *stitching_software;     /* strndup'd from XML; freed with free() in finalize */
+  char *projection_type;        /* strndup'd from XML; freed with free() in finalize */
+  char *stereo_mode;            /* strndup'd from XML; freed with free() in finalize */
+  int source_count;             /* number of cameras used to create the video */
+  int init_view_heading;        /* initial view angles, in degrees */
+  int init_view_pitch;
+  int init_view_roll;
+  int timestamp;                /* epoch of first frame; NOTE(review): int is not Y2038-safe — confirm */
+  int full_pano_width_pixels;   /* full panorama dimensions */
+  int full_pano_height_pixels;
+  int cropped_area_image_width; /* displayed (cropped) region within the panorama */
+  int cropped_area_image_height;
+  int cropped_area_left;
+  int cropped_area_top;
+  QTDEMUX_AMBISONIC_TYPE ambisonic_type;        /* from SA3D box */
+  QTDEMUX_AMBISONIC_FORMAT ambisonic_format;
+  QTDEMUX_AMBISONIC_ORDER ambisonic_order;
+};
+
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
/* Macros for converting to/from timescale */
#define QTSTREAMTIME_TO_GSTTIME(stream, value) (gst_util_uint64_scale((value), GST_SECOND, (stream)->timescale))
#define GSTTIME_TO_QTSTREAMTIME(stream, value) (gst_util_uint64_scale((value), (stream)->timescale, GST_SECOND))
/* track id */
guint track_id;
+#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
+ guint64 tkhd_duration;
+#endif
+
/* duration/scale */
guint64 duration; /* in timescale units */
guint32 timescale;
/* buffer needs some custom processing, e.g. subtitles */
gboolean need_process;
+ /* buffer needs potentially be split, e.g. CEA608 subtitles */
+ gboolean need_split;
/* current position */
guint32 segment_index;
static void qtdemux_gst_structure_free (GstStructure * gststructure);
static void gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+static void gst_tag_register_spherical_tags (void);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
gst_qtdemux_class_init (GstQTDemuxClass * klass)
{
gst_tag_register_musicbrainz_tags ();
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ gst_tag_register_spherical_tags ();
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
gst_element_class_add_static_pad_template (gstelement_class,
&gst_qtdemux_sink_template);
gst_element_class_add_static_pad_template (gstelement_class,
qtdemux->old_streams = g_ptr_array_new_with_free_func
((GDestroyNotify) gst_qtdemux_stream_unref);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ qtdemux->spherical_metadata = (QtDemuxSphericalMetadata *)
+ malloc (sizeof (QtDemuxSphericalMetadata));
+
+ if (qtdemux->spherical_metadata) {
+ qtdemux->spherical_metadata->is_spherical = FALSE;
+ qtdemux->spherical_metadata->is_stitched = FALSE;
+ qtdemux->spherical_metadata->stitching_software = NULL;
+ qtdemux->spherical_metadata->projection_type = NULL;
+ qtdemux->spherical_metadata->stereo_mode = NULL;
+ qtdemux->spherical_metadata->source_count = 0;
+ qtdemux->spherical_metadata->init_view_heading = 0;
+ qtdemux->spherical_metadata->init_view_pitch = 0;
+ qtdemux->spherical_metadata->init_view_roll = 0;
+ qtdemux->spherical_metadata->timestamp = 0;
+ qtdemux->spherical_metadata->full_pano_width_pixels = 0;
+ qtdemux->spherical_metadata->full_pano_height_pixels = 0;
+ qtdemux->spherical_metadata->cropped_area_image_width = 0;
+ qtdemux->spherical_metadata->cropped_area_image_height = 0;
+ qtdemux->spherical_metadata->cropped_area_left = 0;
+ qtdemux->spherical_metadata->cropped_area_top = 0;
+ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_UNKNOWN;
+ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_UNKNOWN;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
gst_qtdemux_reset (qtdemux, TRUE);
{
GstQTDemux *qtdemux = GST_QTDEMUX (object);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata) {
+ if (qtdemux->spherical_metadata->stitching_software)
+ free(qtdemux->spherical_metadata->stitching_software);
+ if (qtdemux->spherical_metadata->projection_type)
+ free(qtdemux->spherical_metadata->projection_type);
+ if (qtdemux->spherical_metadata->stereo_mode)
+ free(qtdemux->spherical_metadata->stereo_mode);
+
+ free(qtdemux->spherical_metadata);
+ qtdemux->spherical_metadata = NULL;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
if (qtdemux->adapter) {
g_object_unref (G_OBJECT (qtdemux->adapter));
qtdemux->adapter = NULL;
stream->stream_tags);
gst_pad_push_event (stream->pad,
gst_event_new_tag (gst_tag_list_ref (stream->stream_tags)));
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+  /* post the tag message on the bus so the application receives the tags early */
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+ gst_tag_list_copy (stream->stream_tags)));
+#endif
}
if (G_UNLIKELY (stream->send_global_tags)) {
if (hard) {
qtdemux->segment_seqnum = GST_SEQNUM_INVALID;
- g_ptr_array_remove_range (qtdemux->active_streams,
- 0, qtdemux->active_streams->len);
- g_ptr_array_remove_range (qtdemux->old_streams,
- 0, qtdemux->old_streams->len);
+ g_ptr_array_set_size (qtdemux->active_streams, 0);
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
qtdemux->n_video_streams = 0;
qtdemux->n_audio_streams = 0;
qtdemux->n_sub_streams = 0;
g_ptr_array_add (dest, gst_qtdemux_stream_ref (stream));
}
- g_ptr_array_remove_range (src, 0, len);
+ g_ptr_array_set_size (src, 0);
}
static gboolean
}
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+/* Extract the integer text of <param_name>...</...> from the GSpherical XML
+ * blob into *value.  On missing parameter, malformed XML, non-numeric text
+ * or out-of-range values, *value is left untouched and a log is emitted. */
+static void
+_get_int_value_from_xml_string (GstQTDemux * qtdemux,
+    const char *xml_str, const char *param_name, int *value)
+{
+  char *value_start, *value_end, *endptr;
+  const short value_length_max = 12;
+  char init_view_ret[12];
+  int value_length = 0;
+  int i = 0;
+  long parsed;
+
+  value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+  if (!value_start) {
+    GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+        param_name);
+    return;
+  }
+
+  /* skip the opening tag and any leading blanks */
+  value_start += strlen (param_name);
+  while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+    value_start++;
+
+  value_end = strchr (value_start, '<');
+  if (!value_end) {
+    GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+    return;
+  }
+
+  /* trim trailing blanks before the closing tag */
+  value_length = value_end - value_start;
+  while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+          || (value_start[value_length - 1] == '\t')))
+    value_length--;
+
+  /* validate the length first so the digit scan below is bounded */
+  if (value_length >= value_length_max || value_length < 1) {
+    GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+    return;
+  }
+
+  /* accept an optional sign followed by decimal digits only */
+  if (value_start[i] == '+' || value_start[i] == '-')
+    i++;
+  while (i < value_length) {
+    if (value_start[i] < '0' || value_start[i] > '9') {
+      GST_ERROR_OBJECT (qtdemux,
+          "error: incorrect value, integer was expected\n");
+      return;
+    }
+    i++;
+  }
+
+  /* copy exactly the value; the original copied value_length_max bytes,
+   * pulling in text past the value before re-terminating */
+  strncpy (init_view_ret, value_start, value_length);
+  init_view_ret[value_length] = '\0';
+
+  parsed = strtol (init_view_ret, &endptr, 10);
+  if (endptr == init_view_ret) {
+    GST_ERROR_OBJECT (qtdemux, "error: no digits were found\n");
+    return;
+  }
+  /* up to 11 digits fit the buffer but can still overflow an int when the
+   * long result is truncated */
+  if (parsed > G_MAXINT || parsed < G_MININT) {
+    GST_ERROR_OBJECT (qtdemux, "error: value out of integer range\n");
+    return;
+  }
+
+  *value = (int) parsed;
+}
+
+/* Extract the text of <param_name>...</...> from the GSpherical XML blob
+ * into *value as a newly allocated string (strndup — caller releases with
+ * free()).  On missing parameter, malformed XML or empty/oversized values,
+ * *value is left untouched and a log is emitted.
+ * NOTE(review): an existing *value allocation would be overwritten without
+ * being freed — callers currently invoke this once per field; confirm. */
+static void
+_get_string_value_from_xml_string (GstQTDemux * qtdemux,
+    const char *xml_str, const char *param_name, char **value)
+{
+  char *value_start, *value_end;
+  const short value_length_max = 256;
+  int value_length = 0;
+
+  value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+
+  if (!value_start) {
+    GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+        param_name);
+    return;
+  }
+
+  /* skip the opening tag and any leading blanks */
+  value_start += strlen (param_name);
+  while ((value_start[0] == ' ') || (value_start[0] == '\t'))
+    value_start++;
+
+  value_end = strchr (value_start, '<');
+  if (!value_end) {
+    GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+    return;
+  }
+
+  /* trim trailing blanks before the closing tag */
+  value_length = value_end - value_start;
+  while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
+          || (value_start[value_length - 1] == '\t')))
+    value_length--;
+
+  if (value_length >= value_length_max || value_length < 1) {
+    GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+    return;
+  }
+
+  *value = strndup(value_start, value_length);
+
+  return;
+}
+
+/* Parse a boolean <param_name>...</...> value out of the GSpherical XML
+ * blob.  *value becomes TRUE iff the (whitespace-trimmed) element text
+ * contains "true"; on missing or malformed input *value is left untouched. */
+static void
+_get_bool_value_from_xml_string (GstQTDemux * qtdemux,
+    const char *xml_str, const char *param_name, gboolean * value)
+{
+  char *start, *end;
+  int len;
+
+  start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
+  if (start == NULL) {
+    GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s\n",
+        param_name);
+    return;
+  }
+
+  /* step past the opening tag, then past leading spaces/tabs */
+  start += strlen (param_name);
+  for (; *start == ' ' || *start == '\t'; start++);
+
+  end = strchr (start, '<');
+  if (end == NULL) {
+    GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
+    return;
+  }
+
+  /* trim trailing spaces/tabs before the closing tag */
+  len = end - start;
+  while (len >= 1 && (start[len - 1] == ' ' || start[len - 1] == '\t'))
+    len--;
+
+  if (len < 1) {
+    GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
+    return;
+  }
+
+  *value = (g_strstr_len (start, len, "true") != NULL) ? TRUE : FALSE;
+}
+
+/* Walk the GSpherical XML blob found in the spherical 'uuid' atom and fill
+ * qtdemux->spherical_metadata.  The string/int fields are only parsed when
+ * the video declares itself both spherical and stitched. */
+static void
+_parse_spatial_video_metadata_from_xml_string (GstQTDemux * qtdemux, const char *xmlStr)
+{
+  const char is_spherical_str[] = "<GSpherical:Spherical>";
+  const char is_stitched_str[] = "<GSpherical:Stitched>";
+  const char stitching_software_str[] = "<GSpherical:StitchingSoftware>";
+  const char projection_type_str[] = "<GSpherical:ProjectionType>";
+  const char stereo_mode_str[] = "<GSpherical:StereoMode>";
+  const char source_count_str[] = "<GSpherical:SourceCount>";
+  const char init_view_heading_str[] = "<GSpherical:InitialViewHeadingDegrees>";
+  const char init_view_pitch_str[] = "<GSpherical:InitialViewPitchDegrees>";
+  const char init_view_roll_str[] = "<GSpherical:InitialViewRollDegrees>";
+  const char timestamp_str[] = "<GSpherical:Timestamp>";
+  const char full_pano_width_str[] = "<GSpherical:FullPanoWidthPixels>";
+  const char full_pano_height_str[] = "<GSpherical:FullPanoHeightPixels>";
+  const char cropped_area_image_width_str[] =
+      "<GSpherical:CroppedAreaImageWidthPixels>";
+  const char cropped_area_image_height_str[] =
+      "<GSpherical:CroppedAreaImageHeightPixels>";
+  const char cropped_area_left_str[] = "<GSpherical:CroppedAreaLeftPixels>";
+  const char cropped_area_top_str[] = "<GSpherical:CroppedAreaTopPixels>";
+
+  QtDemuxSphericalMetadata *spherical_metadata = qtdemux->spherical_metadata;
+
+  /* defensive: metadata allocation may have failed in init */
+  if (spherical_metadata == NULL)
+    return;
+
+  /* fields are already gboolean — the original casts were no-ops */
+  _get_bool_value_from_xml_string (qtdemux, xmlStr, is_spherical_str,
+      &spherical_metadata->is_spherical);
+  _get_bool_value_from_xml_string (qtdemux, xmlStr, is_stitched_str,
+      &spherical_metadata->is_stitched);
+
+  if (spherical_metadata->is_spherical && spherical_metadata->is_stitched) {
+    _get_string_value_from_xml_string (qtdemux, xmlStr,
+        stitching_software_str, &spherical_metadata->stitching_software);
+    _get_string_value_from_xml_string (qtdemux, xmlStr,
+        projection_type_str, &spherical_metadata->projection_type);
+    _get_string_value_from_xml_string (qtdemux, xmlStr, stereo_mode_str,
+        &spherical_metadata->stereo_mode);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, source_count_str,
+        &spherical_metadata->source_count);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        init_view_heading_str, &spherical_metadata->init_view_heading);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_pitch_str,
+        &spherical_metadata->init_view_pitch);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_roll_str,
+        &spherical_metadata->init_view_roll);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, timestamp_str,
+        &spherical_metadata->timestamp);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, full_pano_width_str,
+        &spherical_metadata->full_pano_width_pixels);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        full_pano_height_str, &spherical_metadata->full_pano_height_pixels);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        cropped_area_image_width_str,
+        &spherical_metadata->cropped_area_image_width);
+    _get_int_value_from_xml_string (qtdemux, xmlStr,
+        cropped_area_image_height_str,
+        &spherical_metadata->cropped_area_image_height);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_left_str,
+        &spherical_metadata->cropped_area_left);
+    _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_top_str,
+        &spherical_metadata->cropped_area_top);
+  }
+}
+
+/* Register the spherical-video / ambisonic-audio tags exposed to the
+ * application via bus tag messages.  Called once from class_init. */
+static void
+gst_tag_register_spherical_tags (void)
+{
+  static const struct
+  {
+    const gchar *name;
+    GType type;
+    const gchar *nick;
+    const gchar *blurb;
+  } spherical_tags[] = {
+    {"is_spherical", G_TYPE_INT, "tag-spherical",
+        "Flag indicating if the video is a spherical video"},
+    {"is_stitched", G_TYPE_INT, "tag-stitched",
+        "Flag indicating if the video is stitched"},
+    {"stitching_software", G_TYPE_STRING, "tag-stitching-software",
+        "Software used to stitch the spherical video"},
+    {"projection_type", G_TYPE_STRING, "tag-projection-type",
+        "Projection type used in the video frames"},
+    {"stereo_mode", G_TYPE_STRING, "tag-stereo-mode",
+        "Description of stereoscopic 3D layout"},
+    {"source_count", G_TYPE_INT, "tag-source-count",
+        "Number of cameras used to create the spherical video"},
+    {"init_view_heading", G_TYPE_INT, "tag-init-view-heading",
+        "The heading angle of the initial view in degrees"},
+    {"init_view_pitch", G_TYPE_INT, "tag-init-view-pitch",
+        "The pitch angle of the initial view in degrees"},
+    {"init_view_roll", G_TYPE_INT, "tag-init-view-roll",
+        "The roll angle of the initial view in degrees"},
+    {"timestamp", G_TYPE_INT, "tag-timestamp",
+        "Epoch timestamp of when the first frame in the video was recorded"},
+    {"full_pano_width_pixels", G_TYPE_INT, "tag-full-pano-width",
+        "Width of the encoded video frame in pixels"},
+    {"full_pano_height_pixels", G_TYPE_INT, "tag-full-pano-height",
+        "Height of the encoded video frame in pixels"},
+    {"cropped_area_image_width", G_TYPE_INT, "tag-cropped-area-image-width",
+        "Width of the video frame to display (e.g. cropping)"},
+    {"cropped_area_image_height", G_TYPE_INT, "tag-cropped-area-image-height",
+        "Height of the video frame to display (e.g. cropping)"},
+    {"cropped_area_left", G_TYPE_INT, "tag-cropped-area-left",
+        "Column where the left edge of the image was cropped from the"
+          " full sized panorama"},
+    {"cropped_area_top", G_TYPE_INT, "tag-cropped-area-top",
+        "Row where the top edge of the image was cropped from the"
+          " full sized panorama"},
+    {"ambisonic_type", G_TYPE_INT, "tag-ambisonic-type",
+        "Specifies the type of ambisonic audio represented"},
+    {"ambisonic_format", G_TYPE_INT, "tag-ambisonic-format",
+        "Specifies the ambisonic audio format"},
+    {"ambisonic_order", G_TYPE_INT, "tag-ambisonic-order",
+        "Specifies the ambisonic audio channel order"},
+  };
+  guint i;
+
+  /* same tags, same registration order as the original hand-unrolled list */
+  for (i = 0; i < G_N_ELEMENTS (spherical_tags); i++)
+    gst_tag_register (spherical_tags[i].name, GST_TAG_FLAG_META,
+        spherical_tags[i].type, _(spherical_tags[i].nick),
+        _(spherical_tags[i].blurb), NULL);
+}
+
+/* Post the collected spherical/ambisonic metadata on the bus as a tag
+ * message so the application can pick it up.  The integer/boolean tags are
+ * always posted; the string tags only when they were parsed from the XML.
+ * Caller must ensure qtdemux->spherical_metadata is non-NULL. */
+static void
+_send_spherical_metadata_msg_to_bus (GstQTDemux * qtdemux)
+{
+  GstTagList *taglist;
+  QtDemuxSphericalMetadata *spherical_metadata = qtdemux->spherical_metadata;
+
+  GST_DEBUG_OBJECT (qtdemux, "is_spherical = %d",
+      spherical_metadata->is_spherical);
+  GST_DEBUG_OBJECT (qtdemux, "is_stitched = %d",
+      spherical_metadata->is_stitched);
+  GST_DEBUG_OBJECT (qtdemux, "stitching_software = %s",
+      spherical_metadata->stitching_software);
+  GST_DEBUG_OBJECT (qtdemux, "projection_type = %s",
+      spherical_metadata->projection_type);
+  GST_DEBUG_OBJECT (qtdemux, "stereo_mode = %s",
+      spherical_metadata->stereo_mode);
+  GST_DEBUG_OBJECT (qtdemux, "source_count %d",
+      spherical_metadata->source_count);
+  GST_DEBUG_OBJECT (qtdemux, "init_view_heading = %d",
+      spherical_metadata->init_view_heading);
+  GST_DEBUG_OBJECT (qtdemux, "init_view_pitch = %d",
+      spherical_metadata->init_view_pitch);
+  GST_DEBUG_OBJECT (qtdemux, "init_view_roll = %d",
+      spherical_metadata->init_view_roll);
+  GST_DEBUG_OBJECT (qtdemux, "timestamp = %d", spherical_metadata->timestamp);
+  GST_DEBUG_OBJECT (qtdemux, "full_pano_width_pixels = %d",
+      spherical_metadata->full_pano_width_pixels);
+  GST_DEBUG_OBJECT (qtdemux, "full_pano_height_pixels = %d",
+      spherical_metadata->full_pano_height_pixels);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_width = %d",
+      spherical_metadata->cropped_area_image_width);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_height = %d",
+      spherical_metadata->cropped_area_image_height);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_left = %d",
+      spherical_metadata->cropped_area_left);
+  GST_DEBUG_OBJECT (qtdemux, "cropped_area_top = %d",
+      spherical_metadata->cropped_area_top);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_type = %d",
+      spherical_metadata->ambisonic_type);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_order = %d",
+      spherical_metadata->ambisonic_order);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_format = %d",
+      spherical_metadata->ambisonic_format);
+
+  /* tag names must match those registered in gst_tag_register_spherical_tags */
+  taglist = gst_tag_list_new_empty ();
+  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+      "is_spherical", spherical_metadata->is_spherical,
+      "is_stitched", spherical_metadata->is_stitched,
+      "source_count", spherical_metadata->source_count,
+      "init_view_heading", spherical_metadata->init_view_heading,
+      "init_view_pitch", spherical_metadata->init_view_pitch,
+      "init_view_roll", spherical_metadata->init_view_roll,
+      "timestamp", spherical_metadata->timestamp,
+      "full_pano_width_pixels", spherical_metadata->full_pano_width_pixels,
+      "full_pano_height_pixels", spherical_metadata->full_pano_height_pixels,
+      "cropped_area_image_width", spherical_metadata->cropped_area_image_width,
+      "cropped_area_image_height", spherical_metadata->cropped_area_image_height,
+      "cropped_area_left", spherical_metadata->cropped_area_left,
+      "cropped_area_top", spherical_metadata->cropped_area_top,
+      "ambisonic_type", spherical_metadata->ambisonic_type,
+      "ambisonic_format", spherical_metadata->ambisonic_format,
+      "ambisonic_order", spherical_metadata->ambisonic_order,
+      NULL);
+
+  /* string fields may be NULL when absent from the XML — add only if set */
+  if (spherical_metadata->stitching_software)
+    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+        "stitching_software", spherical_metadata->stitching_software,
+        NULL);
+  if (spherical_metadata->projection_type)
+    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+        "projection_type", spherical_metadata->projection_type,
+        NULL);
+  if (spherical_metadata->stereo_mode)
+    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+        "stereo_mode", spherical_metadata->stereo_mode,
+        NULL);
+
+  /* the message takes its own copy; we keep and drop our reference */
+  gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+      gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
+          gst_tag_list_copy (taglist)));
+
+  gst_tag_list_unref(taglist);
+
+  return;
+}
+
+/* Parse the Spatial Audio (SA3D) box and record ambisonic properties in
+ * qtdemux->spherical_metadata.  Only version-0 boxes describing periphonic
+ * first-order ambisonics (4 channels, ambiX or FuMa layout) are recognised. */
+static void
+qtdemux_parse_SA3D (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+{
+  guint offset = 0;
+
+  guint8 version = 0;
+  guint8 ambisonic_type = 0;
+  guint32 ambisonic_order = 0;
+  guint8 ambisonic_channel_ordering = 0;
+  guint8 ambisonic_normalization = 0;
+  guint32 num_channels = 0;
+  guint32 channel_map[49] = { 0 };      /* Up to 6th order */
+
+  int i;
+
+  GST_DEBUG_OBJECT (qtdemux, "qtdemux_parse_SA3D");
+
+  qtdemux->header_size += length;
+  /* a zero 32-bit size means a 64-bit atom header precedes the payload */
+  offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;
+
+  if (length <= offset + 16) {
+    GST_DEBUG_OBJECT (qtdemux, "SA3D atom is too short, skipping");
+    return;
+  }
+
+  version = QT_UINT8 (buffer + offset);
+  ambisonic_type = QT_UINT8 (buffer + offset + 1);
+  ambisonic_order = QT_UINT32 (buffer + offset + 2);
+  ambisonic_channel_ordering = QT_UINT8 (buffer + offset + 6);
+  ambisonic_normalization = QT_UINT8 (buffer + offset + 7);
+  num_channels = QT_UINT32 (buffer + offset + 8);
+
+  /* num_channels comes straight from the file: without this check the loop
+   * below overflows the 49-entry stack array and reads past the atom on a
+   * malicious/corrupt file */
+  if (num_channels > G_N_ELEMENTS (channel_map)
+      || (guint) length < offset + 12 + num_channels * 4) {
+    GST_WARNING_OBJECT (qtdemux,
+        "SA3D atom has bogus number of channels %u, skipping", num_channels);
+    return;
+  }
+
+  for (i = 0; i < num_channels; ++i)
+    channel_map[i] = QT_UINT32 (buffer + offset + 12 + i * 4);
+
+  GST_DEBUG_OBJECT (qtdemux, "version: %d", version);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_type: %d", ambisonic_type);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_order: %d", ambisonic_order);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_channel_ordering: %d",
+      ambisonic_channel_ordering);
+  GST_DEBUG_OBJECT (qtdemux, "ambisonic_normalization: %d",
+      ambisonic_normalization);
+  GST_DEBUG_OBJECT (qtdemux, "num_channels: %d", num_channels);
+  for (i = 0; i < num_channels; ++i)
+    GST_DEBUG_OBJECT (qtdemux, "channel_map: %d", channel_map[i]);
+
+  if (version == RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED) {
+    if (ambisonic_type == RFC_AMBISONIC_TYPE_PERIPHONIC)
+      qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_PERIPHONIC;
+
+    if (ambisonic_order == RFC_AMBISONIC_ORDER_FOA) {
+      if (num_channels == 4) {
+        qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_FOA;
+
+        /* ambiX: ACN ordering + SN3D normalization, identity channel map */
+        if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_ACN)
+            && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_SN3D)
+            && (channel_map[0] == 0) && (channel_map[1] == 1)
+            && (channel_map[2] == 2) && (channel_map[3] == 3))
+          qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMBIX;
+
+        /* traditional .amb: FuMa ordering + FuMa (maxN) normalization */
+        if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_FUMA)
+            && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_FUMA)
+            && (channel_map[0] == 0) && (channel_map[1] == 3)
+            && (channel_map[2] == 1) && (channel_map[3] == 2))
+          qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMB;
+      }
+    }
+  }
+}
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
static void
qtdemux_update_default_sample_encryption_settings (GstQTDemux * qtdemux,
QtDemuxCencSampleSetInfo * info, guint32 is_encrypted, guint8 iv_size,
0xa2, 0x44, 0x6c, 0x42, 0x7c, 0x64, 0x8d, 0xf4
};
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ static const guint8 spherical_uuid[] = {
+ 0xff, 0xcc, 0x82, 0x63, 0xf8, 0x55, 0x4a, 0x93,
+ 0x88, 0x14, 0x58, 0x7a, 0x02, 0x52, 0x1f, 0xdd
+ };
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
guint offset;
/* counts as header data */
return;
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (memcmp (buffer + offset, spherical_uuid, 16) == 0) {
+ const char *contents;
+
+ GST_DEBUG_OBJECT (qtdemux, "spherical uuid was found");
+ contents = (char *) (buffer + offset + 16);
+ GST_DEBUG_OBJECT (qtdemux, "contents: %s\n", contents);
+
+ if (qtdemux->spherical_metadata)
+ _parse_spatial_video_metadata_from_xml_string (qtdemux, contents);
+
+ return;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+
if (memcmp (buffer + offset, xmp_uuid, 16) == 0) {
GstBuffer *buf;
GstTagList *taglist;
beach:
if (ret == GST_FLOW_EOS && (qtdemux->got_moov || qtdemux->media_caps)) {
/* digested all data, show what we have */
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ if (qtdemux->spherical_metadata)
+ _send_spherical_metadata_msg_to_bus (qtdemux);
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
qtdemux_prepare_streams (qtdemux);
QTDEMUX_EXPOSE_LOCK (qtdemux);
ret = qtdemux_expose_streams (qtdemux);
seg_media_start_mov = seg->trak_media_start;
GST_LOG_OBJECT (qtdemux, "keyframe index %u ts %" G_GUINT64_FORMAT
- " seg start %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT "\n",
+ " seg start %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
k_index, target_ts, seg_media_start_mov,
GST_TIME_ARGS (seg->media_start));
}
static guint8 *
-convert_to_ccdata (const guint8 * ccpair, guint8 ccpair_size, guint field,
+convert_to_s334_1a (const guint8 * ccpair, guint8 ccpair_size, guint field,
gsize * res)
{
guint8 *storage;
*res = ccpair_size / 2 * 3;
storage = g_malloc (*res);
for (i = 0; i * 2 < ccpair_size; i += 1) {
+ /* FIXME: Use line offset 0 as we simply can't know here */
if (field == 1)
- storage[i * 3] = 0xfc;
+ storage[i * 3] = 0x80 | 0x00;
else
- storage[i * 3] = 0xfd;
+ storage[i * 3] = 0x00 | 0x00;
storage[i * 3 + 1] = ccpair[i * 2];
storage[i * 3 + 2] = ccpair[i * 2 + 1];
}
goto invalid_cdat;
}
- /* Convert to cc_data triplet */
+ /* Convert to S334-1 Annex A byte triplet */
if (fourcc == FOURCC_cdat)
- cdat = convert_to_ccdata (data + 8, atom_length - 8, 1, &cdat_size);
+ cdat = convert_to_s334_1a (data + 8, atom_length - 8, 1, &cdat_size);
else
- cdt2 = convert_to_ccdata (data + 8, atom_length - 8, 2, &cdt2_size);
+ cdt2 = convert_to_s334_1a (data + 8, atom_length - 8, 2, &cdt2_size);
GST_DEBUG_OBJECT (stream->pad, "size:%" G_GSIZE_FORMAT " atom_length:%u",
size, atom_length);
if (fourcc == FOURCC_cdat) {
if (cdat == NULL)
cdat =
- convert_to_ccdata (data + atom_length + 8,
+ convert_to_s334_1a (data + atom_length + 8,
new_atom_length - 8, 1, &cdat_size);
else
GST_WARNING_OBJECT (stream->pad,
} else {
if (cdt2 == NULL)
cdt2 =
- convert_to_ccdata (data + atom_length + 8,
+ convert_to_s334_1a (data + atom_length + 8,
new_atom_length - 8, 2, &cdt2_size);
else
GST_WARNING_OBJECT (stream->pad,
return buf;
}
+/* Push one buffer downstream on @stream's pad: clip it against the segment,
+ * set/clear DISCONT, attach CENC protection metadata when the stream is
+ * encrypted, align the data if required, and track stream position.
+ * Takes ownership of @buf.  Returns the pad-push flow result. */
+static GstFlowReturn
+gst_qtdemux_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+    GstBuffer * buf)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstClockTime pts, duration;
+
+  /* clipping may consume the buffer entirely (returns NULL) */
+  if (stream->need_clip)
+    buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);
+
+  if (G_UNLIKELY (buf == NULL))
+    goto exit;
+
+  if (G_UNLIKELY (stream->discont)) {
+    GST_LOG_OBJECT (qtdemux, "marking discont buffer");
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+    stream->discont = FALSE;
+  } else {
+    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+  }
+
+  GST_LOG_OBJECT (qtdemux,
+      "Pushing buffer with dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
+      ", duration %" GST_TIME_FORMAT " on pad %s",
+      GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
+      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_PAD_NAME (stream->pad));
+
+  if (stream->protected && stream->protection_scheme_type == FOURCC_cenc) {
+    GstStructure *crypto_info;
+    QtDemuxCencSampleSetInfo *info =
+        (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+    gint index;
+    GstEvent *event;
+
+    /* flush any queued protection-system events before the first sample */
+    while ((event = g_queue_pop_head (&stream->protection_scheme_event_queue))) {
+      GST_TRACE_OBJECT (stream->pad, "pushing protection event: %"
+          GST_PTR_FORMAT, event);
+      gst_pad_push_event (stream->pad, event);
+    }
+
+    if (info->crypto_info == NULL) {
+      GST_DEBUG_OBJECT (qtdemux,
+          "cenc metadata hasn't been parsed yet, pushing buffer as if it wasn't encrypted");
+    } else {
+      /* The end of the crypto_info array matches our n_samples position,
+       * so count backward from there */
+      index = stream->sample_index - stream->n_samples + info->crypto_info->len;
+      if (G_LIKELY (index >= 0 && index < info->crypto_info->len)) {
+        /* steal structure from array */
+        crypto_info = g_ptr_array_index (info->crypto_info, index);
+        g_ptr_array_index (info->crypto_info, index) = NULL;
+        GST_LOG_OBJECT (qtdemux, "attaching cenc metadata [%u/%u]", index,
+            info->crypto_info->len);
+        if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
+          GST_ERROR_OBJECT (qtdemux,
+              "failed to attach cenc metadata to buffer");
+      } else {
+        GST_INFO_OBJECT (qtdemux, "No crypto info with index %d and sample %d",
+            index, stream->sample_index);
+      }
+    }
+  }
+
+  if (stream->alignment > 1)
+    buf = gst_qtdemux_align_buffer (qtdemux, buf, stream->alignment);
+
+  /* snapshot timestamps before gst_pad_push takes ownership of buf */
+  pts = GST_BUFFER_PTS (buf);
+  duration = GST_BUFFER_DURATION (buf);
+
+  ret = gst_pad_push (stream->pad, buf);
+
+  if (GST_CLOCK_TIME_IS_VALID (pts) && GST_CLOCK_TIME_IS_VALID (duration)) {
+    /* mark position in stream, we'll need this to know when to send GAP event */
+    stream->segment.position = pts + duration;
+  }
+
+exit:
+
+  return ret;
+}
+
+/* Push @buf downstream, splitting CEA608 (c608) closed-caption buffers so
+ * that each outgoing buffer carries at most one S334-1A byte triplet per
+ * field, paced at the video frame rate.  Non-caption streams (and caption
+ * buffers that need no split) are forwarded unchanged.  Takes ownership of
+ * @buf; returns the (last) pad-push flow result. */
+static GstFlowReturn
+gst_qtdemux_split_and_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+    GstBuffer * buf)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  if (stream->subtype == FOURCC_clcp
+      && CUR_STREAM (stream)->fourcc == FOURCC_c608 && stream->need_split) {
+    GstMapInfo map;
+    guint n_output_buffers, n_field1 = 0, n_field2 = 0;
+    guint n_triplets, i;
+    guint field1_off = 0, field2_off = 0;
+
+    /* We have to split CEA608 buffers so that each outgoing buffer contains
+     * one byte pair per field according to the framerate of the video track.
+     *
+     * If there is only a single byte pair per field we don't have to do
+     * anything
+     */
+
+    gst_buffer_map (buf, &map, GST_MAP_READ);
+
+    /* triplets are S334-1A: flags byte (bit 7 = field 1) + 2 data bytes */
+    n_triplets = map.size / 3;
+    for (i = 0; i < n_triplets; i++) {
+      if (map.data[3 * i] & 0x80)
+        n_field1++;
+      else
+        n_field2++;
+    }
+
+    g_assert (n_field1 || n_field2);
+
+    /* If there's more than 1 frame we have to split, otherwise we can just
+     * pass through */
+    if (n_field1 > 1 || n_field2 > 1) {
+      /* number of video frames covered by this buffer's duration */
+      n_output_buffers =
+          gst_util_uint64_scale (GST_BUFFER_DURATION (buf),
+          CUR_STREAM (stream)->fps_n, GST_SECOND * CUR_STREAM (stream)->fps_d);
+
+      for (i = 0; i < n_output_buffers; i++) {
+        GstBuffer *outbuf =
+            gst_buffer_new_and_alloc ((n_field1 ? 3 : 0) + (n_field2 ? 3 : 0));
+        GstMapInfo outmap;
+        guint8 *outptr;
+
+        gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+        outptr = outmap.data;
+
+        /* take the next field-1 triplet, or pad with an empty one */
+        if (n_field1) {
+          gboolean found = FALSE;
+
+          while (map.data + field1_off < map.data + map.size) {
+            if (map.data[field1_off] & 0x80) {
+              memcpy (outptr, &map.data[field1_off], 3);
+              field1_off += 3;
+              found = TRUE;
+              break;
+            }
+            field1_off += 3;
+          }
+
+          if (!found) {
+            const guint8 empty[] = { 0x80, 0x80, 0x80 };
+
+            memcpy (outptr, empty, 3);
+          }
+
+          outptr += 3;
+        }
+
+        /* take the next field-2 triplet, or pad with an empty one */
+        if (n_field2) {
+          gboolean found = FALSE;
+
+          while (map.data + field2_off < map.data + map.size) {
+            if ((map.data[field2_off] & 0x80) == 0) {
+              memcpy (outptr, &map.data[field2_off], 3);
+              field2_off += 3;
+              found = TRUE;
+              break;
+            }
+            field2_off += 3;
+          }
+
+          if (!found) {
+            const guint8 empty[] = { 0x00, 0x80, 0x80 };
+
+            memcpy (outptr, empty, 3);
+          }
+
+          outptr += 3;
+        }
+
+        gst_buffer_unmap (outbuf, &outmap);
+
+        /* space the split buffers one video frame apart */
+        GST_BUFFER_PTS (outbuf) =
+            GST_BUFFER_PTS (buf) + gst_util_uint64_scale (i,
+            GST_SECOND * CUR_STREAM (stream)->fps_d,
+            CUR_STREAM (stream)->fps_n);
+        GST_BUFFER_DURATION (outbuf) =
+            gst_util_uint64_scale (GST_SECOND, CUR_STREAM (stream)->fps_d,
+            CUR_STREAM (stream)->fps_n);
+        GST_BUFFER_OFFSET (outbuf) = -1;
+        GST_BUFFER_OFFSET_END (outbuf) = -1;
+
+        ret = gst_qtdemux_push_buffer (qtdemux, stream, outbuf);
+
+        if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)
+          break;
+      }
+      gst_buffer_unmap (buf, &map);
+      gst_buffer_unref (buf);
+    } else {
+      gst_buffer_unmap (buf, &map);
+      ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
+    }
+  } else {
+    ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
+  }
+
+  return ret;
+}
+
/* Sets a buffer's attributes properly and pushes it downstream.
* Also checks for additional actions and custom processing that may
* need to be done first.
GST_BUFFER_OFFSET (buf) = -1;
GST_BUFFER_OFFSET_END (buf) = -1;
+ if (!keyframe) {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ stream->on_keyframe = FALSE;
+ } else {
+ stream->on_keyframe = TRUE;
+ }
+
if (G_UNLIKELY (CUR_STREAM (stream)->rgb8_palette))
gst_buffer_append_memory (buf,
gst_memory_ref (CUR_STREAM (stream)->rgb8_palette));
}
#endif
- if (stream->need_clip)
- buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);
-
- if (G_UNLIKELY (buf == NULL))
- goto exit;
-
- if (G_UNLIKELY (stream->discont)) {
- GST_LOG_OBJECT (qtdemux, "marking discont buffer");
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
- stream->discont = FALSE;
- } else {
- GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
- }
-
- if (!keyframe) {
- GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
- stream->on_keyframe = FALSE;
- } else {
- stream->on_keyframe = TRUE;
- }
-
-
- GST_LOG_OBJECT (qtdemux,
- "Pushing buffer with dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
- ", duration %" GST_TIME_FORMAT " on pad %s", GST_TIME_ARGS (dts),
- GST_TIME_ARGS (pts), GST_TIME_ARGS (duration),
- GST_PAD_NAME (stream->pad));
-
- if (stream->protected && stream->protection_scheme_type == FOURCC_cenc) {
- GstStructure *crypto_info;
- QtDemuxCencSampleSetInfo *info =
- (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
- gint index;
- GstEvent *event;
-
- while ((event = g_queue_pop_head (&stream->protection_scheme_event_queue))) {
- GST_TRACE_OBJECT (stream->pad, "pushing protection event: %"
- GST_PTR_FORMAT, event);
- gst_pad_push_event (stream->pad, event);
- }
-
- if (info->crypto_info == NULL) {
- GST_DEBUG_OBJECT (qtdemux,
- "cenc metadata hasn't been parsed yet, pushing buffer as if it wasn't encrypted");
- } else {
- /* The end of the crypto_info array matches our n_samples position,
- * so count backward from there */
- index = stream->sample_index - stream->n_samples + info->crypto_info->len;
- if (G_LIKELY (index >= 0 && index < info->crypto_info->len)) {
- /* steal structure from array */
- crypto_info = g_ptr_array_index (info->crypto_info, index);
- g_ptr_array_index (info->crypto_info, index) = NULL;
- GST_LOG_OBJECT (qtdemux, "attaching cenc metadata [%u/%u]", index,
- info->crypto_info->len);
- if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
- GST_ERROR_OBJECT (qtdemux,
- "failed to attach cenc metadata to buffer");
- } else {
- GST_INFO_OBJECT (qtdemux, "No crypto info with index %d and sample %d",
- index, stream->sample_index);
- }
- }
- }
-
- if (stream->alignment > 1)
- buf = gst_qtdemux_align_buffer (qtdemux, buf, stream->alignment);
-
- ret = gst_pad_push (stream->pad, buf);
-
- if (GST_CLOCK_TIME_IS_VALID (pts) && GST_CLOCK_TIME_IS_VALID (duration)) {
- /* mark position in stream, we'll need this to know when to send GAP event */
- stream->segment.position = pts + duration;
- }
+ ret = gst_qtdemux_split_and_push_buffer (qtdemux, stream, buf);
exit:
return ret;
qtdemux_parse_container (qtdemux, node, buffer + 36, end);
break;
}
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ case FOURCC_SA3D:
+ {
+ qtdemux_parse_SA3D (qtdemux, buffer, end - buffer);
+ break;
+ }
+#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
default:
if (!strcmp (type->name, "unknown"))
GST_MEMDUMP ("Unknown tag", buffer + 4, end - buffer - 4);
GST_TIME_FORMAT, duration, first_duration,
n_samples - 1, GST_TIME_ARGS (avg_duration));
- gst_video_guess_framerate (avg_duration, &CUR_STREAM (stream)->fps_n,
- &CUR_STREAM (stream)->fps_d);
+#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
+ gst_video_guess_framerate (avg_duration,
+ &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
+ if (CUR_STREAM (stream)->fps_d == 0)
+ fps_available = FALSE;
+#else
+ fps_available =
+ gst_video_guess_framerate (avg_duration,
+ &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
+#endif
GST_DEBUG_OBJECT (qtdemux,
"Calculating framerate, timescale %u gave fps_n %d fps_d %d",
}
}
- else if (stream->subtype == FOURCC_clcp) {
- gboolean fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
+ else if (stream->subtype == FOURCC_clcp && CUR_STREAM (stream)->caps) {
+ const GstStructure *s;
+ QtDemuxStream *fps_stream = NULL;
+ gboolean fps_available = FALSE;
- if (CUR_STREAM (stream)->caps) {
- CUR_STREAM (stream)->caps =
- gst_caps_make_writable (CUR_STREAM (stream)->caps);
+ /* CEA608 closed caption tracks are a bit special in that each sample
+ * can contain CCs for multiple frames, and CCs can be omitted and have to
+ * be inferred from the duration of the sample then.
+ *
+ * As such we take the framerate from the (first) video track here for
+ * CEA608 as there must be one CC byte pair for every video frame
+ * according to the spec.
+ *
+ * For CEA708 all is fine and there is one sample per frame.
+ */
- /* set framerate if calculated framerate is reliable */
- if (fps_available) {
- gst_caps_set_simple (CUR_STREAM (stream)->caps,
- "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
- CUR_STREAM (stream)->fps_d, NULL);
+ s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (gst_structure_has_name (s, "closedcaption/x-cea-608")) {
+ gint i;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *tmp = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (tmp->subtype == FOURCC_vide) {
+ fps_stream = tmp;
+ break;
+ }
+ }
+
+ if (fps_stream) {
+ fps_available = gst_qtdemux_guess_framerate (qtdemux, fps_stream);
+ CUR_STREAM (stream)->fps_n = CUR_STREAM (fps_stream)->fps_n;
+ CUR_STREAM (stream)->fps_d = CUR_STREAM (fps_stream)->fps_d;
}
+ } else {
+ fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
+ fps_stream = stream;
+ }
+
+ CUR_STREAM (stream)->caps =
+ gst_caps_make_writable (CUR_STREAM (stream)->caps);
+
+ /* set framerate if calculated framerate is reliable */
+ if (fps_available) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
+ CUR_STREAM (stream)->fps_d, NULL);
}
}
stream = _create_stream (qtdemux, track_id);
stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
+#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
+ if (!gst_byte_reader_skip (&tkhd, 4))
+ goto corrupt_file;
+
+ if (tkhd_version == 1) {
+ if (!gst_byte_reader_get_uint64_be (&tkhd, &stream->tkhd_duration))
+ goto corrupt_file;
+ } else {
+ guint32 dur = 0;
+ if (!gst_byte_reader_get_uint32_be (&tkhd, &dur))
+ goto corrupt_file;
+ stream->tkhd_duration = dur;
+ }
+ GST_INFO_OBJECT (qtdemux, "tkhd duration: %" G_GUINT64_FORMAT,
+ stream->tkhd_duration);
+#endif
/* need defaults for fragments */
qtdemux_parse_trex (qtdemux, stream, &dummy, &dummy, &dummy);
version = QT_UINT32 ((guint8 *) mdhd->data + 8);
GST_LOG_OBJECT (qtdemux, "track version/flags: %08x", version);
if (version == 0x01000000) {
- if (len < 38)
+ if (len < 42)
goto corrupt_file;
stream->timescale = QT_UINT32 ((guint8 *) mdhd->data + 28);
stream->duration = QT_UINT64 ((guint8 *) mdhd->data + 32);
- lang_code = QT_UINT16 ((guint8 *) mdhd->data + 36);
+ lang_code = QT_UINT16 ((guint8 *) mdhd->data + 40);
} else {
if (len < 30)
goto corrupt_file;
guint32 matrix[9];
/* version 1 uses some 64-bit ints */
+#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
+ if (!gst_byte_reader_skip (&tkhd, 16))
+#else
if (!gst_byte_reader_skip (&tkhd, 20 + value_size))
+#endif
goto corrupt_file;
if (!qtdemux_parse_transformation_matrix (qtdemux, &tkhd, matrix, "tkhd"))
fiel = NULL;
/* pick 'the' stsd child */
mp4v = qtdemux_tree_get_child_by_index (stsd, stsd_index);
- if (!stream->protected) {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != fourcc) {
+ // We should skip parsing the stsd for non-protected streams if
+ // the entry doesn't match the fourcc, since they don't change
+ // format. However, for protected streams we can have partial
+ // encryption, where parts of the stream are encrypted and parts
+ // not. For both parts of such streams, we should ensure the
+ // esds overrides are parsed for both from the stsd.
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != fourcc) {
+ if (stream->protected && QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv)
mp4v = NULL;
- }
- } else {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv) {
+ else if (!stream->protected)
mp4v = NULL;
- }
}
if (mp4v) {
}
mp4a = qtdemux_tree_get_child_by_index (stsd, stsd_index);
- if (!stream->protected) {
- } else {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv) {
- mp4v = NULL;
- }
- }
- if (stream->protected && fourcc == FOURCC_mp4a) {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_enca) {
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != fourcc) {
+ if (stream->protected && QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_enca)
mp4a = NULL;
- }
- } else {
- if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_mp4a) {
+ else if (!stream->protected)
mp4a = NULL;
- }
}
wave = NULL;
qtdemux_prepare_streams (GstQTDemux * qtdemux)
{
GstFlowReturn ret = GST_FLOW_OK;
+#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
+ guint64 tkhd_max_duration = 0;
+#endif
gint i;
GST_DEBUG_OBJECT (qtdemux, "prepare streams");
} else {
/* discard any stray moof */
qtdemux->moof_offset = 0;
+#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
+ if (tkhd_max_duration < stream->tkhd_duration)
+ tkhd_max_duration = stream->tkhd_duration;
+#endif
}
/* prepare braking */
}
}
+#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
+ if (!qtdemux->fragmented && (qtdemux->duration > tkhd_max_duration)) {
+ GST_INFO_OBJECT (qtdemux,
+ "Update duration: %" G_GUINT64_FORMAT " -> %" G_GUINT64_FORMAT,
+ qtdemux->duration, tkhd_max_duration);
+ qtdemux->duration = tkhd_max_duration;
+ }
+#endif
+
return ret;
}
return GST_FLOW_ERROR;
}
- g_ptr_array_remove_range (qtdemux->old_streams,
- 0, qtdemux->old_streams->len);
-
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
qtdemux->need_segment = TRUE;
return GST_FLOW_OK;
}
}
- g_ptr_array_remove_range (qtdemux->old_streams, 0, qtdemux->old_streams->len);
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
/* check if we should post a redirect in case there is a single trak
* and it is a redirecting trak */
_codec ("CEA 608 Closed Caption");
caps =
gst_caps_new_simple ("closedcaption/x-cea-608", "format",
- G_TYPE_STRING, "cc_data", NULL);
+ G_TYPE_STRING, "s334-1a", NULL);
stream->need_process = TRUE;
+ stream->need_split = TRUE;
break;
case FOURCC_c708:
_codec ("CEA 708 Closed Caption");