n = gst_caps_get_size (caps);
for (i = 0; i < n; i++) {
const GstStructure *s = gst_caps_get_structure (caps, i);
+ const GValue *framerate = gst_structure_get_value (s, "framerate");
if (gst_structure_has_name (s, "closedcaption/x-cea-608")) {
- const GValue *framerate;
-
- framerate = gst_structure_get_value (s, "framerate");
if (direction == GST_PAD_SRC) {
/* SRC direction: We produce upstream caps
*
* We can convert everything to CEA608.
*/
- if (framerate) {
- GstCaps *tmp;
-
- tmp =
- gst_caps_merge (gst_static_caps_get (&cdp_caps),
- gst_static_caps_get (&non_cdp_caps));
- tmp = gst_caps_make_writable (tmp);
- gst_caps_set_value (tmp, "framerate", framerate);
- res = gst_caps_merge (res, tmp);
- } else {
- res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps));
- res = gst_caps_merge (res, gst_static_caps_get (&non_cdp_caps));
- }
+ res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps_framerate));
+ res = gst_caps_merge (res, gst_static_caps_get (&non_cdp_caps));
} else {
/* SINK: We produce downstream caps
*
*/
if (framerate) {
GstCaps *tmp;
- GstStructure *t, *u;
+ GstStructure *t;
/* Create caps that contain the intersection of all framerates with
* the CDP allowed framerates */
t = gst_caps_get_structure (tmp, 0);
gst_structure_set_name (t, "closedcaption/x-cea-608");
gst_structure_remove_field (t, "format");
- u = gst_structure_intersect (s, t);
- gst_caps_unref (tmp);
-
- if (u) {
- const GValue *cdp_framerate;
+ if (gst_structure_can_intersect (s, t)) {
+ gst_caps_unref (tmp);
- /* There's an intersection between the framerates so we can convert
- * into CDP with exactly those framerates */
- cdp_framerate = gst_structure_get_value (u, "framerate");
- tmp = gst_caps_make_writable (gst_static_caps_get (&cdp_caps));
- gst_caps_set_value (tmp, "framerate", cdp_framerate);
- gst_structure_free (u);
+ tmp =
+ gst_caps_make_writable (gst_static_caps_get
+ (&cdp_caps_framerate));
res = gst_caps_merge (res, tmp);
+ } else {
+ gst_caps_unref (tmp);
}
-
/* And we can convert to everything else with the given framerate */
tmp = gst_caps_make_writable (gst_static_caps_get (&non_cdp_caps));
gst_caps_set_value (tmp, "framerate", framerate);
}
}
} else if (gst_structure_has_name (s, "closedcaption/x-cea-708")) {
- const GValue *framerate;
-
- framerate = gst_structure_get_value (s, "framerate");
-
if (direction == GST_PAD_SRC) {
/* SRC direction: We produce upstream caps
*
/* Downstream wants only CDP */
/* We need CDP from upstream in that case */
- if (framerate) {
- GstCaps *tmp;
-
- tmp = gst_caps_make_writable (gst_static_caps_get (&cdp_caps));
- gst_caps_set_value (tmp, "framerate", framerate);
- res = gst_caps_merge (res, tmp);
- } else {
- res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps));
- }
+ res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps_framerate));
/* Or anything else with a CDP framerate */
if (framerate) {
GstCaps *tmp;
- GstStructure *t, *u;
+ GstStructure *t;
+ const GValue *cdp_framerate;
/* Create caps that contain the intersection of all framerates with
* the CDP allowed framerates */
gst_caps_make_writable (gst_static_caps_get
(&cdp_caps_framerate));
t = gst_caps_get_structure (tmp, 0);
- gst_structure_set_name (t, "closedcaption/x-cea-708");
- gst_structure_remove_field (t, "format");
- u = gst_structure_intersect (s, t);
- gst_caps_unref (tmp);
-
- if (u) {
- const GValue *cdp_framerate;
- /* There's an intersection between the framerates so we can convert
- * into CDP with exactly those framerates from anything else */
- cdp_framerate = gst_structure_get_value (u, "framerate");
-
- tmp =
- gst_caps_make_writable (gst_static_caps_get (&non_cdp_caps));
- gst_caps_set_value (tmp, "framerate", cdp_framerate);
- res = gst_caps_merge (res, tmp);
- }
+ /* There's an intersection between the framerates so we can convert
+ * into CDP with exactly those framerates from anything else */
+ cdp_framerate = gst_structure_get_value (t, "framerate");
+ tmp = gst_caps_make_writable (gst_static_caps_get (&non_cdp_caps));
+ gst_caps_set_value (tmp, "framerate", cdp_framerate);
+ res = gst_caps_merge (res, tmp);
} else {
GstCaps *tmp, *cdp_caps;
const GValue *cdp_framerate;
}
} else {
/* Downstream wants not only CDP, we can do everything */
-
- if (framerate) {
- GstCaps *tmp;
-
- tmp =
- gst_caps_merge (gst_static_caps_get (&cdp_caps),
- gst_static_caps_get (&non_cdp_caps));
- tmp = gst_caps_make_writable (tmp);
- gst_caps_set_value (tmp, "framerate", framerate);
- res = gst_caps_merge (res, tmp);
- } else {
- res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps));
- res = gst_caps_merge (res, gst_static_caps_get (&non_cdp_caps));
- }
+ res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps_framerate));
+ res = gst_caps_merge (res, gst_static_caps_get (&non_cdp_caps));
}
} else {
GstCaps *tmp;
gst_caps_get_structure (gst_static_caps_get (&cdp_caps), 0))) {
/* Upstream provided CDP caps, we can do everything independent of
* framerate */
- if (framerate) {
- tmp = gst_caps_make_writable (gst_static_caps_get (&cdp_caps));
- gst_caps_set_value (tmp, "framerate", framerate);
- res = gst_caps_merge (res, tmp);
- } else {
- res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps));
- }
+ res = gst_caps_merge (res, gst_static_caps_get (&cdp_caps_framerate));
} else if (framerate) {
- GstStructure *t, *u;
+ const GValue *cdp_framerate;
+ GstStructure *t;
/* Upstream did not provide CDP. We can only do CDP if upstream
* happened to have a CDP framerate */
gst_caps_make_writable (gst_static_caps_get
(&cdp_caps_framerate));
t = gst_caps_get_structure (tmp, 0);
- gst_structure_set_name (t, "closedcaption/x-cea-708");
- gst_structure_remove_field (t, "format");
- u = gst_structure_intersect (s, t);
- gst_caps_unref (tmp);
-
- if (u) {
- const GValue *cdp_framerate;
-
- /* There's an intersection between the framerates so we can convert
- * into CDP with exactly those framerates */
- cdp_framerate = gst_structure_get_value (u, "framerate");
- tmp = gst_caps_make_writable (gst_static_caps_get (&cdp_caps));
- gst_caps_set_value (tmp, "framerate", cdp_framerate);
- gst_structure_free (u);
- res = gst_caps_merge (res, tmp);
- }
- }
+ /* There's an intersection between the framerates so we can convert
+ * into CDP with exactly those framerates */
+ cdp_framerate = gst_structure_get_value (t, "framerate");
+ gst_caps_set_value (tmp, "framerate", cdp_framerate);
- /* We can always convert CEA708 to all non-CDP formats */
- if (framerate) {
- tmp = gst_caps_make_writable (gst_static_caps_get (&non_cdp_caps));
- gst_caps_set_value (tmp, "framerate", framerate);
res = gst_caps_merge (res, tmp);
- } else {
- res = gst_caps_merge (res, gst_static_caps_get (&non_cdp_caps));
}
+ /* We can always convert CEA708 to all non-CDP formats */
+ res = gst_caps_merge (res, gst_static_caps_get (&non_cdp_caps));
}
} else {
g_assert_not_reached ();
}
}
+ GST_DEBUG_OBJECT (self, "pre filter caps %" GST_PTR_FORMAT, res);
+
/* We can convert anything into anything but it might involve loss of
* information so always filter according to the order in our template caps
* in the end */
gst_caps_unref (templ);
- GST_DEBUG_OBJECT (self,
- "Transformed in direction %s caps %" GST_PTR_FORMAT " to %"
- GST_PTR_FORMAT, direction == GST_PAD_SRC ? "src" : "sink", caps, res);
+ GST_DEBUG_OBJECT (self, "Transformed in direction %s caps %" GST_PTR_FORMAT,
+ direction == GST_PAD_SRC ? "src" : "sink", caps);
+ GST_DEBUG_OBJECT (self, "filter %" GST_PTR_FORMAT, filter);
+ GST_DEBUG_OBJECT (self, "to %" GST_PTR_FORMAT, res);
return res;
}
const GValue *framerate;
GstCaps *intersection, *templ;
+ GST_DEBUG_OBJECT (self, "Fixating in direction %s incaps %" GST_PTR_FORMAT,
+ direction == GST_PAD_SRC ? "src" : "sink", incaps);
+ GST_DEBUG_OBJECT (self, "and outcaps %" GST_PTR_FORMAT, outcaps);
+
/* Prefer passthrough if we can */
if (gst_caps_is_subset (incaps, outcaps)) {
gst_caps_unref (outcaps);
GST_BASE_TRANSFORM_CLASS (parent_class)->fixate_caps (base, direction,
incaps, outcaps);
- if (direction == GST_PAD_SRC)
- return outcaps;
-
- /* if we generate caps for the source pad, pass through any framerate
- * upstream might've given us and remove any framerate that might've
- * been added by basetransform due to intersecting with downstream */
+ /* remove any framerate that might've been added by basetransform due to
+ * intersecting with downstream */
s = gst_caps_get_structure (incaps, 0);
framerate = gst_structure_get_value (s, "framerate");
outcaps = gst_caps_make_writable (outcaps);
t = gst_caps_get_structure (outcaps, 0);
- if (framerate) {
- gst_structure_set_value (t, "framerate", framerate);
- } else {
+ if (!framerate) {
gst_structure_remove_field (t, "framerate");
}
&self->out_fps_d))
self->out_fps_n = self->out_fps_d = 0;
+ gst_video_time_code_clear (&self->current_output_timecode);
+
/* Caps can be different but we can passthrough as long as they can
* intersect, i.e. have same caps name and format */
passthrough = gst_caps_can_intersect (incaps, outcaps);
return &null_fps_entry;
}
/* Computes the fraction scale_n/scale_d relating the input CDP frame rate
 * (from @in_fps_entry) to the negotiated output frame rate
 * (self->out_fps_n/self->out_fps_d), i.e. how input frames map onto output
 * frames.  Falls back to 1/1 when either rate is unknown. */
+static void
+get_framerate_output_scale (GstCCConverter * self,
+ const struct cdp_fps_entry *in_fps_entry, gint * scale_n, gint * scale_d)
+{
/* NOTE(review): guard tests in_fps_n and out_fps_d — presumably "input rate
 * unset or output rate unset"; confirm out_fps_n and out_fps_d are always
 * set/cleared together so out_fps_n == 0 is covered as well. */
+ if (self->in_fps_n == 0 || self->out_fps_d == 0) {
+ *scale_n = 1;
+ *scale_d = 1;
+ return;
+ }
+
+ /* compute the relative rates of the two framerates */
+ if (!gst_util_fraction_multiply (in_fps_entry->fps_d, in_fps_entry->fps_n,
+ self->out_fps_n, self->out_fps_d, scale_n, scale_d))
+ /* we should never overflow */
+ g_assert_not_reached ();
+}
+
+static gboolean
+interpolate_time_code_with_framerate (GstCCConverter * self,
+ const GstVideoTimeCode * tc, gint out_fps_n, gint out_fps_d,
+ gint scale_n, gint scale_d, GstVideoTimeCode * out)
+{
+ gchar *tc_str;
+ gint output_n, output_d;
+ guint output_frame;
+ GstVideoTimeCodeFlags flags;
+
+ g_return_val_if_fail (tc != NULL, FALSE);
+ g_return_val_if_fail (out != NULL, FALSE);
+ /* out_n/d can only be 0 if scale_n/d are 1/1 */
+ g_return_val_if_fail ((scale_n == 1 && scale_d == 1) || (out_fps_n != 0
+ && out_fps_d != 0), FALSE);
+
+ if (!tc || tc->config.fps_n == 0)
+ return FALSE;
+
+ gst_util_fraction_multiply (tc->frames, 1, scale_n, scale_d, &output_n,
+ &output_d);
+
+ tc_str = gst_video_time_code_to_string (tc);
+ GST_TRACE_OBJECT (self, "interpolating time code %s with scale %d/%d "
+ "to frame %d/%d", tc_str, scale_n, scale_d, output_n, output_d);
+ g_free (tc_str);
+
+ if (out_fps_n == 0 || out_fps_d == 0) {
+ out_fps_n = tc->config.fps_n;
+ out_fps_d = tc->config.fps_d;
+ }
+
+ flags = tc->config.flags;
+ if ((flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) != 0 && out_fps_d != 1001
+ && out_fps_n != 60000 && out_fps_n != 30000) {
+ flags &= ~GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME;
+ } else if ((flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) == 0
+ && out_fps_d == 1001 && (out_fps_n == 60000 || out_fps_n == 30000)) {
+ /* XXX: theoretically, not quite correct however this is an assumption
+ * we have elsewhere that these framerates are always drop-framed */
+ flags |= GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME;
+ }
+
+ output_frame = output_n / output_d;
+
+ *out = (GstVideoTimeCode) GST_VIDEO_TIME_CODE_INIT;
+ do {
+ /* here we try to find the next available valid timecode. The dropped
+ * (when they exist) frames in time codes are that the beginning of each
+ * minute */
+ gst_video_time_code_clear (out);
+ gst_video_time_code_init (out, out_fps_n, out_fps_d,
+ tc->config.latest_daily_jam, flags, tc->hours, tc->minutes,
+ tc->seconds, output_frame, tc->field_count);
+ output_frame++;
+ } while ((flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) != 0
+ && output_frame < 10 && !gst_video_time_code_is_valid (out));
+
+ tc_str = gst_video_time_code_to_string (out);
+ GST_TRACE_OBJECT (self, "interpolated to %s", tc_str);
+ g_free (tc_str);
+
+ return TRUE;
+}
+
+/* takes cc_data and cc_data_len and attempts to fit it into a hypothetical
+ * output packet. Any leftover data is stored for later addition. Returns
+ * the number of bytes of @cc_data to place in a new output packet */
+static gint
+fit_and_scale_cc_data (GstCCConverter * self,
+ const struct cdp_fps_entry *in_fps_entry,
+ const struct cdp_fps_entry *out_fps_entry, const guint8 * cc_data,
+ guint cc_data_len, const GstVideoTimeCode * tc)
+{
+ if (!in_fps_entry || in_fps_entry->fps_n == 0) {
+ in_fps_entry = cdp_fps_entry_from_fps (self->in_fps_n, self->in_fps_d);
+ if (!in_fps_entry || in_fps_entry->fps_n == 0)
+ g_assert_not_reached ();
+ }
+
+ /* This is slightly looser than checking for the exact framerate as the cdp
+ * spec allow for 0.1% difference between framerates to be considered equal */
+ if (in_fps_entry->max_cc_count == out_fps_entry->max_cc_count) {
+ if (tc && tc->config.fps_n != 0)
+ interpolate_time_code_with_framerate (self, tc, out_fps_entry->fps_n,
+ out_fps_entry->fps_d, 1, 1, &self->current_output_timecode);
+ } else {
+ int input_frame_n, input_frame_d, output_frame_n, output_frame_d;
+ int output_time_cmp, scale_n, scale_d, rate_cmp;
+
+ /* TODO: handle input discont */
+
+ /* compute the relative frame count for each */
+ if (!gst_util_fraction_multiply (self->in_fps_d, self->in_fps_n,
+ self->input_frames, 1, &input_frame_n, &input_frame_d))
+ /* we should never overflow */
+ g_assert_not_reached ();
+
+ if (!gst_util_fraction_multiply (self->out_fps_d, self->out_fps_n,
+ self->output_frames + 1, 1, &output_frame_n, &output_frame_d))
+ /* we should never overflow */
+ g_assert_not_reached ();
+
+ output_time_cmp = gst_util_fraction_compare (input_frame_n, input_frame_d,
+ output_frame_n, output_frame_d);
+
+ /* compute the relative rates of the two framerates */
+ get_framerate_output_scale (self, in_fps_entry, &scale_n, &scale_d);
+
+ rate_cmp = gst_util_fraction_compare (scale_n, scale_d, 1, 1);
+
+ GST_TRACE_OBJECT (self, "performing framerate conversion at scale %d/%d "
+ "of cc data", scale_n, scale_d);
+
+ if (rate_cmp == 0) {
+ /* we are not scaling. Should never happen with current conditions
+ * above */
+ g_assert_not_reached ();
+ } else if (output_time_cmp == 0) {
+ /* we have completed a cycle and can reset our counters to avoid
+ * overflow. Anything that fits into the output packet will be written */
+ GST_LOG_OBJECT (self, "cycle completed, resetting frame counters");
+ self->scratch_len = 0;
+ self->input_frames = self->output_frames = 0;
+ if (tc->config.fps_n != 0) {
+ interpolate_time_code_with_framerate (self, tc, out_fps_entry->fps_n,
+ out_fps_entry->fps_d, scale_n, scale_d,
+ &self->current_output_timecode);
+ }
+ } else if (output_time_cmp < 0) {
+ /* we can't generate an output yet */
+ self->scratch_len = cc_data_len;
+ GST_DEBUG_OBJECT (self, "holding cc_data of len %u until next input "
+ "buffer", self->scratch_len);
+ memcpy (self->scratch, cc_data, self->scratch_len);
+ return 0;
+ } else if (rate_cmp != 0) {
+ /* we are changing the framerate and may overflow the max output packet
+ * size. Split them where necessary. */
+
+ if (cc_data_len / 3 > out_fps_entry->max_cc_count) {
+ /* packet would overflow, push extra bytes into the next packet */
+ self->scratch_len = cc_data_len - 3 * out_fps_entry->max_cc_count;
+ GST_DEBUG_OBJECT (self, "buffer would overflow by %u bytes (max "
+ "length %u)", self->scratch_len, 3 * out_fps_entry->max_cc_count);
+ memcpy (self->scratch, &cc_data[3 * out_fps_entry->max_cc_count],
+ self->scratch_len);
+ cc_data_len = 3 * out_fps_entry->max_cc_count;
+ } else {
+ GST_DEBUG_OBJECT (self, "packet length of %u fits within max output "
+ "packet size %u", cc_data_len, 3 * out_fps_entry->max_cc_count);
+ self->scratch_len = 0;
+ }
+ } else {
+ g_assert_not_reached ();
+ }
+
+ if (tc && tc->config.fps_n != 0)
+ interpolate_time_code_with_framerate (self, tc, out_fps_entry->fps_n,
+ out_fps_entry->fps_d, scale_n, scale_d,
+ &self->current_output_timecode);
+ }
+
+ return cc_data_len;
+}
+
/* Converts raw CEA708 cc_data and an optional timecode into CDP */
static guint
convert_cea708_cc_data_cea708_cdp_internal (GstCCConverter * self,
guint8 flags, checksum;
guint i, len;
+ GST_DEBUG_OBJECT (self, "writing out cdp packet from cc_data with length %u",
+ cc_data_len);
+
gst_byte_writer_init_with_data (&bw, cdp, cdp_len, FALSE);
gst_byte_writer_put_uint16_be_unchecked (&bw, 0x9669);
/* Write a length of 0 for now */
flags = 0x42;
/* time_code_present */
- if (tc)
+ if (tc && tc->config.fps_n > 0)
flags |= 0x80;
/* reserved */
gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
- if (tc) {
+ if (tc && tc->config.fps_n > 0) {
gst_byte_writer_put_uint8_unchecked (&bw, 0x71);
gst_byte_writer_put_uint8_unchecked (&bw, 0xc0 |
(((tc->hours % 10) & 0x3) << 4) |
static guint
convert_cea708_cdp_cea708_cc_data_internal (GstCCConverter * self,
const guint8 * cdp, guint cdp_len, guint8 cc_data[MAX_CDP_PACKET_LEN],
- GstVideoTimeCode * tc, const struct cdp_fps_entry ** out_fps_entry)
+ GstVideoTimeCode * tc, const struct cdp_fps_entry **out_fps_entry)
{
GstByteReader br;
guint16 u16;
return len;
}
+static guint
+cdp_to_cc_data (GstCCConverter * self, GstBuffer * inbuf, guint8 * out,
+ guint out_size, GstVideoTimeCode * out_tc,
+ const struct cdp_fps_entry **out_fps_entry)
+{
+ GstMapInfo in;
+ guint len = 0;
+
+ if (self->scratch_len > 0) {
+ GST_DEBUG_OBJECT (self, "copying from previous scratch buffer of %u bytes",
+ self->scratch_len);
+ memcpy (&out[len], self->scratch, self->scratch_len);
+ len += self->scratch_len;
+ }
+
+ if (inbuf) {
+ guint cc_data_len;
+
+ gst_buffer_map (inbuf, &in, GST_MAP_READ);
+
+ cc_data_len =
+ convert_cea708_cdp_cea708_cc_data_internal (self, in.data, in.size,
+ &out[len], out_tc, out_fps_entry);
+ if (cc_data_len / 3 > (*out_fps_entry)->max_cc_count) {
+ GST_WARNING_OBJECT (self, "Too many cc_data triples in CDP packet %u",
+ cc_data_len / 3);
+ cc_data_len = 3 * (*out_fps_entry)->max_cc_count;
+ }
+ len += cc_data_len;
+
+ gst_buffer_unmap (inbuf, &in);
+ self->input_frames++;
+ }
+
+ return len;
+}
static GstFlowReturn
convert_cea608_raw_cea608_s334_1a (GstCCConverter * self, GstBuffer * inbuf,
g_assert_not_reached ();
tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
- len =
- convert_cea708_cc_data_cea708_cdp_internal (self, cc_data, n * 3,
- out.data, out.size, tc_meta ? &tc_meta->tc : NULL, fps_entry);
+
+ len = fit_and_scale_cc_data (self, NULL, fps_entry, cc_data,
+ n * 3, tc_meta ? &tc_meta->tc : NULL);
+ if (len > 0) {
+ len =
+ convert_cea708_cc_data_cea708_cdp_internal (self, cc_data, len,
+ out.data, out.size, &self->current_output_timecode, fps_entry);
+ }
gst_buffer_unmap (inbuf, &in);
gst_buffer_unmap (outbuf, &out);
g_assert_not_reached ();
tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
- len =
- convert_cea708_cc_data_cea708_cdp_internal (self, cc_data, n * 3,
- out.data, out.size, tc_meta ? &tc_meta->tc : NULL, fps_entry);
+
+ len = fit_and_scale_cc_data (self, NULL, fps_entry, cc_data,
+ n * 3, tc_meta ? &tc_meta->tc : NULL);
+ if (len > 0) {
+ len =
+ convert_cea708_cc_data_cea708_cdp_internal (self, cc_data, len,
+ out.data, out.size, &self->current_output_timecode, fps_entry);
+ }
gst_buffer_unmap (inbuf, &in);
gst_buffer_unmap (outbuf, &out);
g_assert_not_reached ();
tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
- len =
- convert_cea708_cc_data_cea708_cdp_internal (self, in.data, in.size,
- out.data, out.size, tc_meta ? &tc_meta->tc : NULL, fps_entry);
+
+ len = fit_and_scale_cc_data (self, NULL, fps_entry, in.data,
+ in.size, tc_meta ? &tc_meta->tc : NULL);
+ if (len > 0) {
+ len =
+ convert_cea708_cc_data_cea708_cdp_internal (self, in.data, len,
+ out.data, out.size, &self->current_output_timecode, fps_entry);
+ }
gst_buffer_unmap (inbuf, &in);
gst_buffer_unmap (outbuf, &out);
convert_cea708_cdp_cea608_raw (GstCCConverter * self, GstBuffer * inbuf,
GstBuffer * outbuf)
{
- GstMapInfo in, out;
- guint i;
- GstVideoTimeCode tc;
- guint8 cc_data[MAX_CDP_PACKET_LEN];
- guint len, cea608 = 0;
- const struct cdp_fps_entry *fps_entry;
-
- gst_buffer_map (inbuf, &in, GST_MAP_READ);
- gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+ GstMapInfo out;
+ GstVideoTimeCode tc = GST_VIDEO_TIME_CODE_INIT;
+ guint i, len = 0, cea608 = 0;
+ const struct cdp_fps_entry *in_fps_entry = NULL, *out_fps_entry;
+ guint8 cc_data[MAX_CDP_PACKET_LEN] = { 0, };
len =
- convert_cea708_cdp_cea708_cc_data_internal (self, in.data, in.size,
- cc_data, &tc, &fps_entry);
- len /= 3;
-
- if (len > fps_entry->max_cc_count) {
- GST_WARNING_OBJECT (self, "Too many cc_data triples in CDP packet %u", len);
- len = fps_entry->max_cc_count;
- }
-
- for (i = 0; i < len; i++) {
- /* We can only really copy the first field here as there can't be any
- * signalling in raw CEA608 and we must not mix the streams of different
- * fields
- */
- if (cc_data[i * 3] == 0xfc) {
- out.data[cea608 * 2] = cc_data[i * 3 + 1];
- out.data[cea608 * 2 + 1] = cc_data[i * 3 + 2];
- cea608++;
+ cdp_to_cc_data (self, inbuf, cc_data, sizeof (cc_data), &tc,
+ &in_fps_entry);
+
+ out_fps_entry = cdp_fps_entry_from_fps (self->out_fps_n, self->out_fps_d);
+ if (!out_fps_entry || out_fps_entry->fps_n == 0)
+ out_fps_entry = in_fps_entry;
+
+ len = fit_and_scale_cc_data (self, in_fps_entry, out_fps_entry, cc_data, len,
+ &tc);
+ if (len > 0) {
+ len /= 3;
+
+ gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+
+ for (i = 0; i < len; i++) {
+ /* We can only really copy the first field here as there can't be any
+ * signalling in raw CEA608 and we must not mix the streams of different
+ * fields
+ */
+ if (cc_data[i * 3] == 0xfc) {
+ out.data[cea608 * 2] = cc_data[i * 3 + 1];
+ out.data[cea608 * 2 + 1] = cc_data[i * 3 + 2];
+ cea608++;
+ }
}
- }
- gst_buffer_unmap (inbuf, &in);
- gst_buffer_unmap (outbuf, &out);
+ gst_buffer_unmap (outbuf, &out);
+ }
gst_buffer_set_size (outbuf, 2 * cea608);
- if (tc.config.fps_n != 0 && !gst_buffer_get_video_time_code_meta (inbuf))
- gst_buffer_add_video_time_code_meta (outbuf, &tc);
+ if (self->current_output_timecode.config.fps_n != 0
+ && !gst_buffer_get_video_time_code_meta (inbuf)) {
+ gst_buffer_add_video_time_code_meta (outbuf,
+ &self->current_output_timecode);
+ gst_video_time_code_increment_frame (&self->current_output_timecode);
+ }
return GST_FLOW_OK;
}
convert_cea708_cdp_cea608_s334_1a (GstCCConverter * self, GstBuffer * inbuf,
GstBuffer * outbuf)
{
- GstMapInfo in, out;
- guint i;
- GstVideoTimeCode tc;
- guint8 cc_data[MAX_CDP_PACKET_LEN];
- guint len, cea608 = 0;
- const struct cdp_fps_entry *fps_entry;
-
- gst_buffer_map (inbuf, &in, GST_MAP_READ);
- gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+ GstMapInfo out;
+ GstVideoTimeCode tc = GST_VIDEO_TIME_CODE_INIT;
+ guint i, len = 0, cea608 = 0;
+ const struct cdp_fps_entry *in_fps_entry = NULL, *out_fps_entry;
+ guint8 cc_data[MAX_CDP_PACKET_LEN] = { 0, };
len =
- convert_cea708_cdp_cea708_cc_data_internal (self, in.data, in.size,
- cc_data, &tc, &fps_entry);
- len /= 3;
-
- if (len > fps_entry->max_cc_count) {
- GST_WARNING_OBJECT (self, "Too many cc_data triples in CDP packet %u", len);
- len = fps_entry->max_cc_count;
- }
-
- for (i = 0; i < len; i++) {
- if (cc_data[i * 3] == 0xfc || cc_data[i * 3] == 0xfd) {
- /* We have to assume a line offset of 0 */
- out.data[cea608 * 3] = cc_data[i * 3] == 0xfc ? 0x80 : 0x00;
- out.data[cea608 * 3 + 1] = cc_data[i * 3 + 1];
- out.data[cea608 * 3 + 2] = cc_data[i * 3 + 2];
- cea608++;
+ cdp_to_cc_data (self, inbuf, cc_data, sizeof (cc_data), &tc,
+ &in_fps_entry);
+
+ out_fps_entry = cdp_fps_entry_from_fps (self->out_fps_n, self->out_fps_d);
+ if (!out_fps_entry || out_fps_entry->fps_n == 0)
+ out_fps_entry = in_fps_entry;
+
+ len = fit_and_scale_cc_data (self, in_fps_entry, out_fps_entry, cc_data, len,
+ &tc);
+ if (len > 0) {
+ len /= 3;
+
+ gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+ for (i = 0; i < len; i++) {
+ if (cc_data[i * 3] == 0xfc || cc_data[i * 3] == 0xfd) {
+ /* We have to assume a line offset of 0 */
+ out.data[cea608 * 3] = cc_data[i * 3] == 0xfc ? 0x80 : 0x00;
+ out.data[cea608 * 3 + 1] = cc_data[i * 3 + 1];
+ out.data[cea608 * 3 + 2] = cc_data[i * 3 + 2];
+ cea608++;
+ }
}
+ gst_buffer_unmap (outbuf, &out);
+ self->output_frames++;
}
- gst_buffer_unmap (inbuf, &in);
- gst_buffer_unmap (outbuf, &out);
-
gst_buffer_set_size (outbuf, 3 * cea608);
- if (tc.config.fps_n != 0 && !gst_buffer_get_video_time_code_meta (inbuf))
- gst_buffer_add_video_time_code_meta (outbuf, &tc);
+ if (self->current_output_timecode.config.fps_n != 0
+ && !gst_buffer_get_video_time_code_meta (inbuf)) {
+ gst_buffer_add_video_time_code_meta (outbuf,
+ &self->current_output_timecode);
+ gst_video_time_code_increment_frame (&self->current_output_timecode);
+ }
return GST_FLOW_OK;
}
convert_cea708_cdp_cea708_cc_data (GstCCConverter * self, GstBuffer * inbuf,
GstBuffer * outbuf)
{
- GstMapInfo in, out;
- GstVideoTimeCode tc;
- guint len;
- const struct cdp_fps_entry *fps_entry;
-
- gst_buffer_map (inbuf, &in, GST_MAP_READ);
- gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+ GstMapInfo out;
+ GstVideoTimeCode tc = GST_VIDEO_TIME_CODE_INIT;
+ guint len = 0;
+ const struct cdp_fps_entry *in_fps_entry = NULL, *out_fps_entry;
+ guint8 cc_data[MAX_CDP_PACKET_LEN] = { 0, };
len =
- convert_cea708_cdp_cea708_cc_data_internal (self, in.data, in.size,
- out.data, &tc, &fps_entry);
-
- gst_buffer_unmap (inbuf, &in);
- gst_buffer_unmap (outbuf, &out);
+ cdp_to_cc_data (self, inbuf, cc_data, sizeof (cc_data), &tc,
+ &in_fps_entry);
+
+ out_fps_entry = cdp_fps_entry_from_fps (self->out_fps_n, self->out_fps_d);
+ if (!out_fps_entry || out_fps_entry->fps_n == 0)
+ out_fps_entry = in_fps_entry;
+
+ len = fit_and_scale_cc_data (self, in_fps_entry, out_fps_entry, cc_data, len,
+ &tc);
+ if (len > 0) {
+ gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+ memcpy (out.data, cc_data, len);
+ gst_buffer_unmap (outbuf, &out);
+ self->output_frames++;
+ }
- if (len / 3 > fps_entry->max_cc_count) {
- GST_WARNING_OBJECT (self, "Too many cc_data triples in CDP packet %u",
- len / 3);
- len = 3 * fps_entry->max_cc_count;
+ if (self->current_output_timecode.config.fps_n != 0
+ && !gst_buffer_get_video_time_code_meta (inbuf)) {
+ gst_buffer_add_video_time_code_meta (outbuf,
+ &self->current_output_timecode);
+ gst_video_time_code_increment_frame (&self->current_output_timecode);
}
gst_buffer_set_size (outbuf, len);
- if (tc.config.fps_n != 0 && !gst_buffer_get_video_time_code_meta (inbuf))
- gst_buffer_add_video_time_code_meta (outbuf, &tc);
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+convert_cea708_cdp_cea708_cdp (GstCCConverter * self, GstBuffer * inbuf,
+ GstBuffer * outbuf)
+{
+ GstMapInfo out;
+ GstVideoTimeCode tc = GST_VIDEO_TIME_CODE_INIT;
+ guint len = 0;
+ const struct cdp_fps_entry *in_fps_entry = NULL, *out_fps_entry;
+ guint8 cc_data[MAX_CDP_PACKET_LEN] = { 0, };
+
+ len =
+ cdp_to_cc_data (self, inbuf, cc_data, sizeof (cc_data), &tc,
+ &in_fps_entry);
+
+ out_fps_entry = cdp_fps_entry_from_fps (self->out_fps_n, self->out_fps_d);
+ if (!out_fps_entry || out_fps_entry->fps_n == 0)
+ out_fps_entry = in_fps_entry;
+
+ len = fit_and_scale_cc_data (self, in_fps_entry, out_fps_entry, cc_data, len,
+ &tc);
+ if (len > 0) {
+ gst_buffer_map (outbuf, &out, GST_MAP_WRITE);
+ len =
+ convert_cea708_cc_data_cea708_cdp_internal (self, cc_data, len,
+ out.data, out.size, &self->current_output_timecode, out_fps_entry);
+
+ gst_buffer_unmap (outbuf, &out);
+ self->output_frames++;
+ }
+
+ gst_buffer_set_size (outbuf, len);
return GST_FLOW_OK;
}
gst_cc_converter_transform (GstCCConverter * self, GstBuffer * inbuf,
GstBuffer * outbuf)
{
- GstVideoTimeCodeMeta *tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
+ GstVideoTimeCodeMeta *tc_meta = NULL;
GstFlowReturn ret = GST_FLOW_OK;
GST_DEBUG_OBJECT (self, "Converting %" GST_PTR_FORMAT " from %u to %u", inbuf,
self->input_caption_type, self->output_caption_type);
+ if (inbuf)
+ tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
+
+ if (tc_meta) {
+ if (self->current_output_timecode.config.fps_n <= 0) {
+ /* XXX: this assumes the input time codes are well-formed and increase
+ * at the rate of one frame for each input buffer */
+ const struct cdp_fps_entry *in_fps_entry;
+ gint scale_n, scale_d;
+
+ in_fps_entry = cdp_fps_entry_from_fps (self->in_fps_n, self->in_fps_d);
+ if (!in_fps_entry || in_fps_entry->fps_n == 0)
+ scale_n = scale_d = 1;
+ else
+ get_framerate_output_scale (self, in_fps_entry, &scale_n, &scale_d);
+
+ if (tc_meta)
+ interpolate_time_code_with_framerate (self, &tc_meta->tc,
+ self->out_fps_n, self->out_fps_d, scale_n, scale_d,
+ &self->current_output_timecode);
+ }
+ }
+
switch (self->input_caption_type) {
case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
ret = convert_cea708_cdp_cea708_cc_data (self, inbuf, outbuf);
break;
case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
+ ret = convert_cea708_cdp_cea708_cdp (self, inbuf, outbuf);
+ break;
default:
g_assert_not_reached ();
break;
break;
}
- if (ret != GST_FLOW_OK)
+ if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (self, "returning %s", gst_flow_get_name (ret));
return ret;
-
- if (tc_meta)
- gst_buffer_add_video_time_code_meta (outbuf, &tc_meta->tc);
+ }
GST_DEBUG_OBJECT (self, "Converted to %" GST_PTR_FORMAT, outbuf);
- return gst_buffer_get_size (outbuf) >
- 0 ? GST_FLOW_OK : GST_BASE_TRANSFORM_FLOW_DROPPED;
+ if (gst_buffer_get_size (outbuf) > 0) {
+ if (self->current_output_timecode.config.fps_n > 0) {
+ gst_buffer_add_video_time_code_meta (outbuf,
+ &self->current_output_timecode);
+ /* XXX: discont handling? */
+ gst_video_time_code_increment_frame (&self->current_output_timecode);
+ }
+
+ return GST_FLOW_OK;
+ } else {
+ return GST_BASE_TRANSFORM_FLOW_DROPPED;
+ }
+}
+
/* GstBaseTransform::transform_meta implementation.
 * Refuses automatic copying of video time code metas: time codes are added
 * manually from self->current_output_timecode because framerate scaling
 * changes which time code belongs on each output buffer.  Every other meta
 * is delegated to the parent class. */
+static gboolean
+gst_cc_converter_transform_meta (GstBaseTransform * base, GstBuffer * outbuf,
+ GstMeta * meta, GstBuffer * inbuf)
+{
+ const GstMetaInfo *info = meta->info;
+
+ /* we do this manually for framerate scaling */
+ if (info->api == GST_VIDEO_TIME_CODE_META_API_TYPE)
+ return FALSE;
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->transform_meta (base, outbuf,
+ meta, inbuf);
}
static GstFlowReturn
*outbuf = NULL;
base->queued_buf = NULL;
- if (!inbuf) {
+ if (!inbuf && self->scratch_len == 0) {
return GST_FLOW_OK;
}
*outbuf = inbuf;
ret = GST_FLOW_OK;
} else {
- ret = bclass->prepare_output_buffer (base, inbuf, outbuf);
-
- if (ret != GST_FLOW_OK || *outbuf == NULL)
+ *outbuf = gst_buffer_new_allocate (NULL, MAX_CDP_PACKET_LEN, NULL);
+ if (*outbuf == NULL)
goto no_buffer;
+ if (inbuf)
+ gst_buffer_replace (&self->previous_buffer, inbuf);
+
+ if (bclass->copy_metadata) {
+ if (!bclass->copy_metadata (base, self->previous_buffer, *outbuf)) {
+ /* something failed, post a warning */
+ GST_ELEMENT_WARNING (self, STREAM, NOT_IMPLEMENTED,
+ ("could not copy metadata"), (NULL));
+ }
+ }
+
ret = gst_cc_converter_transform (self, inbuf, *outbuf);
- gst_buffer_unref (inbuf);
+
+ if (inbuf)
+ gst_buffer_unref (inbuf);
}
return ret;
no_buffer:
{
- gst_buffer_unref (inbuf);
+ if (inbuf)
+ gst_buffer_unref (inbuf);
*outbuf = NULL;
- GST_WARNING_OBJECT (self, "could not get buffer from pool: %s",
- gst_flow_get_name (ret));
- return ret;
+ GST_WARNING_OBJECT (self, "could not allocate buffer");
+ return GST_FLOW_ERROR;
+ }
+}
+
+static gboolean
+gst_cc_converter_sink_event (GstBaseTransform * trans, GstEvent * event)
+{
+ GstCCConverter *self = GST_CCCONVERTER (trans);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ GST_DEBUG_OBJECT (self, "received EOS");
+
+ while (self->scratch_len > 0) {
+ GstBuffer *outbuf;
+ GstFlowReturn ret;
+
+ outbuf = gst_buffer_new_allocate (NULL, MAX_CDP_PACKET_LEN, NULL);
+
+ ret = gst_cc_converter_transform (self, NULL, outbuf);
+ if (ret == GST_BASE_TRANSFORM_FLOW_DROPPED) {
+ /* try to move the output along */
+ self->input_frames++;
+ gst_buffer_unref (outbuf);
+ continue;
+ } else if (ret != GST_FLOW_OK)
+ break;
+
+ ret = gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (trans), outbuf);
+ if (ret != GST_FLOW_OK)
+ break;
+ }
+ /* fallthrough */
+ case GST_EVENT_FLUSH_START:
+ self->scratch_len = 0;
+ self->input_frames = 0;
+ self->output_frames = 0;
+ gst_video_time_code_clear (&self->current_output_timecode);
+ gst_clear_buffer (&self->previous_buffer);
+ break;
+ default:
+ break;
}
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}
static gboolean
/* Resetting this is not really needed but makes debugging easier */
self->cdp_hdr_sequence_cntr = 0;
+ self->current_output_timecode = (GstVideoTimeCode) GST_VIDEO_TIME_CODE_INIT;
+ self->input_frames = 0;
+ self->output_frames = 0;
+ self->scratch_len = 0;
+
+ return TRUE;
+}
+
+static gboolean
+gst_cc_converter_stop (GstBaseTransform * base)
+{
+ GstCCConverter *self = GST_CCCONVERTER (base);
+
+ gst_video_time_code_clear (&self->current_output_timecode);
+ gst_clear_buffer (&self->previous_buffer);
return TRUE;
}
gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
basetransform_class->start = GST_DEBUG_FUNCPTR (gst_cc_converter_start);
+ basetransform_class->stop = GST_DEBUG_FUNCPTR (gst_cc_converter_stop);
+ basetransform_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_cc_converter_sink_event);
basetransform_class->transform_size =
GST_DEBUG_FUNCPTR (gst_cc_converter_transform_size);
basetransform_class->transform_caps =
basetransform_class->fixate_caps =
GST_DEBUG_FUNCPTR (gst_cc_converter_fixate_caps);
basetransform_class->set_caps = GST_DEBUG_FUNCPTR (gst_cc_converter_set_caps);
+ basetransform_class->transform_meta =
+ GST_DEBUG_FUNCPTR (gst_cc_converter_transform_meta);
basetransform_class->generate_output =
GST_DEBUG_FUNCPTR (gst_cc_converter_generate_output);
basetransform_class->passthrough_on_same_caps = TRUE;
#include <string.h>
+enum CheckConversionFlags
+{
+ FLAG_NONE,
+ FLAG_SEND_EOS = 1,
+};
+
GST_START_TEST (cdp_requires_framerate)
{
GstHarness *h;
GST_FLOW_NOT_NEGOTIATED);
/* Now set a framerate only on the sink caps, this should still fail:
- * We can't do framerate conversion!
+ * We can't come up with a framerate
*/
gst_harness_set_sink_caps_str (h,
"closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)30/1");
fail_unless_equals_int (gst_harness_push (h, gst_buffer_ref (buffer)),
GST_FLOW_NOT_NEGOTIATED);
- /* Then try with a framerate, this should work now */
+ /* Then try with a change of framerate, this should work */
gst_harness_set_sink_caps_str (h,
"closedcaption/x-cea-708,format=(string)cdp");
gst_harness_set_src_caps_str (h,
GST_END_TEST;
+GST_START_TEST (framerate_changes)
+{
+ GstHarness *h;
+ GstBuffer *buffer;
+ GstMapInfo map;
+
+ h = gst_harness_new ("ccconverter");
+
+ buffer = gst_buffer_new_and_alloc (3);
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ map.data[0] = 0x00;
+ map.data[1] = 0x80;
+ map.data[2] = 0x80;
+ gst_buffer_unmap (buffer, &map);
+
+ /* success case */
+ gst_harness_set_src_caps_str (h,
+ "closedcaption/x-cea-708,format=(string)cc_data,framerate=(fraction)30/1");
+ gst_harness_set_sink_caps_str (h,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1");
+ fail_unless_equals_int (gst_harness_push (h, gst_buffer_ref (buffer)),
+ GST_FLOW_OK);
+
+ /* test an invalid cdp framerate */
+ gst_harness_set_sink_caps_str (h,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)1111/1");
+ fail_unless_equals_int (gst_harness_push (h, buffer),
+ GST_FLOW_NOT_NEGOTIATED);
+
+ gst_harness_teardown (h);
+}
+
+GST_END_TEST;
+
static void
-check_conversion (const guint8 * in, guint in_len, const guint8 * out,
- guint out_len, const gchar * in_caps, const gchar * out_caps,
- const GstVideoTimeCode * in_tc, const GstVideoTimeCode * out_tc)
+check_conversion_multiple (guint n_in, const guint8 ** in, guint * in_len,
+ guint n_out, const guint8 ** out, guint * out_len, const gchar * in_caps,
+ const gchar * out_caps, const GstVideoTimeCode ** in_tc,
+ const GstVideoTimeCode ** out_tc, enum CheckConversionFlags flags)
{
GstHarness *h;
GstBuffer *buffer;
GstVideoTimeCodeMeta *out_tc_meta;
+ int i = 0;
h = gst_harness_new ("ccconverter");
gst_harness_set_src_caps_str (h, in_caps);
gst_harness_set_sink_caps_str (h, out_caps);
- buffer =
- gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, (gpointer) in,
- in_len, 0, in_len, NULL, NULL);
- if (in_tc)
- gst_buffer_add_video_time_code_meta (buffer, in_tc);
-
- buffer = gst_harness_push_and_pull (h, buffer);
-
- fail_unless (buffer != NULL);
- gst_check_buffer_data (buffer, out, out_len);
- out_tc_meta = gst_buffer_get_video_time_code_meta (buffer);
- fail_if (out_tc_meta == NULL && out_tc != NULL);
- if (out_tc_meta)
- fail_unless (gst_video_time_code_compare (&out_tc_meta->tc, out_tc) == 0);
-
- gst_buffer_unref (buffer);
+ for (i = 0; i < n_in; i++) {
+ buffer =
+ gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, (gpointer) in[i],
+ in_len[i], 0, in_len[i], NULL, NULL);
+ GST_INFO ("pushing buffer %u %" GST_PTR_FORMAT, i, buffer);
+ if (in_tc && in_tc[i])
+ gst_buffer_add_video_time_code_meta (buffer, in_tc[i]);
+ fail_unless_equals_int (gst_harness_push (h, buffer), GST_FLOW_OK);
+ }
+
+ if (flags & FLAG_SEND_EOS)
+ fail_unless (gst_harness_push_event (h, gst_event_new_eos ()));
+
+ for (i = 0; i < n_out; i++) {
+ buffer = gst_harness_pull (h);
+
+ GST_INFO ("pulled buffer %u %" GST_PTR_FORMAT, i, buffer);
+ fail_unless (buffer != NULL);
+ gst_check_buffer_data (buffer, out[i], out_len[i]);
+ out_tc_meta = gst_buffer_get_video_time_code_meta (buffer);
+ fail_if (out_tc_meta == NULL && out_tc != NULL && out_tc[i] != NULL);
+ if (out_tc_meta && out_tc && out_tc[i])
+ fail_unless (gst_video_time_code_compare (&out_tc_meta->tc,
+ out_tc[i]) == 0);
+
+ gst_buffer_unref (buffer);
+ }
gst_harness_teardown (h);
}
static void
+check_conversion (const guint8 * in, guint in_len, const guint8 * out,
+ guint out_len, const gchar * in_caps, const gchar * out_caps,
+ const GstVideoTimeCode * in_tc, const GstVideoTimeCode * out_tc)
+{
+ check_conversion_multiple (1, &in, &in_len, 1, &out, &out_len, in_caps,
+ out_caps, &in_tc, &out_tc, 0);
+}
+
+static void
check_conversion_tc_passthrough (const guint8 * in, guint in_len,
const guint8 * out, guint out_len, const gchar * in_caps,
const gchar * out_caps)
GST_START_TEST (convert_cea708_cdp_cea708_cc_data_too_big)
{
+ /* tests that too large input is truncated */
const guint8 in[] =
{ 0x96, 0x69, 0x2e, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xeb, 0xfc, 0x80, 0x80,
0xfe, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xfe, 0x80, 0x80,
GST_END_TEST;
+GST_START_TEST (convert_cea708_cdp_cea708_cdp_double_framerate)
+{
+ /* tests that packets are split exactly in half when doubling the framerate */
+ const guint8 in1[] =
+ { 0x96, 0x69, 0x49, 0x5f, 0x43, 0x00, 0x00, 0x72, 0xf4, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xfe, 0x00, 0x00, 0xfc, 0x00, 0x00, 0xfc, 0x00, 0x00,
+ 0xfc, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x00, 0xd2
+ };
+ const guint8 *in[] = { in1 };
+ guint in_len[] = { sizeof (in1) };
+ GstVideoTimeCode in_tc1;
+ const GstVideoTimeCode *in_tc[] = { &in_tc1 };
+
+ const guint8 out1[] = { 0x96, 0x69, 0x30, 0x8f, 0xc3, 0x00, 0x00, 0x71, 0xd0,
+ 0xa0, 0x30, 0x00, 0x72, 0xea, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74,
+ 0x00, 0x00, 0xe4
+ };
+ const guint8 out2[] = { 0x96, 0x69, 0x30, 0x8f, 0xc3, 0x00, 0x01, 0x71, 0xd0,
+ 0xa0, 0x30, 0x10, 0x72, 0xea, 0xfe, 0x00, 0x00, 0xfc, 0x00, 0x00, 0xfc,
+ 0x00, 0x00, 0xfc, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74,
+ 0x00, 0x01, 0xca
+ };
+ const guint8 *out[] = { out1, out2 };
+ guint out_len[] = { sizeof (out1), sizeof (out2) };
+ GstVideoTimeCode out_tc1, out_tc2;
+ const GstVideoTimeCode *out_tc[] = { &out_tc1, &out_tc2 };
+
+ gst_video_time_code_init (&in_tc1, 30, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 1, 2, 3, 4, 0);
+ fail_unless (gst_video_time_code_is_valid (&in_tc1));
+
+ gst_video_time_code_init (&out_tc1, 60, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 1, 2, 3, 8, 0);
+ fail_unless (gst_video_time_code_is_valid (&out_tc1));
+ gst_video_time_code_init (&out_tc2, 60, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 1, 2, 3, 9, 0);
+ fail_unless (gst_video_time_code_is_valid (&out_tc2));
+
+ check_conversion_multiple (G_N_ELEMENTS (in_len), in, in_len,
+ G_N_ELEMENTS (out_len), out, out_len,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)30/1",
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1",
+ in_tc, out_tc, 0);
+
+ gst_video_time_code_clear (&in_tc1);
+ gst_video_time_code_clear (&out_tc1);
+ gst_video_time_code_clear (&out_tc2);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (convert_cea708_cdp_cea708_cdp_half_framerate)
+{
+ /* tests that two input packets are merged together when halving the
+ * framerate */
+ const guint8 in1[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x00, 0x7a
+ };
+ const guint8 in2[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x01, 0x72, 0xea,
+ 0xfe, 0x00, 0x00, 0xfc, 0x00, 0x00, 0xfc, 0x00, 0x00, 0xfc, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x01, 0x70
+ };
+ const guint8 *in[] = { in1, in2 };
+ guint in_len[] = { sizeof (in1), sizeof (in2) };
+ GstVideoTimeCode in_tc1, in_tc2;
+ const GstVideoTimeCode *in_tc[] = { &in_tc1, &in_tc2 };
+
+ const guint8 out1[] =
+ { 0x96, 0x69, 0x4e, 0x5f, 0xc3, 0x00, 0x00, 0x71, 0xd0, 0xa0, 0x30, 0x00,
+ 0x72, 0xf4, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xfe, 0x00, 0x00, 0xfc,
+ 0x00, 0x00, 0xfc, 0x00, 0x00, 0xfc, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0x74, 0x00, 0x00, 0x2a
+ };
+ const guint8 *out[] = { out1 };
+ guint out_len[] = { sizeof (out1) };
+ GstVideoTimeCode out_tc1;
+ const GstVideoTimeCode *out_tc[] = { &out_tc1 };
+
+ gst_video_time_code_init (&in_tc1, 60, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 1, 2, 3, 8, 0);
+ fail_unless (gst_video_time_code_is_valid (&in_tc1));
+ gst_video_time_code_init (&in_tc2, 60, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 1, 2, 3, 8, 0);
+ fail_unless (gst_video_time_code_is_valid (&in_tc2));
+
+ gst_video_time_code_init (&out_tc1, 30, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 1, 2, 3, 4, 0);
+ fail_unless (gst_video_time_code_is_valid (&out_tc1));
+
+ check_conversion_multiple (G_N_ELEMENTS (in_len), in, in_len,
+ G_N_ELEMENTS (out_len), out, out_len,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1",
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)30/1",
+ in_tc, out_tc, 0);
+
+ gst_video_time_code_clear (&in_tc1);
+ gst_video_time_code_clear (&in_tc2);
+ gst_video_time_code_clear (&out_tc1);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (convert_cea708_cdp_cea708_cdp_max_merge)
+{
+ /* check that three high framerate input packets are merged into low
+ * framerate output packets, with the extra data from the third input
+ * packet being placed at the beginning of the second output packet */
+ const guint8 in1[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x00, 0x7a
+ };
+ /* enough input to fully cover two output packets. Extra is discarded */
+ const guint8 *in[] = { in1, in1, in1, in1, in1, in1, in1 };
+ guint in_len[] =
+ { sizeof (in1), sizeof (in1), sizeof (in1), sizeof (in1), sizeof (in1),
+ sizeof (in1), sizeof (in1)
+ };
+
+ const guint8 out1[] =
+ { 0x96, 0x69, 0x58, 0x1f, 0x43, 0x00, 0x00, 0x72, 0xf9, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0x74, 0x00, 0x00, 0x12
+ };
+ const guint8 out2[] =
+ { 0x96, 0x69, 0x58, 0x1f, 0x43, 0x00, 0x01, 0x72, 0xf9, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0x74, 0x00, 0x01, 0x1a
+ };
+ const guint8 *out[] = { out1, out2 };
+ guint out_len[] = { sizeof (out1), sizeof (out2) };
+ check_conversion_multiple (G_N_ELEMENTS (in_len), in, in_len,
+ G_N_ELEMENTS (out_len), out, out_len,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1",
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)24000/1001",
+ NULL, NULL, 0);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (convert_cea708_cdp_cea708_cdp_max_split)
+{
+ /* test that a low framerate stream produces multiple output packets for a
+ * high framerate */
+ const guint8 in1[] =
+ { 0x96, 0x69, 0x58, 0x1f, 0x43, 0x00, 0x00, 0x72, 0xf9, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0x74, 0x00, 0x00, 0x12
+ };
+ const guint8 *in[] = { in1, in1 };
+ guint in_len[] = { sizeof (in1), sizeof (in1) };
+
+ const guint8 out1[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x00, 0x7a
+ };
+ const guint8 out2[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x01, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x01, 0x78
+ };
+ const guint8 out3[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x02, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x02, 0x6c
+ };
+ const guint8 out4[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x03, 0x72, 0xea,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x03, 0x74
+ };
+ const guint8 *out[] = { out1, out2, out3, out4 };
+ guint out_len[] =
+ { sizeof (out1), sizeof (out2), sizeof (out3), sizeof (out4) };
+ check_conversion_multiple (G_N_ELEMENTS (in_len), in, in_len,
+ G_N_ELEMENTS (out_len), out, out_len,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)24000/1001",
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1",
+ NULL, NULL, 0);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (convert_cea708_cdp_cea708_cdp_max_split_eos)
+{
+ /* test that a low framerate stream produces multiple output packets for a
+ * high framerate and that an EOS will push the pending data */
+ const guint8 in1[] =
+ { 0x96, 0x69, 0x58, 0x1f, 0x43, 0x00, 0x00, 0x72, 0xf9, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xfc, 0x80, 0x80,
+ 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf9, 0x00, 0x00,
+ 0x74, 0x00, 0x00, 0x12
+ };
+ const guint8 *in[] = { in1 };
+ guint in_len[] = { sizeof (in1) };
+
+ const guint8 out1[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x00, 0x7a
+ };
+ const guint8 out2[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x01, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x01, 0x78
+ };
+ const guint8 out3[] = { 0x96, 0x69, 0x2b, 0x8f, 0x43, 0x00, 0x02, 0x72, 0xea,
+ 0xfc, 0x80, 0x80, 0xfe, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf9, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00,
+ 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x02, 0x75
+ };
+ const guint8 *out[] = { out1, out2, out3 };
+ guint out_len[] = { sizeof (out1), sizeof (out2), sizeof (out3) };
+
+ check_conversion_multiple (G_N_ELEMENTS (in_len), in, in_len,
+ G_N_ELEMENTS (out_len), out, out_len,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)24000/1001",
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1",
+ NULL, NULL, FLAG_SEND_EOS);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (convert_cea708_cdp_cea708_cdp_from_drop_frame_scaling)
+{
+ const guint8 in1[] = { 0x96, 0x69, 0x10, 0x7f, 0x43, 0x00, 0x00, 0x72, 0xe1,
+ 0xfc, 0x80, 0x80, 0x74, 0x00, 0x00, 0x7a
+ };
+ const guint8 *in[] = { in1, in1 };
+ guint in_len[] = { sizeof (in1), sizeof (in1) };
+ GstVideoTimeCode in_tc1, in_tc2;
+ const GstVideoTimeCode *in_tc[] = { &in_tc1, &in_tc2 };
+
+ const guint8 out1[] =
+ { 0x96, 0x69, 0x30, 0x8f, 0xc3, 0x00, 0x00, 0x71, 0xc0, 0x90, 0x12, 0x12,
+ 0x72, 0xea, 0xfc, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x00, 0x16
+ };
+ const guint8 out2[] =
+ { 0x96, 0x69, 0x30, 0x8f, 0xc3, 0x00, 0x01, 0x71, 0xc0, 0xa0, 0x00, 0x00,
+ 0x72, 0xea, 0xfc, 0x80, 0x80, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8,
+ 0x00, 0x00, 0xf8, 0x00, 0x00, 0xf8, 0x00, 0x00, 0x74, 0x00, 0x01, 0x28
+ };
+ const guint8 *out[] = { out1, out2 };
+ guint out_len[] = { sizeof (out1), sizeof (out2) };
+ GstVideoTimeCode out_tc1, out_tc2;
+ const GstVideoTimeCode *out_tc[] = { &out_tc1, &out_tc2 };
+
+ gst_video_time_code_init (&in_tc1, 60000, 1001, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME, 0, 1, 59, 59, 0);
+ fail_unless (gst_video_time_code_is_valid (&in_tc1));
+
+ gst_video_time_code_init (&in_tc2, 60000, 1001, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME, 0, 2, 0, 4, 0);
+ fail_unless (gst_video_time_code_is_valid (&in_tc2));
+
+ gst_video_time_code_init (&out_tc1, 60, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 0, 1, 59, 59, 0);
+ fail_unless (gst_video_time_code_is_valid (&out_tc1));
+
+ gst_video_time_code_init (&out_tc2, 60, 1, NULL,
+ GST_VIDEO_TIME_CODE_FLAGS_NONE, 0, 2, 0, 0, 0);
+ fail_unless (gst_video_time_code_is_valid (&out_tc2));
+
+ check_conversion_multiple (G_N_ELEMENTS (in_len), in, in_len,
+ G_N_ELEMENTS (out_len), out, out_len,
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60000/1001",
+ "closedcaption/x-cea-708,format=(string)cdp,framerate=(fraction)60/1",
+ in_tc, out_tc, FLAG_SEND_EOS);
+
+ gst_video_time_code_clear (&in_tc1);
+ gst_video_time_code_clear (&in_tc2);
+ gst_video_time_code_clear (&out_tc1);
+ gst_video_time_code_clear (&out_tc2);
+}
+
+GST_END_TEST;
+
static Suite *
ccextractor_suite (void)
{
tcase_add_test (tc, cdp_requires_framerate);
tcase_add_test (tc, framerate_passthrough);
+ tcase_add_test (tc, framerate_changes);
tcase_add_test (tc, convert_cea608_raw_cea608_s334_1a);
tcase_add_test (tc, convert_cea608_raw_cea708_cc_data);
tcase_add_test (tc, convert_cea608_raw_cea708_cdp);
tcase_add_test (tc, convert_cea708_cdp_cea608_s334_1a);
tcase_add_test (tc, convert_cea708_cdp_cea708_cc_data);
tcase_add_test (tc, convert_cea708_cdp_cea708_cc_data_too_big);
+ tcase_add_test (tc, convert_cea708_cdp_cea708_cdp_half_framerate);
+ tcase_add_test (tc, convert_cea708_cdp_cea708_cdp_double_framerate);
+ tcase_add_test (tc, convert_cea708_cdp_cea708_cdp_max_merge);
+ tcase_add_test (tc, convert_cea708_cdp_cea708_cdp_max_split);
+ tcase_add_test (tc, convert_cea708_cdp_cea708_cdp_max_split_eos);
+ tcase_add_test (tc, convert_cea708_cdp_cea708_cdp_from_drop_frame_scaling);
return s;
}