GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
GstQuery * query);
+static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
+ GstEvent * event);
#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
#endif
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_jpeg_dec_src_pad_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_jpeg_dec_sink_pad_template);
gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
- "Codec/Decoder/Image",
- "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
+ "Codec/Decoder/Image", "Decode images from JPEG format",
+ "Wim Taymans <wim@fluendo.com>");
vdec_class->start = gst_jpeg_dec_start;
vdec_class->stop = gst_jpeg_dec_stop;
vdec_class->set_format = gst_jpeg_dec_set_format;
vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
+ vdec_class->sink_event = gst_jpeg_dec_sink_event;
GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
static boolean
gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
{
- GstJpegDec *dec;
-
- dec = CINFO_GET_JPEGDEC (cinfo);
- g_return_val_if_fail (dec != NULL, FALSE);
- g_return_val_if_fail (dec->current_frame != NULL, FALSE);
- g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
-
- cinfo->src->next_input_byte = dec->current_frame_map.data;
- cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
-
- return TRUE;
+  /* We pass in the full frame initially; if this gets called, the frame is
+   * most likely corrupted */
+ return FALSE;
}
static void
goto have_full_frame;
}
if (value == 0xd8) {
- /* Skip this frame if we found another SOI marker */
- GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
+ GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);
+
+ /* clear parse state */
+ dec->saw_header = FALSE;
dec->parse_resync = FALSE;
- size = offset + 2;
- goto drop_frame;
+ toadd = offset;
+ goto have_full_frame;
}
{
GstJpegDec *jpeg = GST_JPEG_DEC (dec);
- if (dec->input_segment.format == GST_FORMAT_TIME)
- gst_video_decoder_set_packetized (dec, TRUE);
- else
- gst_video_decoder_set_packetized (dec, FALSE);
-
if (jpeg->input_state)
gst_video_codec_state_unref (jpeg->input_state);
jpeg->input_state = gst_video_codec_state_ref (state);
}
static void
-gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
+gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
+ guint field, guint num_fields)
{
guchar *rows[16];
guchar **scanarray[1] = { rows };
GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
width = GST_VIDEO_FRAME_WIDTH (frame);
- height = GST_VIDEO_FRAME_HEIGHT (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
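+  /* for the second field, start writing at the second output row; together
+   * with the row stride doubled below this interleaves the two fields */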
+ if (field == 2) {
+ base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ }
+
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
- rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
}
static void
-gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
+gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
+ guint field, guint num_fields)
{
guchar *r_rows[16], *g_rows[16], *b_rows[16];
guchar **scanarray[3] = { r_rows, g_rows, b_rows };
GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
width = GST_VIDEO_FRAME_WIDTH (frame);
- height = GST_VIDEO_FRAME_HEIGHT (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
- for (i = 0; i < 3; i++)
+ for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
+ if (field == 2)
+ base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ }
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
- rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
static void
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
- gint r_h, gint comp)
+ gint r_h, gint comp, guint field, guint num_fields)
{
guchar *y_rows[16], *u_rows[16], *v_rows[16];
guchar **scanarray[3] = { y_rows, u_rows, v_rows };
gint i, j, k;
gint lines;
guchar *base[3], *last[3];
- gint stride[3];
+ gint rowsize[3], stride[3];
gint width, height;
GST_DEBUG_OBJECT (dec,
for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
- stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
+ rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
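+    /* rowsize is the number of bytes copied per output line, while stride
+     * (doubled when writing a single field of interlaced output) is how far
+     * the destination pointers advance between lines */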
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
(GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
+
+ if (field == 2) {
+ base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ }
}
memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
if (G_LIKELY (lines > 0)) {
for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
if (G_LIKELY (base[0] <= last[0])) {
- memcpy (base[0], y_rows[j], stride[0]);
+ memcpy (base[0], y_rows[j], rowsize[0]);
base[0] += stride[0];
}
if (r_v == 2) {
if (G_LIKELY (base[0] <= last[0])) {
- memcpy (base[0], y_rows[j + 1], stride[0]);
+ memcpy (base[0], y_rows[j + 1], rowsize[0]);
base[0] += stride[0];
}
}
if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
if (r_h == 2) {
- memcpy (base[1], u_rows[k], stride[1]);
- memcpy (base[2], v_rows[k], stride[2]);
+ memcpy (base[1], u_rows[k], rowsize[1]);
+ memcpy (base[2], v_rows[k], rowsize[2]);
} else if (r_h == 1) {
- hresamplecpy1 (base[1], u_rows[k], stride[1]);
- hresamplecpy1 (base[2], v_rows[k], stride[2]);
+ hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
+ hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
} else {
/* FIXME: implement (at least we avoid crashing by doing nothing) */
}
}
static GstFlowReturn
-gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
+gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
+ guint field, guint num_fields)
{
guchar **line[3]; /* the jpeg line buffer */
guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
- stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
(GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
+
+ if (field == 2) {
+ base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ }
}
/* let jpeglib decode directly into our final buffer */
}
static void
-gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
+gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
+ gboolean interlaced)
{
GstVideoCodecState *outstate;
GstVideoInfo *info;
break;
}
+ if (interlaced) {
+ outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
+ GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
+ }
+
gst_video_codec_state_unref (outstate);
gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
}
static GstFlowReturn
-gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
+gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
{
- GstFlowReturn ret = GST_FLOW_OK;
- GstJpegDec *dec = (GstJpegDec *) bdec;
- GstVideoFrame vframe;
- gint width, height;
- gint r_h, r_v;
- guint code, hdr_ok;
- gboolean need_unmap = TRUE;
- GstVideoCodecState *state = NULL;
- gboolean release_frame = TRUE;
-
- dec->current_frame = frame;
- gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
- gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
-
- if (setjmp (dec->jerr.setjmp_buffer)) {
- code = dec->jerr.pub.msg_code;
-
- if (code == JERR_INPUT_EOF) {
- GST_DEBUG ("jpeg input EOF error, we probably need more data");
- goto need_more_data;
- }
- goto decode_error;
- }
+ G_GNUC_UNUSED GstFlowReturn ret;
+ guint r_h, r_v, hdr_ok;
/* read header */
hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
break;
}
- width = dec->cinfo.output_width;
- height = dec->cinfo.output_height;
-
- if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
- height < MIN_HEIGHT || height > MAX_HEIGHT))
+ if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
+ dec->cinfo.output_width > MAX_WIDTH ||
+ dec->cinfo.output_height < MIN_HEIGHT ||
+ dec->cinfo.output_height > MAX_HEIGHT))
goto wrong_size;
- gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
-
- state = gst_video_decoder_get_output_state (bdec);
- ret = gst_video_decoder_allocate_output_frame (bdec, frame);
- if (G_UNLIKELY (ret != GST_FLOW_OK))
- goto alloc_failed;
-
- if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
- GST_MAP_READWRITE))
- goto alloc_failed;
+ return GST_FLOW_OK;
- if (setjmp (dec->jerr.setjmp_buffer)) {
- code = dec->jerr.pub.msg_code;
- gst_video_frame_unmap (&vframe);
- goto decode_error;
+/* ERRORS */
+wrong_size:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
+ dec->cinfo.output_height), ret);
+ return GST_FLOW_ERROR;
+ }
+components_not_supported:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("number of components not supported: %d (max 3)",
+ dec->cinfo.num_components), ret);
+ jpeg_abort_decompress (&dec->cinfo);
+ return GST_FLOW_ERROR;
}
+unsupported_colorspace:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Picture has unknown or unsupported colourspace"), ret);
+ jpeg_abort_decompress (&dec->cinfo);
+ return GST_FLOW_ERROR;
+ }
+invalid_yuvrgbgrayscale:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
+ jpeg_abort_decompress (&dec->cinfo);
+ return GST_FLOW_ERROR;
+ }
+}
- GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
+static GstFlowReturn
+gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
+ guint height, guint field, guint num_fields)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
if (dec->cinfo.jpeg_color_space == JCS_RGB) {
- gst_jpeg_dec_decode_rgb (dec, &vframe);
+ gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
} else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
- gst_jpeg_dec_decode_grayscale (dec, &vframe);
+ gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
} else {
- GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
+ GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
dec->cinfo.rec_outbuf_height);
/* For some widths jpeglib requires more horizontal padding than I420
|| dec->cinfo.comp_info[2].h_samp_factor != 1)) {
GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
"indirect decoding using extra buffer copy");
- gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
- dec->cinfo.num_components);
+ gst_jpeg_dec_decode_indirect (dec, vframe,
+ dec->cinfo.comp_info[0].v_samp_factor,
+ dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
+ field, num_fields);
} else {
- ret = gst_jpeg_dec_decode_direct (dec, &vframe);
+ ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
+ }
+ }
+
+ GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ jpeg_abort_decompress (&dec->cinfo);
+ } else {
+ jpeg_finish_decompress (&dec->cinfo);
+ }
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstJpegDec *dec = (GstJpegDec *) bdec;
+ GstVideoFrame vframe;
+ gint num_fields; /* number of fields (1 or 2) */
+ gint output_height; /* height of output image (one or two fields) */
+ gint height; /* height of current frame (whole image or a field) */
+ gint width;
+ guint code;
+ gboolean need_unmap = TRUE;
+ GstVideoCodecState *state = NULL;
+ gboolean release_frame = TRUE;
+ gboolean has_eoi;
+ guint8 *data;
+ gsize nbytes;
+
+ gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
+
+ data = dec->current_frame_map.data;
+ nbytes = dec->current_frame_map.size;
+  has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));
+
+  /* some cameras fail to send an end-of-image (EOI) marker,
+   * so add one if that is the case. */
+ if (!has_eoi) {
+ GstMapInfo map;
+ GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);
+
+ /* unmap, will add EOI and remap at the end */
+ gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
+
+ gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
+ map.data[0] = 0xff;
+ map.data[1] = 0xd9;
+ gst_buffer_unmap (eoibuf, &map);
- if (G_UNLIKELY (ret != GST_FLOW_OK))
- goto decode_direct_failed;
+ /* append to input buffer, and remap */
+ frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);
+
+    gst_buffer_map (frame->input_buffer, &dec->current_frame_map,
+        GST_MAP_READ);
+ GST_DEBUG ("fixup EOI marker added");
+ }
+
+ dec->current_frame = frame;
+ dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
+ dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
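+  /* the whole mapped input frame is handed to libjpeg up front, so the
+   * fill_input_buffer callback should only trigger on corrupt input */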
+
+ if (setjmp (dec->jerr.setjmp_buffer)) {
+ code = dec->jerr.pub.msg_code;
+
+ if (code == JERR_INPUT_EOF) {
+ GST_DEBUG ("jpeg input EOF error, we probably need more data");
+ goto need_more_data;
}
+ goto decode_error;
}
- gst_video_frame_unmap (&vframe);
+ /* read header and check values */
+ ret = gst_jpeg_dec_prepare_decode (dec);
+ if (G_UNLIKELY (ret == GST_FLOW_ERROR))
+ goto done;
+
+ width = dec->cinfo.output_width;
+ height = dec->cinfo.output_height;
+
+ /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
+ * to see if there are two SOF markers in the packet to detect this) */
+ if (gst_video_decoder_get_packetized (bdec) &&
+ dec->input_state->info.height > height &&
+ dec->input_state->info.height <= (height * 2)
+ && dec->input_state->info.width == width) {
+ GST_LOG_OBJECT (dec,
+ "looks like an interlaced image: "
+ "input width/height of %dx%d with JPEG frame width/height of %dx%d",
+ dec->input_state->info.width, dec->input_state->info.height, width,
+ height);
+ output_height = dec->input_state->info.height;
+ height = dec->input_state->info.height / 2;
+ num_fields = 2;
+ GST_LOG_OBJECT (dec, "field height=%d", height);
+ } else {
+ output_height = height;
+ num_fields = 1;
+ }
+
+ gst_jpeg_dec_negotiate (dec, width, output_height,
+ dec->cinfo.jpeg_color_space, num_fields == 2);
+
+ state = gst_video_decoder_get_output_state (bdec);
+ ret = gst_video_decoder_allocate_output_frame (bdec, frame);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto alloc_failed;
+
+ if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
+ GST_MAP_READWRITE))
+ goto alloc_failed;
+
+ if (setjmp (dec->jerr.setjmp_buffer)) {
+ code = dec->jerr.pub.msg_code;
+ gst_video_frame_unmap (&vframe);
+ goto decode_error;
+ }
+
+ GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
+ num_fields);
+
+ ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
if (setjmp (dec->jerr.setjmp_buffer)) {
code = dec->jerr.pub.msg_code;
+ gst_video_frame_unmap (&vframe);
goto decode_error;
}
- GST_LOG_OBJECT (dec, "decompressing finished");
- jpeg_finish_decompress (&dec->cinfo);
+ /* decode second field if there is one */
+ if (num_fields == 2) {
+ GstVideoFormat field2_format;
+
+    /* skip any chunk or padding bytes before the next SOI marker; both
+     * fields are in a single buffer here, so direct access should be fine */
+ while (dec->jsrc.pub.bytes_in_buffer > 2 &&
+ GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
+ --dec->jsrc.pub.bytes_in_buffer;
+ ++dec->jsrc.pub.next_input_byte;
+ }
+
+ if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
+ /* FIXME: post a warning message here? */
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
+
+ /* check if format has changed for the second field */
+ switch (dec->cinfo.jpeg_color_space) {
+ case JCS_RGB:
+ field2_format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case JCS_GRAYSCALE:
+ field2_format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ default:
+ field2_format = GST_VIDEO_FORMAT_I420;
+ break;
+ }
+
+ GST_LOG_OBJECT (dec,
+ "got for second field of interlaced image: "
+ "input width/height of %dx%d with JPEG frame width/height of %dx%d",
+ dec->input_state->info.width, dec->input_state->info.height,
+ dec->cinfo.output_width, dec->cinfo.output_height);
+
+ if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
+ GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
+ GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
+ field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
+ GST_WARNING_OBJECT (dec, "second field has different format than first");
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
+
+ ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
+ }
+ gst_video_frame_unmap (&vframe);
gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
ret = gst_video_decoder_finish_frame (bdec, frame);
goto exit;
}
/* ERRORS */
-wrong_size:
- {
- GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
- (_("Failed to decode JPEG image")),
- ("Picture is too small or too big (%ux%u)", width, height), ret);
- ret = GST_FLOW_ERROR;
- goto done;
- }
decode_error:
{
gchar err_msg[JMSG_LENGTH_MAX];
goto done;
}
-decode_direct_failed:
+decode_failed:
{
/* already posted an error message */
- jpeg_abort_decompress (&dec->cinfo);
goto done;
}
alloc_failed:
}
goto exit;
}
-components_not_supported:
- {
- GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
- (_("Failed to decode JPEG image")),
- ("number of components not supported: %d (max 3)",
- dec->cinfo.num_components), ret);
- jpeg_abort_decompress (&dec->cinfo);
- goto done;
- }
-unsupported_colorspace:
- {
- GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
- (_("Failed to decode JPEG image")),
- ("Picture has unknown or unsupported colourspace"), ret);
- jpeg_abort_decompress (&dec->cinfo);
- goto done;
- }
-invalid_yuvrgbgrayscale:
- {
- GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
- (_("Failed to decode JPEG image")),
- ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
- jpeg_abort_decompress (&dec->cinfo);
- goto done;
- }
}
static gboolean
}
static gboolean
+gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
+{
+ const GstSegment *segment;
+
+ if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
+ goto done;
+
+ gst_event_parse_segment (event, &segment);
+
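+  /* a TIME segment means upstream delivers complete parsed frames, so we
+   * can operate in packetized mode; otherwise we must parse the stream */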
+ if (segment->format == GST_FORMAT_TIME)
+ gst_video_decoder_set_packetized (bdec, TRUE);
+ else
+ gst_video_decoder_set_packetized (bdec, FALSE);
+
+done:
+ return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
+}
+
+static gboolean
gst_jpeg_dec_start (GstVideoDecoder * bdec)
{
GstJpegDec *dec = (GstJpegDec *) bdec;