* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
-
+/**
+ * SECTION:element-jpegenc
+ *
+ * Encodes jpeg images.
+ *
+ * <refsect2>
+ * <title>Example launch line</title>
+ * |[
+ * gst-launch videotestsrc num-buffers=50 ! video/x-raw-yuv, framerate='(fraction)'5/1 ! jpegenc ! avimux ! filesink location=mjpeg.avi
+ * ]| a pipeline to mux 5 JPEG frames per second into a 10 sec. long motion jpeg
+ * avi.
+ * </refsect2>
+ */
#ifdef HAVE_CONFIG_H
#include "config.h"
#include <string.h>
#include "gstjpegenc.h"
+#include "gstjpeg.h"
#include <gst/video/video.h>
-/* elementfactory information */
-static const GstElementDetails gst_jpegenc_details =
-GST_ELEMENT_DETAILS ("JPEG image encoder",
- "Codec/Encoder/Image",
- "Encode images in JPEG format",
- "Wim Taymans <wim.taymans@tvd.be>");
+/* experimental */
+/* setting smoothing seems to have no effect in libjpeg
+#define ENABLE_SMOOTHING 1
+*/
GST_DEBUG_CATEGORY_STATIC (jpegenc_debug);
#define GST_CAT_DEFAULT jpegenc_debug
#define JPEG_DEFAULT_QUALITY 85
-
-/* These macros are adapted from videotestsrc.c
- * and/or gst-plugins/gst/games/gstvideoimage.c */
-
-/* I420 */
-#define I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
-#define I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
-#define I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(I420_Y_ROWSTRIDE(width)))/2)
-
-#define I420_Y_OFFSET(w,h) (0)
-#define I420_U_OFFSET(w,h) (I420_Y_OFFSET(w,h)+(I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
-#define I420_V_OFFSET(w,h) (I420_U_OFFSET(w,h)+(I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
-
-#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+#define JPEG_DEFAULT_SMOOTHING 0
+#define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
/* JpegEnc signals and args */
enum
enum
{
- ARG_0,
- ARG_QUALITY,
- ARG_SMOOTHING
- /* FILL ME */
+ PROP_0,
+ PROP_QUALITY,
+ PROP_SMOOTHING,
+ PROP_IDCT_METHOD
};
+static void gst_jpegenc_reset (GstJpegEnc * enc);
static void gst_jpegenc_base_init (gpointer g_class);
static void gst_jpegenc_class_init (GstJpegEnc * klass);
static void gst_jpegenc_init (GstJpegEnc * jpegenc);
return jpegenc_type;
}
+/* *INDENT-OFF* */
static GstStaticPadTemplate gst_jpegenc_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV
+ ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, YVYU, Y444 }") "; "
+ GST_VIDEO_CAPS_RGB "; " GST_VIDEO_CAPS_BGR "; "
+ GST_VIDEO_CAPS_RGBx "; " GST_VIDEO_CAPS_xRGB "; "
+ GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_xBGR "; "
+ GST_VIDEO_CAPS_GRAY8)
);
+/* *INDENT-ON* */
static GstStaticPadTemplate gst_jpegenc_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("image/jpeg, "
- "width = (int) [ 16, 4096 ], "
- "height = (int) [ 16, 4096 ], " "framerate = (fraction) [ 0/1, MAX ]")
+ "width = (int) [ 16, 65535 ], "
+ "height = (int) [ 16, 65535 ], " "framerate = (fraction) [ 0/1, MAX ]")
);
static void
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_jpegenc_sink_pad_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_jpegenc_src_pad_template));
- gst_element_class_set_details (element_class, &gst_jpegenc_details);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_jpegenc_sink_pad_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_jpegenc_src_pad_template);
+ gst_element_class_set_details_simple (element_class, "JPEG image encoder",
+ "Codec/Encoder/Image",
+ "Encode images in JPEG format", "Wim Taymans <wim.taymans@tvd.be>");
}
static void
gobject_class->get_property = gst_jpegenc_get_property;
- g_object_class_install_property (gobject_class, ARG_QUALITY,
+ g_object_class_install_property (gobject_class, PROP_QUALITY,
g_param_spec_int ("quality", "Quality", "Quality of encoding",
- 0, 100, JPEG_DEFAULT_QUALITY, G_PARAM_READWRITE));
-#if 0
+ 0, 100, JPEG_DEFAULT_QUALITY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+#ifdef ENABLE_SMOOTHING
/* disabled, since it doesn't seem to work */
- g_object_class_install_property (gobject_class, ARG_SMOOTHING,
+ g_object_class_install_property (gobject_class, PROP_SMOOTHING,
g_param_spec_int ("smoothing", "Smoothing", "Smoothing factor",
- 0, 100, 0, G_PARAM_READWRITE));
+ 0, 100, JPEG_DEFAULT_SMOOTHING,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif
+ g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
+ g_param_spec_enum ("idct-method", "IDCT Method",
+ "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
+ JPEG_DEFAULT_IDCT_METHOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
gstelement_class->change_state = gst_jpegenc_change_state;
gobject_class->finalize = gst_jpegenc_finalize;
GST_DEBUG ("gst_jpegenc_chain: init_destination");
}
-static gboolean
+static boolean
gst_jpegenc_flush_destination (j_compress_ptr cinfo)
{
- GST_DEBUG ("gst_jpegenc_chain: flush_destination: buffer too small !!!");
+ /* libjpeg empty_output_buffer callback: the whole output buffer filled up
+ * before compression finished. Grow the buffer to twice its size, keep the
+ * bytes already written, and point libjpeg at the new free space.
+ * Returning TRUE tells libjpeg the buffer has been refreshed. */
+ GstBuffer *overflow_buffer;
+ guint32 old_buffer_size;
+ GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
+ GST_DEBUG_OBJECT (jpegenc,
+ "gst_jpegenc_chain: flush_destination: buffer too small");
+
+ /* Our output buffer wasn't big enough.
+ * Make a new buffer that's twice the size, */
+ old_buffer_size = GST_BUFFER_SIZE (jpegenc->output_buffer);
+ /* NOTE(review): the pad-alloc return value is not checked here; this
+ * assumes the allocation succeeds and overflow_buffer is valid — verify. */
+ gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad,
+ GST_BUFFER_OFFSET_NONE, old_buffer_size * 2,
+ GST_PAD_CAPS (jpegenc->srcpad), &overflow_buffer);
+ memcpy (GST_BUFFER_DATA (overflow_buffer),
+ GST_BUFFER_DATA (jpegenc->output_buffer), old_buffer_size);
+
+ /* carry timestamps over to the replacement buffer */
+ gst_buffer_copy_metadata (overflow_buffer, jpegenc->output_buffer,
+ GST_BUFFER_COPY_TIMESTAMPS);
+
+ /* drop it into place, */
+ gst_buffer_unref (jpegenc->output_buffer);
+ jpegenc->output_buffer = overflow_buffer;
+
+ /* and last, update libjpeg on where to work. */
+ jpegenc->jdest.next_output_byte =
+ GST_BUFFER_DATA (jpegenc->output_buffer) + old_buffer_size;
+ jpegenc->jdest.free_in_buffer =
+ GST_BUFFER_SIZE (jpegenc->output_buffer) - old_buffer_size;
+
+ return TRUE;
}
static void
gst_jpegenc_term_destination (j_compress_ptr cinfo)
{
- GST_DEBUG ("gst_jpegenc_chain: term_source");
+ /* libjpeg term_destination callback: compression finished. Trim the
+ * buffer size down to the bytes actually written, emit the FRAME_ENCODED
+ * signal, and push the buffer downstream; the pad_push result is stored
+ * in last_ret for the chain function to return. */
+ GstJpegEnc *jpegenc = (GstJpegEnc *) (cinfo->client_data);
+ GST_DEBUG_OBJECT (jpegenc, "gst_jpegenc_chain: term_source");
+
+ /* Trim the buffer size and push it. */
+ GST_BUFFER_SIZE (jpegenc->output_buffer) =
+ GST_BUFFER_SIZE (jpegenc->output_buffer) - jpegenc->jdest.free_in_buffer;
+
+ g_signal_emit (G_OBJECT (jpegenc), gst_jpegenc_signals[FRAME_ENCODED], 0);
+
+ /* ownership of output_buffer passes to gst_pad_push; clear our pointer */
+ jpegenc->last_ret = gst_pad_push (jpegenc->srcpad, jpegenc->output_buffer);
+ jpegenc->output_buffer = NULL;
}
static void
jpegenc->srcpad =
gst_pad_new_from_static_template (&gst_jpegenc_src_pad_template, "src");
- gst_pad_set_getcaps_function (jpegenc->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpegenc_getcaps));
- /*gst_pad_set_setcaps_function (jpegenc->sinkpad, gst_jpegenc_setcaps); */
- gst_pad_use_fixed_caps (jpegenc->sinkpad);
+ gst_pad_use_fixed_caps (jpegenc->srcpad);
gst_element_add_pad (GST_ELEMENT (jpegenc), jpegenc->srcpad);
/* reset the initial video state */
jpegenc->jdest.empty_output_buffer = gst_jpegenc_flush_destination;
jpegenc->jdest.term_destination = gst_jpegenc_term_destination;
jpegenc->cinfo.dest = &jpegenc->jdest;
+ jpegenc->cinfo.client_data = jpegenc;
+ /* init properties */
jpegenc->quality = JPEG_DEFAULT_QUALITY;
- jpegenc->smoothing = 0;
+ jpegenc->smoothing = JPEG_DEFAULT_SMOOTHING;
+ jpegenc->idct_method = JPEG_DEFAULT_IDCT_METHOD;
+
+ gst_jpegenc_reset (jpegenc);
}
static void
-gst_jpegenc_finalize (GObject * object)
+gst_jpegenc_reset (GstJpegEnc * enc)
{
+ /* Free the per-plane scanline pointer arrays and the temporary row
+ * buffers, then return the cached input description to its initial
+ * "unnegotiated" state. Called from init and on PAUSED->READY. */
+ gint i, j;
+
+ g_free (enc->line[0]);
+ g_free (enc->line[1]);
+ g_free (enc->line[2]);
+ enc->line[0] = NULL;
+ enc->line[1] = NULL;
+ enc->line[2] = NULL;
+ /* free any temporary rows allocated for packed (non-planar) input */
+ for (i = 0; i < 3; i++) {
+ for (j = 0; j < 4 * DCTSIZE; j++) {
+ g_free (enc->row[i][j]);
+ enc->row[i][j] = NULL;
+ }
+ }
+
+ enc->width = -1;
+ enc->height = -1;
+ enc->format = GST_VIDEO_FORMAT_UNKNOWN;
+ enc->fps_den = enc->par_den = 0;
+ /* NOTE(review): width/height are set to -1 above and then overwritten
+ * with 0 here, making the first assignment redundant; fps_num/par_num
+ * are also left untouched. Confirm which reset values are intended. */
+ enc->height = enc->width = 0;
+}
+static void
+gst_jpegenc_finalize (GObject * object)
+{
GstJpegEnc *filter = GST_JPEGENC (object);
jpeg_destroy_compress (&filter->cinfo);
gst_jpegenc_getcaps (GstPad * pad)
{
GstJpegEnc *jpegenc = GST_JPEGENC (gst_pad_get_parent (pad));
- GstPad *otherpad;
- GstCaps *caps;
- const char *name;
- int i;
+ GstCaps *caps, *othercaps;
+ const GstCaps *templ;
+ gint i, j;
GstStructure *structure = NULL;
/* we want to proxy properties like width, height and framerate from the
other end of the element */
- otherpad = (pad == jpegenc->srcpad) ? jpegenc->sinkpad : jpegenc->srcpad;
- caps = gst_pad_get_allowed_caps (otherpad);
- if (pad == jpegenc->srcpad) {
- name = "image/jpeg";
- } else {
- name = "video/x-raw-yuv";
+
+ othercaps = gst_pad_peer_get_caps_reffed (jpegenc->srcpad);
+ if (othercaps == NULL ||
+ gst_caps_is_empty (othercaps) || gst_caps_is_any (othercaps)) {
+ caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
+ goto done;
}
- for (i = 0; i < gst_caps_get_size (caps); i++) {
- structure = gst_caps_get_structure (caps, i);
-
- gst_structure_set_name (structure, name);
- gst_structure_remove_field (structure, "format");
- /* ... but for the sink pad, we only do I420 anyway, so add that */
- if (pad == jpegenc->sinkpad) {
- gst_structure_set (structure, "format", GST_TYPE_FOURCC,
- GST_STR_FOURCC ("I420"), NULL);
+
+ caps = gst_caps_new_empty ();
+ templ = gst_pad_get_pad_template_caps (pad);
+
+ for (i = 0; i < gst_caps_get_size (templ); i++) {
+ /* pick fields from peer caps */
+ for (j = 0; j < gst_caps_get_size (othercaps); j++) {
+ GstStructure *s = gst_caps_get_structure (othercaps, j);
+ const GValue *val;
+
+ structure = gst_structure_copy (gst_caps_get_structure (templ, i));
+ if ((val = gst_structure_get_value (s, "width")))
+ gst_structure_set_value (structure, "width", val);
+ if ((val = gst_structure_get_value (s, "height")))
+ gst_structure_set_value (structure, "height", val);
+ if ((val = gst_structure_get_value (s, "framerate")))
+ gst_structure_set_value (structure, "framerate", val);
+
+ gst_caps_merge_structure (caps, structure);
}
}
+
+done:
+
+ gst_caps_replace (&othercaps, NULL);
gst_object_unref (jpegenc);
return caps;
static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
{
- GstJpegEnc *jpegenc = GST_JPEGENC (gst_pad_get_parent (pad));
- GstStructure *structure;
+ GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
+ GstVideoFormat format;
+ gint width, height;
+ gint fps_num, fps_den;
+ gint par_num, par_den;
+ gint i;
GstCaps *othercaps;
- GstPad *otherpad;
gboolean ret;
- const GValue *framerate;
-
- otherpad = (pad == jpegenc->srcpad) ? jpegenc->sinkpad : jpegenc->srcpad;
- structure = gst_caps_get_structure (caps, 0);
- framerate = gst_structure_get_value (structure, "framerate");
- gst_structure_get_int (structure, "width", &jpegenc->width);
- gst_structure_get_int (structure, "height", &jpegenc->height);
+ /* get info from caps */
+ if (!gst_video_format_parse_caps (caps, &format, &width, &height))
+ goto refuse_caps;
+ /* optional; pass along if present */
+ fps_num = fps_den = -1;
+ par_num = par_den = -1;
+ gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
+ gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);
+
+ if (width == enc->width && height == enc->height && enc->format == format
+ && fps_num == enc->fps_num && fps_den == enc->fps_den
+ && par_num == enc->par_num && par_den == enc->par_den)
+ return TRUE;
+
+ /* store input description */
+ enc->format = format;
+ enc->width = width;
+ enc->height = height;
+ enc->fps_num = fps_num;
+ enc->fps_den = fps_den;
+ enc->par_num = par_num;
+ enc->par_den = par_den;
+
+ /* prepare a cached image description */
+ enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
+ /* ... but any alpha is disregarded in encoding */
+ if (gst_video_format_is_gray (format))
+ enc->channels = 1;
+ else
+ enc->channels = 3;
+ enc->h_max_samp = 0;
+ enc->v_max_samp = 0;
+ for (i = 0; i < enc->channels; ++i) {
+ enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
+ enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
+ enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
+ height);
+ enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
+ enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
+ enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
+ enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
+ enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
+ enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
+ }
+ /* samp should only be 1, 2 or 4 */
+ g_assert (enc->h_max_samp <= 4);
+ g_assert (enc->v_max_samp <= 4);
+ /* now invert */
+ /* maximum is invariant, as one of the components should have samp 1 */
+ for (i = 0; i < enc->channels; ++i) {
+ enc->h_samp[i] = enc->h_max_samp / enc->h_samp[i];
+ enc->v_samp[i] = enc->v_max_samp / enc->v_samp[i];
+ }
+ enc->planar = (enc->inc[0] == 1 && enc->inc[1] == 1 && enc->inc[2] == 1);
- othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (otherpad));
- if (framerate) {
+ othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
+ gst_caps_set_simple (othercaps,
+ "width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL);
+ if (enc->fps_den > 0)
gst_caps_set_simple (othercaps,
- "width", G_TYPE_INT, jpegenc->width,
- "height", G_TYPE_INT, jpegenc->height,
- "framerate", GST_TYPE_FRACTION,
- gst_value_get_fraction_numerator (framerate),
- gst_value_get_fraction_denominator (framerate), NULL);
- } else {
+ "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
+ if (enc->par_den > 0)
gst_caps_set_simple (othercaps,
- "width", G_TYPE_INT, jpegenc->width,
- "height", G_TYPE_INT, jpegenc->height, NULL);
- }
+ "pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
+ NULL);
- ret = gst_pad_set_caps (jpegenc->srcpad, othercaps);
+ ret = gst_pad_set_caps (enc->srcpad, othercaps);
gst_caps_unref (othercaps);
- if (GST_PAD_LINK_SUCCESSFUL (ret)) {
- gst_jpegenc_resync (jpegenc);
- }
+ if (ret)
+ gst_jpegenc_resync (enc);
- gst_object_unref (jpegenc);
+ gst_object_unref (enc);
return ret;
+
+ /* ERRORS */
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
+ gst_object_unref (enc);
+ return FALSE;
+ }
}
static void
gst_jpegenc_resync (GstJpegEnc * jpegenc)
{
gint width, height;
+ gint i, j;
GST_DEBUG_OBJECT (jpegenc, "resync");
jpegenc->cinfo.image_width = width = jpegenc->width;
jpegenc->cinfo.image_height = height = jpegenc->height;
- jpegenc->cinfo.input_components = 3;
+ jpegenc->cinfo.input_components = jpegenc->channels;
GST_DEBUG_OBJECT (jpegenc, "width %d, height %d", width, height);
+ GST_DEBUG_OBJECT (jpegenc, "format %d", jpegenc->format);
+
+ if (gst_video_format_is_rgb (jpegenc->format)) {
+ GST_DEBUG_OBJECT (jpegenc, "RGB");
+ jpegenc->cinfo.in_color_space = JCS_RGB;
+ } else if (gst_video_format_is_gray (jpegenc->format)) {
+ GST_DEBUG_OBJECT (jpegenc, "gray");
+ jpegenc->cinfo.in_color_space = JCS_GRAYSCALE;
+ } else {
+ GST_DEBUG_OBJECT (jpegenc, "YUV");
+ jpegenc->cinfo.in_color_space = JCS_YCbCr;
+ }
+ /* input buffer size as max output */
+ jpegenc->bufsize = gst_video_format_get_size (jpegenc->format, width, height);
jpeg_set_defaults (&jpegenc->cinfo);
- jpegenc->cinfo.dct_method = JDCT_FASTEST;
- /*jpegenc->cinfo.dct_method = JDCT_DEFAULT; */
- /*jpegenc->cinfo.smoothing_factor = jpegenc->smoothing; */
- jpeg_set_quality (&jpegenc->cinfo, jpegenc->quality, TRUE);
-
-#if 0
- switch (jpegenc->format) {
- case GST_COLORSPACE_RGB24:
- jpegenc->bufsize = jpegenc->width * jpegenc->height * 3;
- GST_DEBUG ("gst_jpegenc_resync: setting format to RGB24");
- jpegenc->cinfo.in_color_space = JCS_RGB;
- jpegenc->cinfo.raw_data_in = FALSE;
- break;
- case GST_COLORSPACE_YUV420P:
-#endif
- jpegenc->bufsize = I420_SIZE (jpegenc->width, jpegenc->height);
- jpegenc->cinfo.raw_data_in = TRUE;
- jpegenc->cinfo.in_color_space = JCS_YCbCr;
- GST_DEBUG_OBJECT (jpegenc, "setting format to YUV420P");
- jpegenc->cinfo.comp_info[0].h_samp_factor = 2;
- jpegenc->cinfo.comp_info[0].v_samp_factor = 2;
- jpegenc->cinfo.comp_info[1].h_samp_factor = 1;
- jpegenc->cinfo.comp_info[1].v_samp_factor = 1;
- jpegenc->cinfo.comp_info[2].h_samp_factor = 1;
- jpegenc->cinfo.comp_info[2].v_samp_factor = 1;
-
- if (height != -1) {
- jpegenc->line[0] =
- g_realloc (jpegenc->line[0], height * sizeof (char *));
- jpegenc->line[1] =
- g_realloc (jpegenc->line[1], height * sizeof (char *) / 2);
- jpegenc->line[2] =
- g_realloc (jpegenc->line[2], height * sizeof (char *) / 2);
+ jpegenc->cinfo.raw_data_in = TRUE;
+ /* duh, libjpeg maps RGB to YUV ... and don't expect some conversion */
+ if (jpegenc->cinfo.in_color_space == JCS_RGB)
+ jpeg_set_colorspace (&jpegenc->cinfo, JCS_RGB);
+
+ GST_DEBUG_OBJECT (jpegenc, "h_max_samp=%d, v_max_samp=%d",
+ jpegenc->h_max_samp, jpegenc->v_max_samp);
+ /* image dimension info */
+ for (i = 0; i < jpegenc->channels; i++) {
+ GST_DEBUG_OBJECT (jpegenc, "comp %i: h_samp=%d, v_samp=%d", i,
+ jpegenc->h_samp[i], jpegenc->v_samp[i]);
+ jpegenc->cinfo.comp_info[i].h_samp_factor = jpegenc->h_samp[i];
+ jpegenc->cinfo.comp_info[i].v_samp_factor = jpegenc->v_samp[i];
+ g_free (jpegenc->line[i]);
+ jpegenc->line[i] = g_new (guchar *, jpegenc->v_max_samp * DCTSIZE);
+ if (!jpegenc->planar) {
+ for (j = 0; j < jpegenc->v_max_samp * DCTSIZE; j++) {
+ g_free (jpegenc->row[i][j]);
+ jpegenc->row[i][j] = g_malloc (width);
+ jpegenc->line[i][j] = jpegenc->row[i][j];
}
-
- GST_DEBUG_OBJECT (jpegenc, "setting format done");
-#if 0
- break;
- default:
- printf ("gst_jpegenc_resync: unsupported colorspace, using RGB\n");
- jpegenc->bufsize = jpegenc->width * jpegenc->height * 3;
- jpegenc->cinfo.in_color_space = JCS_RGB;
- break;
+ }
}
-#endif
+
+ /* guard against a potential error in gst_jpegenc_term_destination
+ which occurs iff bufsize % 4 < free_space_remaining */
+ jpegenc->bufsize = GST_ROUND_UP_4 (jpegenc->bufsize);
jpeg_suppress_tables (&jpegenc->cinfo, TRUE);
- //jpeg_suppress_tables(&jpegenc->cinfo, FALSE);
- jpegenc->buffer = NULL;
GST_DEBUG_OBJECT (jpegenc, "resync done");
}
GstJpegEnc *jpegenc;
guchar *data;
gulong size;
- GstBuffer *outbuf;
- guint height, width;
+ guint height;
guchar *base[3], *end[3];
gint i, j, k;
jpegenc = GST_JPEGENC (GST_OBJECT_PARENT (pad));
+ if (G_UNLIKELY (jpegenc->width <= 0 || jpegenc->height <= 0))
+ goto not_negotiated;
+
data = GST_BUFFER_DATA (buf);
size = GST_BUFFER_SIZE (buf);
- GST_DEBUG_OBJECT (jpegenc, "got buffer of %u bytes", size);
+ GST_LOG_OBJECT (jpegenc, "got buffer of %lu bytes", size);
ret =
gst_pad_alloc_buffer_and_set_caps (jpegenc->srcpad,
GST_BUFFER_OFFSET_NONE, jpegenc->bufsize, GST_PAD_CAPS (jpegenc->srcpad),
- &outbuf);
+ &jpegenc->output_buffer);
if (ret != GST_FLOW_OK)
goto done;
- gst_buffer_stamp (outbuf, buf);
+ gst_buffer_copy_metadata (jpegenc->output_buffer, buf,
+ GST_BUFFER_COPY_TIMESTAMPS);
- width = jpegenc->width;
height = jpegenc->height;
- base[0] = data + I420_Y_OFFSET (width, height);
- base[1] = data + I420_U_OFFSET (width, height);
- base[2] = data + I420_V_OFFSET (width, height);
-
- end[0] = base[0] + height * I420_Y_ROWSTRIDE (width);
- end[1] = base[1] + (height / 2) * I420_U_ROWSTRIDE (width);
- end[2] = base[2] + (height / 2) * I420_V_ROWSTRIDE (width);
+ for (i = 0; i < jpegenc->channels; i++) {
+ base[i] = data + jpegenc->offset[i];
+ end[i] = base[i] + jpegenc->cheight[i] * jpegenc->stride[i];
+ }
- jpegenc->jdest.next_output_byte = GST_BUFFER_DATA (outbuf);
- jpegenc->jdest.free_in_buffer = GST_BUFFER_SIZE (outbuf);
+ jpegenc->jdest.next_output_byte = GST_BUFFER_DATA (jpegenc->output_buffer);
+ jpegenc->jdest.free_in_buffer = GST_BUFFER_SIZE (jpegenc->output_buffer);
+ /* prepare for raw input */
+#if JPEG_LIB_VERSION >= 70
+ jpegenc->cinfo.do_fancy_downsampling = FALSE;
+#endif
jpegenc->cinfo.smoothing_factor = jpegenc->smoothing;
+ jpegenc->cinfo.dct_method = jpegenc->idct_method;
jpeg_set_quality (&jpegenc->cinfo, jpegenc->quality, TRUE);
jpeg_start_compress (&jpegenc->cinfo, TRUE);
- GST_DEBUG_OBJECT (jpegenc, "compressing");
-
- for (i = 0; i < height; i += 2 * DCTSIZE) {
- /*g_print ("next scanline: %d\n", jpegenc->cinfo.next_scanline); */
- for (j = 0, k = 0; j < (2 * DCTSIZE); j += 2, k++) {
- jpegenc->line[0][j] = base[0];
- if (base[0] + I420_Y_ROWSTRIDE (width) < end[0])
- base[0] += I420_Y_ROWSTRIDE (width);
- jpegenc->line[0][j + 1] = base[0];
- if (base[0] + I420_Y_ROWSTRIDE (width) < end[0])
- base[0] += I420_Y_ROWSTRIDE (width);
- jpegenc->line[1][k] = base[1];
- if (base[1] + I420_U_ROWSTRIDE (width) < end[1])
- base[1] += I420_U_ROWSTRIDE (width);
- jpegenc->line[2][k] = base[2];
- if (base[2] + I420_V_ROWSTRIDE (width) < end[2])
- base[2] += I420_V_ROWSTRIDE (width);
+ GST_LOG_OBJECT (jpegenc, "compressing");
+
+ if (jpegenc->planar) {
+ for (i = 0; i < height; i += jpegenc->v_max_samp * DCTSIZE) {
+ for (k = 0; k < jpegenc->channels; k++) {
+ for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) {
+ jpegenc->line[k][j] = base[k];
+ if (base[k] + jpegenc->stride[k] < end[k])
+ base[k] += jpegenc->stride[k];
+ }
+ }
+ jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
+ jpegenc->v_max_samp * DCTSIZE);
+ }
+ } else {
+ for (i = 0; i < height; i += jpegenc->v_max_samp * DCTSIZE) {
+ for (k = 0; k < jpegenc->channels; k++) {
+ for (j = 0; j < jpegenc->v_samp[k] * DCTSIZE; j++) {
+ guchar *src, *dst;
+ gint l;
+
+ /* ouch, copy line */
+ src = base[k];
+ dst = jpegenc->line[k][j];
+ for (l = jpegenc->cwidth[k]; l > 0; l--) {
+ *dst = *src;
+ src += jpegenc->inc[k];
+ dst++;
+ }
+ if (base[k] + jpegenc->stride[k] < end[k])
+ base[k] += jpegenc->stride[k];
+ }
+ }
+ jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line,
+ jpegenc->v_max_samp * DCTSIZE);
}
- jpeg_write_raw_data (&jpegenc->cinfo, jpegenc->line, 2 * DCTSIZE);
}
+ /* This will ensure that gst_jpegenc_term_destination is called; we push
+ the final output buffer from there */
jpeg_finish_compress (&jpegenc->cinfo);
- GST_DEBUG_OBJECT (jpegenc, "compressing done");
-
- GST_BUFFER_SIZE (outbuf) =
- GST_ROUND_UP_4 (jpegenc->bufsize - jpegenc->jdest.free_in_buffer);
-
- g_signal_emit (G_OBJECT (jpegenc), gst_jpegenc_signals[FRAME_ENCODED], 0);
-
- ret = gst_pad_push (jpegenc->srcpad, outbuf);
+ GST_LOG_OBJECT (jpegenc, "compressing done");
done:
gst_buffer_unref (buf);
return ret;
+
+/* ERRORS */
+not_negotiated:
+ {
+ GST_WARNING_OBJECT (jpegenc, "no input format set (no caps on buffer)");
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
}
static void
GST_OBJECT_LOCK (jpegenc);
switch (prop_id) {
- case ARG_QUALITY:
+ case PROP_QUALITY:
jpegenc->quality = g_value_get_int (value);
break;
- case ARG_SMOOTHING:
+#ifdef ENABLE_SMOOTHING
+ case PROP_SMOOTHING:
jpegenc->smoothing = g_value_get_int (value);
break;
+#endif
+ case PROP_IDCT_METHOD:
+ jpegenc->idct_method = g_value_get_enum (value);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
GST_OBJECT_LOCK (jpegenc);
switch (prop_id) {
- case ARG_QUALITY:
+ case PROP_QUALITY:
g_value_set_int (value, jpegenc->quality);
break;
- case ARG_SMOOTHING:
+#ifdef ENABLE_SMOOTHING
+ case PROP_SMOOTHING:
g_value_set_int (value, jpegenc->smoothing);
break;
+#endif
+ case PROP_IDCT_METHOD:
+ g_value_set_enum (value, jpegenc->idct_method);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
filter->line[0] = NULL;
filter->line[1] = NULL;
filter->line[2] = NULL;
- gst_jpegenc_resync (filter);
break;
default:
break;
return ret;
switch (transition) {
- case GST_STATE_CHANGE_READY_TO_NULL:
- g_free (filter->line[0]);
- g_free (filter->line[1]);
- g_free (filter->line[2]);
- filter->line[0] = NULL;
- filter->line[1] = NULL;
- filter->line[2] = NULL;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_jpegenc_reset (filter);
break;
default:
break;