2 * Copyright (C) 2012 Collabora Ltd.
3 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
4 * Copyright (C) 2013 Sebastian Dröge <slomo@circular-chaos.org>
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19 * Boston, MA 02110-1301, USA.
27 #include "gstopenjpegdec.h"
/* Element-private debug category; registered in class_init. */
GST_DEBUG_CATEGORY_STATIC (gst_openjpeg_dec_debug);
#define GST_CAT_DEFAULT gst_openjpeg_dec_debug
34 static gboolean gst_openjpeg_dec_start (GstVideoDecoder * decoder);
35 static gboolean gst_openjpeg_dec_stop (GstVideoDecoder * decoder);
36 static gboolean gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
37 GstVideoCodecState * state);
38 static GstFlowReturn gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
39 GstVideoCodecFrame * frame);
40 static gboolean gst_openjpeg_dec_decide_allocation (GstVideoDecoder * decoder,
/* Host-endian variants of the 16-bit gray and 10-bit YUV format strings
 * used in the source pad template below. */
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GRAY16 "GRAY16_LE"
#define YUV10 "Y444_10LE, I422_10LE, I420_10LE"
#else
#define GRAY16 "GRAY16_BE"
#define YUV10 "Y444_10BE, I422_10BE, I420_10BE"
#endif
51 static GstStaticPadTemplate gst_openjpeg_dec_sink_template =
52 GST_STATIC_PAD_TEMPLATE ("sink",
55 GST_STATIC_CAPS ("image/x-j2c, "
56 "colorspace = (string) { sRGB, sYUV, GRAY }; "
58 "colorspace = (string) { sRGB, sYUV, GRAY }; " "image/jp2")
61 static GstStaticPadTemplate gst_openjpeg_dec_src_template =
62 GST_STATIC_PAD_TEMPLATE ("src",
65 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ ARGB64, ARGB, xRGB, "
67 "AYUV, Y444, Y42B, I420, Y41B, YUV9, " "GRAY8, " GRAY16 " }"))
#define parent_class gst_openjpeg_dec_parent_class
G_DEFINE_TYPE (GstOpenJPEGDec, gst_openjpeg_dec, GST_TYPE_VIDEO_DECODER);
74 gst_openjpeg_dec_class_init (GstOpenJPEGDecClass * klass)
76 GstElementClass *element_class;
77 GstVideoDecoderClass *video_decoder_class;
79 element_class = (GstElementClass *) klass;
80 video_decoder_class = (GstVideoDecoderClass *) klass;
82 gst_element_class_add_static_pad_template (element_class,
83 &gst_openjpeg_dec_src_template);
84 gst_element_class_add_static_pad_template (element_class,
85 &gst_openjpeg_dec_sink_template);
87 gst_element_class_set_static_metadata (element_class,
88 "OpenJPEG JPEG2000 decoder",
89 "Codec/Decoder/Video",
90 "Decode JPEG2000 streams",
91 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
93 video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_start);
94 video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_stop);
95 video_decoder_class->set_format =
96 GST_DEBUG_FUNCPTR (gst_openjpeg_dec_set_format);
97 video_decoder_class->handle_frame =
98 GST_DEBUG_FUNCPTR (gst_openjpeg_dec_handle_frame);
99 video_decoder_class->decide_allocation = gst_openjpeg_dec_decide_allocation;
101 GST_DEBUG_CATEGORY_INIT (gst_openjpeg_dec_debug, "openjpegdec", 0,
106 gst_openjpeg_dec_init (GstOpenJPEGDec * self)
108 GstVideoDecoder *decoder = (GstVideoDecoder *) self;
110 gst_video_decoder_set_packetized (decoder, TRUE);
111 gst_video_decoder_set_needs_format (decoder, TRUE);
112 gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
114 GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (self));
115 opj_set_default_decoder_parameters (&self->params);
116 #ifdef HAVE_OPENJPEG_1
117 self->params.cp_limit_decoding = NO_LIMITATION;
122 gst_openjpeg_dec_start (GstVideoDecoder * decoder)
124 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
126 GST_DEBUG_OBJECT (self, "Starting");
132 gst_openjpeg_dec_stop (GstVideoDecoder * video_decoder)
134 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (video_decoder);
136 GST_DEBUG_OBJECT (self, "Stopping");
138 if (self->output_state) {
139 gst_video_codec_state_unref (self->output_state);
140 self->output_state = NULL;
143 if (self->input_state) {
144 gst_video_codec_state_unref (self->input_state);
145 self->input_state = NULL;
148 GST_DEBUG_OBJECT (self, "Stopped");
154 gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
155 GstVideoCodecState * state)
157 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
159 const gchar *color_space;
161 GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);
163 s = gst_caps_get_structure (state->caps, 0);
165 self->color_space = OPJ_CLRSPC_UNKNOWN;
167 if (gst_structure_has_name (s, "image/jp2")) {
168 self->codec_format = OPJ_CODEC_JP2;
169 self->is_jp2c = FALSE;
170 } else if (gst_structure_has_name (s, "image/x-j2c")) {
171 self->codec_format = OPJ_CODEC_J2K;
172 self->is_jp2c = TRUE;
173 } else if (gst_structure_has_name (s, "image/x-jpc")) {
174 self->codec_format = OPJ_CODEC_J2K;
175 self->is_jp2c = FALSE;
177 g_return_val_if_reached (FALSE);
180 if ((color_space = gst_structure_get_string (s, "colorspace"))) {
181 if (g_str_equal (color_space, "sRGB"))
182 self->color_space = OPJ_CLRSPC_SRGB;
183 else if (g_str_equal (color_space, "GRAY"))
184 self->color_space = OPJ_CLRSPC_GRAY;
185 else if (g_str_equal (color_space, "sYUV"))
186 self->color_space = OPJ_CLRSPC_SYCC;
190 gst_structure_get_int (s, "num-components", &self->ncomps);
192 if (self->input_state)
193 gst_video_codec_state_unref (self->input_state);
194 self->input_state = gst_video_codec_state_ref (state);
200 fill_frame_packed8_4 (GstVideoFrame * frame, opj_image_t * image)
203 guint8 *data_out, *tmp;
204 const gint *data_in[4];
207 w = GST_VIDEO_FRAME_WIDTH (frame);
208 h = GST_VIDEO_FRAME_HEIGHT (frame);
209 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
210 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
212 data_in[0] = image->comps[0].data;
213 data_in[1] = image->comps[1].data;
214 data_in[2] = image->comps[2].data;
215 data_in[3] = image->comps[3].data;
217 for (y = 0; y < h; y++) {
220 for (x = 0; x < w; x++) {
221 tmp[0] = *data_in[3];
222 tmp[1] = *data_in[0];
223 tmp[2] = *data_in[1];
224 tmp[3] = *data_in[2];
237 fill_frame_packed16_4 (GstVideoFrame * frame, opj_image_t * image)
240 guint16 *data_out, *tmp;
241 const gint *data_in[4];
245 w = GST_VIDEO_FRAME_WIDTH (frame);
246 h = GST_VIDEO_FRAME_HEIGHT (frame);
247 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
248 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
250 data_in[0] = image->comps[0].data;
251 data_in[1] = image->comps[1].data;
252 data_in[2] = image->comps[2].data;
253 data_in[3] = image->comps[3].data;
255 shift[0] = 16 - image->comps[0].prec;
256 shift[1] = 16 - image->comps[1].prec;
257 shift[2] = 16 - image->comps[2].prec;
258 shift[3] = 16 - image->comps[3].prec;
260 for (y = 0; y < h; y++) {
263 for (x = 0; x < w; x++) {
264 tmp[0] = *data_in[3] << shift[3];
265 tmp[1] = *data_in[0] << shift[0];
266 tmp[2] = *data_in[1] << shift[1];
267 tmp[3] = *data_in[2] << shift[2];
280 fill_frame_packed8_3 (GstVideoFrame * frame, opj_image_t * image)
283 guint8 *data_out, *tmp;
284 const gint *data_in[3];
287 w = GST_VIDEO_FRAME_WIDTH (frame);
288 h = GST_VIDEO_FRAME_HEIGHT (frame);
289 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
290 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
292 data_in[0] = image->comps[0].data;
293 data_in[1] = image->comps[1].data;
294 data_in[2] = image->comps[2].data;
296 for (y = 0; y < h; y++) {
299 for (x = 0; x < w; x++) {
300 tmp[1] = *data_in[0];
301 tmp[2] = *data_in[1];
302 tmp[3] = *data_in[2];
314 fill_frame_packed16_3 (GstVideoFrame * frame, opj_image_t * image)
317 guint16 *data_out, *tmp;
318 const gint *data_in[3];
322 w = GST_VIDEO_FRAME_WIDTH (frame);
323 h = GST_VIDEO_FRAME_HEIGHT (frame);
324 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
325 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
327 data_in[0] = image->comps[0].data;
328 data_in[1] = image->comps[1].data;
329 data_in[2] = image->comps[2].data;
331 shift[0] = 16 - image->comps[0].prec;
332 shift[1] = 16 - image->comps[1].prec;
333 shift[2] = 16 - image->comps[2].prec;
335 for (y = 0; y < h; y++) {
338 for (x = 0; x < w; x++) {
339 tmp[1] = *data_in[0] << shift[0];
340 tmp[2] = *data_in[1] << shift[1];
341 tmp[3] = *data_in[2] << shift[2];
353 fill_frame_planar8_1 (GstVideoFrame * frame, opj_image_t * image)
356 guint8 *data_out, *tmp;
360 w = GST_VIDEO_FRAME_WIDTH (frame);
361 h = GST_VIDEO_FRAME_HEIGHT (frame);
362 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
363 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
365 data_in = image->comps[0].data;
367 for (y = 0; y < h; y++) {
370 for (x = 0; x < w; x++) {
381 fill_frame_planar16_1 (GstVideoFrame * frame, opj_image_t * image)
384 guint16 *data_out, *tmp;
389 w = GST_VIDEO_FRAME_WIDTH (frame);
390 h = GST_VIDEO_FRAME_HEIGHT (frame);
391 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
392 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
394 data_in = image->comps[0].data;
396 shift = 16 - image->comps[0].prec;
398 for (y = 0; y < h; y++) {
401 for (x = 0; x < w; x++) {
402 *tmp = *data_in << shift;
412 fill_frame_planar8_3 (GstVideoFrame * frame, opj_image_t * image)
415 guint8 *data_out, *tmp;
419 for (c = 0; c < 3; c++) {
420 w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
421 h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
422 dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c);
423 data_out = GST_VIDEO_FRAME_COMP_DATA (frame, c);
424 data_in = image->comps[c].data;
426 for (y = 0; y < h; y++) {
429 for (x = 0; x < w; x++) {
440 fill_frame_planar16_3 (GstVideoFrame * frame, opj_image_t * image)
443 guint16 *data_out, *tmp;
448 for (c = 0; c < 3; c++) {
449 w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
450 h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
451 dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c) / 2;
452 data_out = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c);
453 data_in = image->comps[c].data;
454 shift = 16 - image->comps[c].prec;
456 for (y = 0; y < h; y++) {
459 for (x = 0; x < w; x++) {
460 *tmp = *data_in << shift;
470 fill_frame_planar8_3_generic (GstVideoFrame * frame, opj_image_t * image)
473 guint8 *data_out, *tmp;
474 const gint *data_in[3];
478 w = GST_VIDEO_FRAME_WIDTH (frame);
479 h = GST_VIDEO_FRAME_HEIGHT (frame);
480 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
481 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
483 data_in[0] = image->comps[0].data;
484 data_in[1] = image->comps[1].data;
485 data_in[2] = image->comps[2].data;
487 dx[0] = image->comps[0].dx;
488 dx[1] = image->comps[1].dx;
489 dx[2] = image->comps[2].dx;
491 dy[0] = image->comps[0].dy;
492 dy[1] = image->comps[1].dy;
493 dy[2] = image->comps[2].dy;
495 for (y = 0; y < h; y++) {
498 for (x = 0; x < w; x++) {
500 tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]];
501 tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]];
502 tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]];
510 fill_frame_planar8_4_generic (GstVideoFrame * frame, opj_image_t * image)
513 guint8 *data_out, *tmp;
514 const gint *data_in[4];
518 w = GST_VIDEO_FRAME_WIDTH (frame);
519 h = GST_VIDEO_FRAME_HEIGHT (frame);
520 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
521 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
523 data_in[0] = image->comps[0].data;
524 data_in[1] = image->comps[1].data;
525 data_in[2] = image->comps[2].data;
526 data_in[3] = image->comps[3].data;
528 dx[0] = image->comps[0].dx;
529 dx[1] = image->comps[1].dx;
530 dx[2] = image->comps[2].dx;
531 dx[3] = image->comps[3].dx;
533 dy[0] = image->comps[0].dy;
534 dy[1] = image->comps[1].dy;
535 dy[2] = image->comps[2].dy;
536 dy[3] = image->comps[3].dy;
538 for (y = 0; y < h; y++) {
541 for (x = 0; x < w; x++) {
542 tmp[0] = data_in[3][((y / dy[3]) * w + x) / dx[3]];
543 tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]];
544 tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]];
545 tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]];
553 fill_frame_planar16_3_generic (GstVideoFrame * frame, opj_image_t * image)
556 guint16 *data_out, *tmp;
557 const gint *data_in[3];
559 gint dx[3], dy[3], shift[3];
561 w = GST_VIDEO_FRAME_WIDTH (frame);
562 h = GST_VIDEO_FRAME_HEIGHT (frame);
563 data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
564 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
566 data_in[0] = image->comps[0].data;
567 data_in[1] = image->comps[1].data;
568 data_in[2] = image->comps[2].data;
570 dx[0] = image->comps[0].dx;
571 dx[1] = image->comps[1].dx;
572 dx[2] = image->comps[2].dx;
574 dy[0] = image->comps[0].dy;
575 dy[1] = image->comps[1].dy;
576 dy[2] = image->comps[2].dy;
578 shift[0] = 16 - image->comps[0].prec;
579 shift[1] = 16 - image->comps[1].prec;
580 shift[2] = 16 - image->comps[2].prec;
582 for (y = 0; y < h; y++) {
585 for (x = 0; x < w; x++) {
587 tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0];
588 tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1];
589 tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2];
597 fill_frame_planar16_4_generic (GstVideoFrame * frame, opj_image_t * image)
600 guint16 *data_out, *tmp;
601 const gint *data_in[4];
603 gint dx[4], dy[4], shift[4];
605 w = GST_VIDEO_FRAME_WIDTH (frame);
606 h = GST_VIDEO_FRAME_HEIGHT (frame);
607 data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
608 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
610 data_in[0] = image->comps[0].data;
611 data_in[1] = image->comps[1].data;
612 data_in[2] = image->comps[2].data;
613 data_in[3] = image->comps[3].data;
615 dx[0] = image->comps[0].dx;
616 dx[1] = image->comps[1].dx;
617 dx[2] = image->comps[2].dx;
618 dx[3] = image->comps[3].dx;
620 dy[0] = image->comps[0].dy;
621 dy[1] = image->comps[1].dy;
622 dy[2] = image->comps[2].dy;
623 dy[3] = image->comps[3].dy;
625 shift[0] = 16 - image->comps[0].prec;
626 shift[1] = 16 - image->comps[1].prec;
627 shift[2] = 16 - image->comps[2].prec;
628 shift[3] = 16 - image->comps[3].prec;
630 for (y = 0; y < h; y++) {
633 for (x = 0; x < w; x++) {
634 tmp[0] = data_in[3][((y / dy[3]) * w + x) / dx[3]] << shift[3];
635 tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0];
636 tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1];
637 tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2];
645 get_highest_prec (opj_image_t * image)
650 for (i = 0; i < image->numcomps; i++)
651 ret = MAX (image->comps[i].prec, ret);
657 gst_openjpeg_dec_negotiate (GstOpenJPEGDec * self, opj_image_t * image)
659 GstVideoFormat format;
662 if (image->color_space == OPJ_CLRSPC_UNKNOWN || image->color_space == 0)
663 image->color_space = self->color_space;
665 switch (image->color_space) {
666 case OPJ_CLRSPC_SRGB:
667 if (image->numcomps == 4) {
668 if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||
669 image->comps[1].dx != 1 || image->comps[1].dy != 1 ||
670 image->comps[2].dx != 1 || image->comps[2].dy != 1 ||
671 image->comps[3].dx != 1 || image->comps[3].dy != 1) {
672 GST_ERROR_OBJECT (self, "Sub-sampling for RGB not supported");
673 return GST_FLOW_NOT_NEGOTIATED;
676 if (get_highest_prec (image) == 8) {
677 self->fill_frame = fill_frame_packed8_4;
678 format = GST_VIDEO_FORMAT_ARGB;
679 } else if (get_highest_prec (image) <= 16) {
680 self->fill_frame = fill_frame_packed16_4;
681 format = GST_VIDEO_FORMAT_ARGB64;
683 GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[3].prec);
684 return GST_FLOW_NOT_NEGOTIATED;
686 } else if (image->numcomps == 3) {
687 if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||
688 image->comps[1].dx != 1 || image->comps[1].dy != 1 ||
689 image->comps[2].dx != 1 || image->comps[2].dy != 1) {
690 GST_ERROR_OBJECT (self, "Sub-sampling for RGB not supported");
691 return GST_FLOW_NOT_NEGOTIATED;
694 if (get_highest_prec (image) == 8) {
695 self->fill_frame = fill_frame_packed8_3;
696 format = GST_VIDEO_FORMAT_ARGB;
697 } else if (get_highest_prec (image) <= 16) {
698 self->fill_frame = fill_frame_packed16_3;
699 format = GST_VIDEO_FORMAT_ARGB64;
701 GST_ERROR_OBJECT (self, "Unsupported depth %d",
702 get_highest_prec (image));
703 return GST_FLOW_NOT_NEGOTIATED;
706 GST_ERROR_OBJECT (self, "Unsupported number of RGB components: %d",
708 return GST_FLOW_NOT_NEGOTIATED;
711 case OPJ_CLRSPC_GRAY:
712 if (image->numcomps == 1) {
713 if (image->comps[0].dx != 1 && image->comps[0].dy != 1) {
714 GST_ERROR_OBJECT (self, "Sub-sampling for GRAY not supported");
715 return GST_FLOW_NOT_NEGOTIATED;
718 if (get_highest_prec (image) == 8) {
719 self->fill_frame = fill_frame_planar8_1;
720 format = GST_VIDEO_FORMAT_GRAY8;
721 } else if (get_highest_prec (image) <= 16) {
722 self->fill_frame = fill_frame_planar16_1;
723 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
724 format = GST_VIDEO_FORMAT_GRAY16_LE;
726 format = GST_VIDEO_FORMAT_GRAY16_BE;
729 GST_ERROR_OBJECT (self, "Unsupported depth %d",
730 get_highest_prec (image));
731 return GST_FLOW_NOT_NEGOTIATED;
734 GST_ERROR_OBJECT (self, "Unsupported number of GRAY components: %d",
736 return GST_FLOW_NOT_NEGOTIATED;
739 case OPJ_CLRSPC_SYCC:
740 if (image->numcomps != 3 && image->numcomps != 4) {
741 GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",
743 return GST_FLOW_NOT_NEGOTIATED;
746 if (image->comps[0].dx != 1 || image->comps[0].dy != 1) {
747 GST_ERROR_OBJECT (self, "Sub-sampling of luma plane not supported");
748 return GST_FLOW_NOT_NEGOTIATED;
751 if (image->comps[1].dx != image->comps[2].dx ||
752 image->comps[1].dy != image->comps[2].dy) {
753 GST_ERROR_OBJECT (self,
754 "Different sub-sampling of chroma planes not supported");
755 return GST_FLOW_ERROR;
758 if (image->numcomps == 4) {
759 if (image->comps[3].dx != 1 || image->comps[3].dy != 1) {
760 GST_ERROR_OBJECT (self, "Sub-sampling of alpha plane not supported");
761 return GST_FLOW_NOT_NEGOTIATED;
764 if (get_highest_prec (image) == 8) {
765 self->fill_frame = fill_frame_planar8_4_generic;
766 format = GST_VIDEO_FORMAT_AYUV;
767 } else if (image->comps[3].prec <= 16) {
768 self->fill_frame = fill_frame_planar16_4_generic;
769 format = GST_VIDEO_FORMAT_AYUV64;
771 GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[0].prec);
772 return GST_FLOW_NOT_NEGOTIATED;
774 } else if (image->numcomps == 3) {
775 if (get_highest_prec (image) == 8) {
776 if (image->comps[1].dx == 1 && image->comps[1].dy == 1) {
777 self->fill_frame = fill_frame_planar8_3;
778 format = GST_VIDEO_FORMAT_Y444;
779 } else if (image->comps[1].dx == 2 && image->comps[1].dy == 1) {
780 self->fill_frame = fill_frame_planar8_3;
781 format = GST_VIDEO_FORMAT_Y42B;
782 } else if (image->comps[1].dx == 2 && image->comps[1].dy == 2) {
783 self->fill_frame = fill_frame_planar8_3;
784 format = GST_VIDEO_FORMAT_I420;
785 } else if (image->comps[1].dx == 4 && image->comps[1].dy == 1) {
786 self->fill_frame = fill_frame_planar8_3;
787 format = GST_VIDEO_FORMAT_Y41B;
788 } else if (image->comps[1].dx == 4 && image->comps[1].dy == 4) {
789 self->fill_frame = fill_frame_planar8_3;
790 format = GST_VIDEO_FORMAT_YUV9;
792 self->fill_frame = fill_frame_planar8_3_generic;
793 format = GST_VIDEO_FORMAT_AYUV;
795 } else if (get_highest_prec (image) <= 16) {
796 if (image->comps[0].prec == 10 &&
797 image->comps[1].prec == 10 && image->comps[2].prec == 10) {
798 if (image->comps[1].dx == 1 && image->comps[1].dy == 1) {
799 self->fill_frame = fill_frame_planar16_3;
800 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
801 format = GST_VIDEO_FORMAT_Y444_10LE;
803 format = GST_VIDEO_FORMAT_Y444_10BE;
805 } else if (image->comps[1].dx == 2 && image->comps[1].dy == 1) {
806 self->fill_frame = fill_frame_planar16_3;
807 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
808 format = GST_VIDEO_FORMAT_I422_10LE;
810 format = GST_VIDEO_FORMAT_I422_10BE;
812 } else if (image->comps[1].dx == 2 && image->comps[1].dy == 2) {
813 self->fill_frame = fill_frame_planar16_3;
814 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
815 format = GST_VIDEO_FORMAT_I420_10LE;
817 format = GST_VIDEO_FORMAT_I420_10BE;
820 self->fill_frame = fill_frame_planar16_3_generic;
821 format = GST_VIDEO_FORMAT_AYUV64;
824 self->fill_frame = fill_frame_planar16_3_generic;
825 format = GST_VIDEO_FORMAT_AYUV64;
828 GST_ERROR_OBJECT (self, "Unsupported depth %d",
829 get_highest_prec (image));
830 return GST_FLOW_NOT_NEGOTIATED;
833 GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",
835 return GST_FLOW_NOT_NEGOTIATED;
839 GST_ERROR_OBJECT (self, "Unsupported colorspace %d", image->color_space);
840 return GST_FLOW_NOT_NEGOTIATED;
843 width = image->x1 - image->x0;
844 height = image->y1 - image->y0;
846 if (!self->output_state ||
847 self->output_state->info.finfo->format != format ||
848 self->output_state->info.width != width ||
849 self->output_state->info.height != height) {
850 if (self->output_state)
851 gst_video_codec_state_unref (self->output_state);
853 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self), format,
854 width, height, self->input_state);
856 if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)))
857 return GST_FLOW_NOT_NEGOTIATED;
864 gst_openjpeg_dec_opj_error (const char *msg, void *userdata)
866 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
867 gchar *trimmed = g_strchomp (g_strdup (msg));
868 GST_TRACE_OBJECT (self, "openjpeg error: %s", trimmed);
873 gst_openjpeg_dec_opj_warning (const char *msg, void *userdata)
875 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
876 gchar *trimmed = g_strchomp (g_strdup (msg));
877 GST_TRACE_OBJECT (self, "openjpeg warning: %s", trimmed);
882 gst_openjpeg_dec_opj_info (const char *msg, void *userdata)
884 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
885 gchar *trimmed = g_strchomp (g_strdup (msg));
886 GST_TRACE_OBJECT (self, "openjpeg info: %s", trimmed);
890 #ifndef HAVE_OPENJPEG_1
898 read_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
900 MemStream *mstream = p_user_data;
903 if (mstream->offset == mstream->size)
906 if (mstream->offset + p_nb_bytes > mstream->size)
907 read = mstream->size - mstream->offset;
911 memcpy (p_buffer, mstream->data + mstream->offset, read);
912 mstream->offset += read;
918 write_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
920 g_return_val_if_reached (-1);
924 skip_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
926 MemStream *mstream = p_user_data;
929 if (mstream->offset + p_nb_bytes > mstream->size)
930 skip = mstream->size - mstream->offset;
934 mstream->offset += skip;
940 seek_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
942 MemStream *mstream = p_user_data;
944 if (p_nb_bytes > mstream->size)
947 mstream->offset = p_nb_bytes;
954 gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
955 GstVideoCodecFrame * frame)
957 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
958 GstFlowReturn ret = GST_FLOW_OK;
961 #ifdef HAVE_OPENJPEG_1
966 opj_stream_t *stream;
970 GstVideoFrame vframe;
971 opj_dparameters_t params;
973 GST_DEBUG_OBJECT (self, "Handling frame");
975 deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
977 GST_LOG_OBJECT (self, "Dropping too late frame: deadline %" G_GINT64_FORMAT,
979 ret = gst_video_decoder_drop_frame (decoder, frame);
983 dec = opj_create_decompress (self->codec_format);
985 goto initialization_error;
987 #ifdef HAVE_OPENJPEG_1
988 if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
990 opj_event_mgr_t callbacks;
992 callbacks.error_handler = gst_openjpeg_dec_opj_error;
993 callbacks.warning_handler = gst_openjpeg_dec_opj_warning;
994 callbacks.info_handler = gst_openjpeg_dec_opj_info;
995 opj_set_event_mgr ((opj_common_ptr) dec, &callbacks, self);
997 opj_set_event_mgr ((opj_common_ptr) dec, NULL, NULL);
1000 if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
1002 opj_set_info_handler (dec, gst_openjpeg_dec_opj_info, self);
1003 opj_set_warning_handler (dec, gst_openjpeg_dec_opj_warning, self);
1004 opj_set_error_handler (dec, gst_openjpeg_dec_opj_error, self);
1006 opj_set_info_handler (dec, NULL, NULL);
1007 opj_set_warning_handler (dec, NULL, NULL);
1008 opj_set_error_handler (dec, NULL, NULL);
1012 params = self->params;
1014 params.jpwl_exp_comps = self->ncomps;
1015 opj_setup_decoder (dec, ¶ms);
1017 if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
1018 goto map_read_error;
1020 #ifdef HAVE_OPENJPEG_1
1021 io = opj_cio_open ((opj_common_ptr) dec, map.data + (self->is_jp2c ? 8 : 0),
1022 map.size - (self->is_jp2c ? 8 : 0));
1026 image = opj_decode (dec, io);
1030 stream = opj_stream_create (4096, OPJ_TRUE);
1034 mstream.data = map.data + (self->is_jp2c ? 8 : 0);
1036 mstream.size = map.size - (self->is_jp2c ? 8 : 0);
1038 opj_stream_set_read_function (stream, read_fn);
1039 opj_stream_set_write_function (stream, write_fn);
1040 opj_stream_set_skip_function (stream, skip_fn);
1041 opj_stream_set_seek_function (stream, seek_fn);
1042 opj_stream_set_user_data (stream, &mstream);
1043 opj_stream_set_user_data_length (stream, mstream.size);
1046 if (!opj_read_header (stream, dec, &image))
1049 if (!opj_decode (dec, stream, image))
1056 for (i = 0; i < image->numcomps; i++) {
1057 if (image->comps[i].data == NULL)
1062 gst_buffer_unmap (frame->input_buffer, &map);
1064 ret = gst_openjpeg_dec_negotiate (self, image);
1065 if (ret != GST_FLOW_OK)
1066 goto negotiate_error;
1068 ret = gst_video_decoder_allocate_output_frame (decoder, frame);
1069 if (ret != GST_FLOW_OK)
1070 goto allocate_error;
1072 if (!gst_video_frame_map (&vframe, &self->output_state->info,
1073 frame->output_buffer, GST_MAP_WRITE))
1074 goto map_write_error;
1076 self->fill_frame (&vframe, image);
1078 gst_video_frame_unmap (&vframe);
1080 #ifdef HAVE_OPENJPEG_1
1082 opj_image_destroy (image);
1083 opj_destroy_decompress (dec);
1085 opj_end_decompress (dec, stream);
1086 opj_stream_destroy (stream);
1087 opj_image_destroy (image);
1088 opj_destroy_codec (dec);
1091 ret = gst_video_decoder_finish_frame (decoder, frame);
1095 initialization_error:
1097 gst_video_codec_frame_unref (frame);
1098 GST_ELEMENT_ERROR (self, LIBRARY, INIT,
1099 ("Failed to initialize OpenJPEG decoder"), (NULL));
1100 return GST_FLOW_ERROR;
1104 #ifdef HAVE_OPENJPEG_1
1105 opj_destroy_decompress (dec);
1107 opj_destroy_codec (dec);
1109 gst_video_codec_frame_unref (frame);
1111 GST_ELEMENT_ERROR (self, CORE, FAILED,
1112 ("Failed to map input buffer"), (NULL));
1113 return GST_FLOW_ERROR;
1117 #ifdef HAVE_OPENJPEG_1
1118 opj_destroy_decompress (dec);
1120 opj_destroy_codec (dec);
1122 gst_buffer_unmap (frame->input_buffer, &map);
1123 gst_video_codec_frame_unref (frame);
1125 GST_ELEMENT_ERROR (self, LIBRARY, INIT,
1126 ("Failed to open OpenJPEG stream"), (NULL));
1127 return GST_FLOW_ERROR;
1132 opj_image_destroy (image);
1133 #ifdef HAVE_OPENJPEG_1
1135 opj_destroy_decompress (dec);
1137 opj_stream_destroy (stream);
1138 opj_destroy_codec (dec);
1140 gst_buffer_unmap (frame->input_buffer, &map);
1141 gst_video_codec_frame_unref (frame);
1143 GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
1144 ("Failed to decode OpenJPEG stream"), (NULL), ret);
1149 opj_image_destroy (image);
1150 #ifdef HAVE_OPENJPEG_1
1152 opj_destroy_decompress (dec);
1154 opj_stream_destroy (stream);
1155 opj_destroy_codec (dec);
1157 gst_video_codec_frame_unref (frame);
1159 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
1160 ("Failed to negotiate"), (NULL));
1165 opj_image_destroy (image);
1166 #ifdef HAVE_OPENJPEG_1
1168 opj_destroy_decompress (dec);
1170 opj_stream_destroy (stream);
1171 opj_destroy_codec (dec);
1173 gst_video_codec_frame_unref (frame);
1175 GST_ELEMENT_ERROR (self, CORE, FAILED,
1176 ("Failed to allocate output buffer"), (NULL));
1181 opj_image_destroy (image);
1182 #ifdef HAVE_OPENJPEG_1
1184 opj_destroy_decompress (dec);
1186 opj_stream_destroy (stream);
1187 opj_destroy_codec (dec);
1189 gst_video_codec_frame_unref (frame);
1191 GST_ELEMENT_ERROR (self, CORE, FAILED,
1192 ("Failed to map output buffer"), (NULL));
1193 return GST_FLOW_ERROR;
1198 gst_openjpeg_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
1200 GstBufferPool *pool;
1201 GstStructure *config;
1203 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
1207 g_assert (gst_query_get_n_allocation_pools (query) > 0);
1208 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1209 g_assert (pool != NULL);
1211 config = gst_buffer_pool_get_config (pool);
1212 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1213 gst_buffer_pool_config_add_option (config,
1214 GST_BUFFER_POOL_OPTION_VIDEO_META);
1216 gst_buffer_pool_set_config (pool, config);
1217 gst_object_unref (pool);