/*
 *  gstvaapidecoder_jpeg.c - JPEG decoder
 *
 *  Copyright (C) 2011-2013 Intel Corporation
 *    Author: Wind Yuan <feng.yuan@intel.com>
 *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public License
 *  as published by the Free Software Foundation; either version 2.1
 *  of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free
 *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 *  Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_jpeg
 * @short_description: JPEG decoder
 */

#include "sysdeps.h"
#include <string.h>
#include <gst/codecparsers/gstjpegparser.h>
#include "gstvaapicompat.h"
#include "gstvaapidecoder_jpeg.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"

#ifdef HAVE_VA_VA_DEC_JPEG_H
# include <va/va_dec_jpeg.h>
#endif

#define DEBUG 1
#include "gstvaapidebug.h"

#define GST_VAAPI_DECODER_JPEG_CAST(decoder) \
    ((GstVaapiDecoderJpeg *)(decoder))

typedef struct _GstVaapiDecoderJpegPrivate      GstVaapiDecoderJpegPrivate;
typedef struct _GstVaapiDecoderJpegClass        GstVaapiDecoderJpegClass;

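/* Bitwise state flags, tracked separately for the parser and the decoder
 * stages (priv->parser_state and priv->decoder_state). A picture is only
 * considered decodable once SOI, SOF and SOS have all been seen. */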
typedef enum  {
    GST_JPEG_VIDEO_STATE_GOT_SOI        = 1 << 0,
    GST_JPEG_VIDEO_STATE_GOT_SOF        = 1 << 1,
    GST_JPEG_VIDEO_STATE_GOT_SOS        = 1 << 2,
    GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE  = 1 << 3,
    GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE   = 1 << 4,

    GST_JPEG_VIDEO_STATE_VALID_PICTURE = (
        GST_JPEG_VIDEO_STATE_GOT_SOI |
        GST_JPEG_VIDEO_STATE_GOT_SOF |
        GST_JPEG_VIDEO_STATE_GOT_SOS),
} GstJpegVideoState;

struct _GstVaapiDecoderJpegPrivate {
    GstVaapiProfile             profile;
    guint                       width;
    guint                       height;
    GstVaapiPicture            *current_picture;
    GstJpegFrameHdr             frame_hdr;
    GstJpegHuffmanTables        huf_tables;
    GstJpegQuantTables          quant_tables;
    guint                       mcu_restart;
    guint                       parser_state;
    guint                       decoder_state;
    guint                       is_opened       : 1;
    guint                       profile_changed : 1;
};

/**
 * GstVaapiDecoderJpeg:
 *
 * A decoder based on JPEG.
 */
struct _GstVaapiDecoderJpeg {
    /*< private >*/
    GstVaapiDecoder             parent_instance;
    GstVaapiDecoderJpegPrivate  priv;
};

/**
 * GstVaapiDecoderJpegClass:
 *
 * A decoder class based on JPEG.
 */
struct _GstVaapiDecoderJpegClass {
    /*< private >*/
    GstVaapiDecoderClass parent_class;
};

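/* The marker code of each parsed unit is stashed in the unit's parsed_info
 * pointer during the parse step, so that gst_vaapi_decoder_jpeg_decode()
 * can dispatch on it later without re-scanning the bitstream. */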
static inline void
unit_set_marker_code(GstVaapiDecoderUnit *unit, GstJpegMarkerCode marker)
{
    unit->parsed_info = GSIZE_TO_POINTER(marker);
}

static inline GstJpegMarkerCode
unit_get_marker_code(GstVaapiDecoderUnit *unit)
{
    return GPOINTER_TO_SIZE(unit->parsed_info);
}

static void
gst_vaapi_decoder_jpeg_close(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);

    /* Reset all */
    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
    priv->width                 = 0;
    priv->height                = 0;
    priv->is_opened             = FALSE;
    priv->profile_changed       = TRUE;
}

static gboolean
gst_vaapi_decoder_jpeg_open(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    gst_vaapi_decoder_jpeg_close(decoder);

    priv->parser_state  = 0;
    priv->decoder_state = 0;
    return TRUE;
}

static void
gst_vaapi_decoder_jpeg_destroy(GstVaapiDecoder *base_decoder)
{
    GstVaapiDecoderJpeg * const decoder =
        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);

    gst_vaapi_decoder_jpeg_close(decoder);
}

static gboolean
gst_vaapi_decoder_jpeg_create(GstVaapiDecoder *base_decoder)
{
    GstVaapiDecoderJpeg * const decoder =
        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
    priv->profile_changed       = TRUE;
    return TRUE;
}

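/* (Re)creates the underlying VA context whenever the profile changes. The
 * surface size comes from the last parsed SOF segment and the chroma type
 * is hardcoded to GST_VAAPI_CHROMA_TYPE_YUV420. */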
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

        profiles[n_profiles++] = priv->profile;
        //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
        //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

    if (reset_context) {
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static inline gboolean
is_valid_state(guint state, guint ref_state)
{
    return (state & ref_state) == ref_state;
}

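/* VALID_STATE(decoder, GOT_SOF) expands, via G_PASTE, to
 * is_valid_state(priv->decoder_state, GST_JPEG_VIDEO_STATE_GOT_SOF);
 * the first argument selects either the parser_state or the decoder_state
 * field of the private structure. */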
#define VALID_STATE(TYPE, STATE)                \
    is_valid_state(priv->G_PASTE(TYPE,_state),  \
        G_PASTE(GST_JPEG_VIDEO_STATE_,STATE))

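/* Submits the accumulated picture to the VA driver and pushes it to the
 * output queue. If SOI, SOF and SOS were not all seen for this frame, the
 * decoder state is reset and the frame is dropped instead. */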
static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;

    if (!VALID_STATE(decoder, VALID_PICTURE))
        goto drop_frame;
    priv->decoder_state = 0;

    if (!picture)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_vaapi_picture_decode(picture))
        goto error;
    if (!gst_vaapi_picture_output(picture))
        goto error;
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

error:
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

drop_frame:
    priv->decoder_state = 0;
    return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
}

static gboolean
fill_picture(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture,
    GstJpegFrameHdr     *frame_hdr
)
{
    VAPictureParameterBufferJPEGBaseline * const pic_param = picture->param;
    guint i;

    memset(pic_param, 0, sizeof(VAPictureParameterBufferJPEGBaseline));
    pic_param->picture_width    = frame_hdr->width;
    pic_param->picture_height   = frame_hdr->height;

    pic_param->num_components   = frame_hdr->num_components;
    if (frame_hdr->num_components > 4)
        return FALSE;
    for (i = 0; i < pic_param->num_components; i++) {
        pic_param->components[i].component_id =
            frame_hdr->components[i].identifier;
        pic_param->components[i].h_sampling_factor =
            frame_hdr->components[i].horizontal_factor;
        pic_param->components[i].v_sampling_factor =
            frame_hdr->components[i].vertical_factor;
        pic_param->components[i].quantiser_table_selector =
            frame_hdr->components[i].quant_table_selector;
    }
    return TRUE;
}

static GstVaapiDecoderStatus
fill_quantization_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    VAIQMatrixBufferJPEGBaseline *iq_matrix;
    guint i, j, num_tables;

    if (!VALID_STATE(decoder, GOT_IQ_TABLE))
        gst_jpeg_get_default_quantization_tables(&priv->quant_tables);

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
    if (!picture->iq_matrix) {
        GST_ERROR("failed to allocate quantiser table");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    iq_matrix = picture->iq_matrix->param;

    num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
                     GST_JPEG_MAX_QUANT_ELEMENTS);

    for (i = 0; i < num_tables; i++) {
        GstJpegQuantTable * const quant_table =
            &priv->quant_tables.quant_tables[i];

        iq_matrix->load_quantiser_table[i] = quant_table->valid;
        if (!iq_matrix->load_quantiser_table[i])
            continue;

        if (quant_table->quant_precision != 0) {
            // Only Baseline profile is supported, thus 8-bit Qk values
            GST_ERROR("unsupported quantization table element precision");
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
        }

        for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
            iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
        iq_matrix->load_quantiser_table[i] = 1;
        quant_table->valid = FALSE;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static gboolean
huffman_tables_updated(const GstJpegHuffmanTables *huf_tables)
{
    guint i;

    for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
        if (huf_tables->dc_tables[i].valid)
            return TRUE;
    for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
        if (huf_tables->ac_tables[i].valid)
            return TRUE;
    return FALSE;
}

static void
huffman_tables_reset(GstJpegHuffmanTables *huf_tables)
{
    guint i;

    for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
        huf_tables->dc_tables[i].valid = FALSE;
    for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
        huf_tables->ac_tables[i].valid = FALSE;
}

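/* Copies the parsed Huffman bits/values into the
 * VAHuffmanTableBufferJPEGBaseline layout. A table slot is only marked for
 * loading when both its DC and AC halves were declared valid by the
 * parser. */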
static void
fill_huffman_table(GstVaapiHuffmanTable *huf_table,
    const GstJpegHuffmanTables *huf_tables)
{
    VAHuffmanTableBufferJPEGBaseline * const huffman_table = huf_table->param;
    guint i, num_tables;

    num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
                     GST_JPEG_MAX_SCAN_COMPONENTS);

    for (i = 0; i < num_tables; i++) {
        huffman_table->load_huffman_table[i] =
            huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
        if (!huffman_table->load_huffman_table[i])
            continue;

        memcpy(huffman_table->huffman_table[i].num_dc_codes,
               huf_tables->dc_tables[i].huf_bits,
               sizeof(huffman_table->huffman_table[i].num_dc_codes));
        memcpy(huffman_table->huffman_table[i].dc_values,
               huf_tables->dc_tables[i].huf_values,
               sizeof(huffman_table->huffman_table[i].dc_values));
        memcpy(huffman_table->huffman_table[i].num_ac_codes,
               huf_tables->ac_tables[i].huf_bits,
               sizeof(huffman_table->huffman_table[i].num_ac_codes));
        memcpy(huffman_table->huffman_table[i].ac_values,
               huf_tables->ac_tables[i].huf_values,
               sizeof(huffman_table->huffman_table[i].ac_values));
        memset(huffman_table->huffman_table[i].pad,
               0,
               sizeof(huffman_table->huffman_table[i].pad));
    }
}

static void
get_max_sampling_factors(const GstJpegFrameHdr *frame_hdr,
    guint *h_max_ptr, guint *v_max_ptr)
{
    guint h_max = frame_hdr->components[0].horizontal_factor;
    guint v_max = frame_hdr->components[0].vertical_factor;
    guint i;

    for (i = 1; i < frame_hdr->num_components; i++) {
        const GstJpegFrameComponent * const fcp = &frame_hdr->components[i];
        if (h_max < fcp->horizontal_factor)
            h_max = fcp->horizontal_factor;
        if (v_max < fcp->vertical_factor)
            v_max = fcp->vertical_factor;
    }

    if (h_max_ptr)
        *h_max_ptr = h_max;
    if (v_max_ptr)
        *v_max_ptr = v_max;
}

static const GstJpegFrameComponent *
get_component(const GstJpegFrameHdr *frame_hdr, guint selector)
{
    guint i;

    for (i = 0; i < frame_hdr->num_components; i++) {
        const GstJpegFrameComponent * const fcp = &frame_hdr->components[i];
        if (fcp->identifier == selector)
            return fcp;
    }
    return NULL;
}

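/* Handles SOFn segments. Only SOF0 (baseline DCT) maps to a supported
 * profile; any other SOFn marker is rejected as an unsupported profile. */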
static GstVaapiDecoderStatus
decode_picture(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
    const guchar *buf)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;

    if (!VALID_STATE(decoder, GOT_SOI))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    switch (seg->marker) {
    case GST_JPEG_MARKER_SOF_MIN:
        priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
        break;
    default:
        GST_ERROR("unsupported profile %d", seg->marker);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    memset(frame_hdr, 0, sizeof(*frame_hdr));
    if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf + seg->offset, seg->size, 0)) {
        GST_ERROR("failed to parse image");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->height = frame_hdr->height;
    priv->width  = frame_hdr->width;

    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_huffman_table(
    GstVaapiDecoderJpeg *decoder,
    const guchar        *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    if (!VALID_STATE(decoder, GOT_SOI))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_jpeg_parse_huffman_table(&priv->huf_tables, buf, buf_size, 0)) {
        GST_ERROR("failed to parse Huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_quant_table(
    GstVaapiDecoderJpeg *decoder,
    const guchar        *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    if (!VALID_STATE(decoder, GOT_SOI))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_jpeg_parse_quant_table(&priv->quant_tables, buf, buf_size, 0)) {
        GST_ERROR("failed to parse quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_restart_interval(
    GstVaapiDecoderJpeg *decoder,
    const guchar        *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    if (!VALID_STATE(decoder, GOT_SOI))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_jpeg_parse_restart_interval(&priv->mcu_restart, buf, buf_size, 0)) {
        GST_ERROR("failed to parse restart interval");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

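/* Builds the VA slice for an SOS segment: the scan header is parsed, the
 * entropy-coded data that follows it becomes the slice data, and num_mcus
 * is derived from the frame size and the maximum sampling factors
 * (MCU size = 8 * h_max x 8 * v_max pixels for interleaved scans).
 * Illustrative example: a 640x480 4:2:0 frame has h_max = v_max = 2, so
 * MCUs are 16x16 and num_mcus = ceil(640/16) * ceil(480/16) = 40 * 30
 * = 1200. */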
static GstVaapiDecoderStatus
decode_scan(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
    const guchar *buf)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;
    GstVaapiSlice *slice;
    VASliceParameterBufferJPEGBaseline *slice_param;
    GstJpegScanHdr scan_hdr;
    guint scan_hdr_size, scan_data_size;
    guint i, h_max, v_max, mcu_width, mcu_height;

    if (!VALID_STATE(decoder, GOT_SOF))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    scan_hdr_size = (buf[seg->offset] << 8) | buf[seg->offset + 1];
    scan_data_size = seg->size - scan_hdr_size;

    memset(&scan_hdr, 0, sizeof(scan_hdr));
    if (!gst_jpeg_parse_scan_hdr(&scan_hdr, buf + seg->offset, seg->size, 0)) {
        GST_ERROR("failed to parse scan header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder,
        buf + seg->offset + scan_hdr_size, scan_data_size);
    if (!slice) {
        GST_ERROR("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, slice);

    if (!VALID_STATE(decoder, GOT_HUF_TABLE))
        gst_jpeg_get_default_huffman_tables(&priv->huf_tables);

    // Update VA Huffman table if it changed for this scan
    if (huffman_tables_updated(&priv->huf_tables)) {
        slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
        if (!slice->huf_table) {
            GST_ERROR("failed to allocate Huffman tables");
            huffman_tables_reset(&priv->huf_tables);
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
        fill_huffman_table(slice->huf_table, &priv->huf_tables);
        huffman_tables_reset(&priv->huf_tables);
    }

    slice_param = slice->param;
    slice_param->num_components = scan_hdr.num_components;
    for (i = 0; i < scan_hdr.num_components; i++) {
        slice_param->components[i].component_selector =
            scan_hdr.components[i].component_selector;
        slice_param->components[i].dc_table_selector =
            scan_hdr.components[i].dc_selector;
        slice_param->components[i].ac_table_selector =
            scan_hdr.components[i].ac_selector;
    }
    slice_param->restart_interval = priv->mcu_restart;
    slice_param->slice_horizontal_position = 0;
    slice_param->slice_vertical_position = 0;

    get_max_sampling_factors(&priv->frame_hdr, &h_max, &v_max);
    mcu_width = 8 * h_max;
    mcu_height = 8 * v_max;

    if (scan_hdr.num_components == 1) { // Non-interleaved
        const guint Csj = slice_param->components[0].component_selector;
        const GstJpegFrameComponent * const fcp =
            get_component(&priv->frame_hdr, Csj);

        if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
            GST_ERROR("failed to validate image component %u", Csj);
            return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
        }
        mcu_width /= fcp->horizontal_factor;
        mcu_height /= fcp->vertical_factor;
    }
    slice_param->num_mcus =
        ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
        ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);

    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_segment(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
    const guchar *buf)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiDecoderStatus status;

    // Decode segment
    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
    switch (seg->marker) {
    case GST_JPEG_MARKER_SOI:
        priv->mcu_restart = 0;
        priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
        break;
    case GST_JPEG_MARKER_EOI:
        priv->decoder_state = 0;
        break;
    case GST_JPEG_MARKER_DAC:
        GST_ERROR("unsupported arithmetic coding mode");
        status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        break;
    case GST_JPEG_MARKER_DHT:
        status = decode_huffman_table(decoder, buf + seg->offset, seg->size);
        break;
    case GST_JPEG_MARKER_DQT:
        status = decode_quant_table(decoder, buf + seg->offset, seg->size);
        break;
    case GST_JPEG_MARKER_DRI:
        status = decode_restart_interval(decoder, buf + seg->offset, seg->size);
        break;
    case GST_JPEG_MARKER_SOS:
        status = decode_scan(decoder, seg, buf);
        break;
    default:
        // SOFn segments
        if (seg->marker >= GST_JPEG_MARKER_SOF_MIN &&
            seg->marker <= GST_JPEG_MARKER_SOF_MAX)
            status = decode_picture(decoder, seg, buf);
        break;
    }
    return status;
}

static GstVaapiDecoderStatus
ensure_decoder(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;

    if (!priv->is_opened) {
        priv->is_opened = gst_vaapi_decoder_jpeg_open(decoder);
        if (!priv->is_opened)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static gboolean
is_scan_complete(GstJpegMarkerCode marker)
{
    // Scan is assumed to be complete when the new segment is not RSTi
    return marker < GST_JPEG_MARKER_RST_MIN || marker > GST_JPEG_MARKER_RST_MAX;
}

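/* Parse step: scans the adapter for the next marker segment and emits it as
 * one decoder unit. Garbage before SOI is skipped, and an SOS segment is
 * extended to cover the whole scan, i.e. the entropy-coded data and any
 * RSTn markers up to the next non-RSTn marker. The resulting unit is then
 * flagged as frame start/end, slice, or skip according to its marker. */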
static GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_parse(GstVaapiDecoder *base_decoder,
    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderJpeg * const decoder =
        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
    GstVaapiDecoderStatus status;
    GstJpegMarkerCode marker;
    GstJpegMarkerSegment seg;
    const guchar *buf;
    guint buf_size, flags;
    gint ofs1, ofs2;

    status = ensure_decoder(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* Expect at least 2 bytes for the marker */
    buf_size = gst_adapter_available(adapter);
    if (buf_size < 2)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    buf = gst_adapter_map(adapter, buf_size);
    if (!buf)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    ofs1 = ps->input_offset1 - 2;
    if (ofs1 < 0)
        ofs1 = 0;

    for (;;) {
        // Skip any garbage until we reach SOI, if needed
        if (!gst_jpeg_parse(&seg, buf, buf_size, ofs1)) {
            gst_adapter_unmap(adapter);
            ps->input_offset1 = buf_size;
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        }
        ofs1 = seg.offset;

        marker = seg.marker;
        if (!VALID_STATE(parser, GOT_SOI) && marker != GST_JPEG_MARKER_SOI)
            continue;
        if (marker == GST_JPEG_MARKER_SOS) {
            ofs2 = ps->input_offset2 - 2;
            if (ofs2 < ofs1 + seg.size)
                ofs2 = ofs1 + seg.size;

            // Parse the whole scan + ECSs, including RSTi
            for (;;) {
                if (!gst_jpeg_parse(&seg, buf, buf_size, ofs2)) {
                    gst_adapter_unmap(adapter);
                    ps->input_offset1 = ofs1;
                    ps->input_offset2 = buf_size;
                    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
                }

                if (is_scan_complete(seg.marker))
                    break;
                ofs2 = seg.offset + seg.size;
            }
            ofs2 = seg.offset - 2;
        }
        else {
            // Check that the whole segment is actually available (in buffer)
            ofs2 = ofs1 + seg.size;
            if (ofs2 > buf_size) {
                gst_adapter_unmap(adapter);
                ps->input_offset1 = ofs1;
                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
            }
        }
        break;
    }
    gst_adapter_unmap(adapter);

    unit->size = ofs2 - ofs1;
    unit_set_marker_code(unit, marker);
    gst_adapter_flush(adapter, ofs1);
    ps->input_offset1 = 2;
    ps->input_offset2 = 2;

    flags = 0;
    switch (marker) {
    case GST_JPEG_MARKER_SOI:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
        break;
    case GST_JPEG_MARKER_EOI:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
        priv->parser_state = 0;
        break;
    case GST_JPEG_MARKER_SOS:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
        priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
        break;
    case GST_JPEG_MARKER_DAC:
    case GST_JPEG_MARKER_DHT:
    case GST_JPEG_MARKER_DQT:
        if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOF)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
        break;
    case GST_JPEG_MARKER_DRI:
        if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOS)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
        break;
    case GST_JPEG_MARKER_DNL:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
        break;
    case GST_JPEG_MARKER_COM:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        break;
    default:
        /* SOFn segments */
        if (marker >= GST_JPEG_MARKER_SOF_MIN &&
            marker <= GST_JPEG_MARKER_SOF_MAX)
            priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;

        /* Application segments */
        else if (marker >= GST_JPEG_MARKER_APP_MIN &&
                 marker <= GST_JPEG_MARKER_APP_MAX)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;

        /* Reserved */
        else if (marker >= 0x02 && marker <= 0xbf)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        break;
    }
    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_decode(GstVaapiDecoder *base_decoder,
    GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderJpeg * const decoder =
        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
    GstVaapiDecoderStatus status;
    GstJpegMarkerSegment seg;
    GstBuffer * const buffer =
        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
    GstMapInfo map_info;

    status = ensure_decoder(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
        GST_ERROR("failed to map buffer");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    seg.marker = unit_get_marker_code(unit);
    seg.offset = unit->offset;
    seg.size = unit->size;

    status = decode_segment(decoder, &seg, map_info.data);
    gst_buffer_unmap(buffer, &map_info);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

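/* Called for the first slice unit of a frame, once SOF has been seen:
 * ensures the VA context matches the frame size/profile, allocates the VA
 * picture, and fills the picture parameters and IQ matrix before any scan
 * is decoded. */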
static GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_start_frame(GstVaapiDecoder *base_decoder,
    GstVaapiDecoderUnit *base_unit)
{
    GstVaapiDecoderJpeg * const decoder =
        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;

    if (!VALID_STATE(decoder, GOT_SOF))
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset context");
        return status;
    }

    picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
    if (!picture) {
        GST_ERROR("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    if (!fill_picture(decoder, picture, &priv->frame_hdr))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    status = fill_quantization_table(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* Update presentation time */
    picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_end_frame(GstVaapiDecoder *base_decoder)
{
    GstVaapiDecoderJpeg * const decoder =
        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);

    return decode_current_picture(decoder);
}

static void
gst_vaapi_decoder_jpeg_class_init(GstVaapiDecoderJpegClass *klass)
{
    GstVaapiMiniObjectClass * const object_class =
        GST_VAAPI_MINI_OBJECT_CLASS(klass);
    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);

    object_class->size          = sizeof(GstVaapiDecoderJpeg);
    object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;

    decoder_class->create       = gst_vaapi_decoder_jpeg_create;
    decoder_class->destroy      = gst_vaapi_decoder_jpeg_destroy;
    decoder_class->parse        = gst_vaapi_decoder_jpeg_parse;
    decoder_class->decode       = gst_vaapi_decoder_jpeg_decode;
    decoder_class->start_frame  = gst_vaapi_decoder_jpeg_start_frame;
    decoder_class->end_frame    = gst_vaapi_decoder_jpeg_end_frame;
}

static inline const GstVaapiDecoderClass *
gst_vaapi_decoder_jpeg_class(void)
{
    static GstVaapiDecoderJpegClass g_class;
    static gsize g_class_init = FALSE;

    if (g_once_init_enter(&g_class_init)) {
        gst_vaapi_decoder_jpeg_class_init(&g_class);
        g_once_init_leave(&g_class_init, TRUE);
    }
    return GST_VAAPI_DECODER_CLASS(&g_class);
}

/**
 * gst_vaapi_decoder_jpeg_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for JPEG decoding.  The @caps can
 * hold extra information like codec-data and picture coded size.
 *
 * Return value: the newly allocated #GstVaapiDecoder object
 */
GstVaapiDecoder *
gst_vaapi_decoder_jpeg_new(GstVaapiDisplay *display, GstCaps *caps)
{
    return gst_vaapi_decoder_new(gst_vaapi_decoder_jpeg_class(), display, caps);
}
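
/*
 * Minimal usage sketch (illustrative only, not part of this file; the caps
 * values and the surrounding "display" variable are assumptions about what
 * a typical caller would provide):
 *
 *   GstCaps *caps = gst_caps_new_simple("image/jpeg",
 *       "width",  G_TYPE_INT, 640,
 *       "height", G_TYPE_INT, 480, NULL);
 *   GstVaapiDecoder *decoder = gst_vaapi_decoder_jpeg_new(display, caps);
 *   gst_caps_unref(caps);
 *   ...feed buffers through the generic GstVaapiDecoder API...
 *   gst_vaapi_decoder_unref(decoder);
 */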