2 * Copyright (C) 2021 Intel Corporation
3 * Author: He Junyan <junyan.he@intel.com>
4 * Author: Víctor Jáquez <vjaquez@igalia.com>
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19 * Boston, MA 02110-1301, USA.
23 * SECTION:element-vah264enc
25 * @short_description: A VA-API based H264 video encoder
27 * vah264enc encodes raw video VA surfaces into H.264 bitstreams using
28 * the installed and chosen [VA-API](https://01.org/linuxmedia/vaapi)
31 * The raw video frames in main memory can be imported into VA surfaces.
33 * ## Example launch line
35 * gst-launch-1.0 videotestsrc num-buffers=60 ! timeoverlay ! vah264enc ! h264parse ! mp4mux ! filesink location=test.mp4
43 * 1. Look ahead, which can optimize the slice type and QP.
45 * 3. The stereo encoding such as the frame-packing or MVC.
46 * 4. Weight prediction of B frame.
53 #include <gst/video/video.h>
55 #include <va/va_drmcommon.h>
57 #include <gst/codecparsers/gsth264bitwriter.h>
58 #include <gst/va/gstvautils.h>
61 #include "gstvah264enc.h"
62 #include "gstvaencoder.h"
63 #include "gstvavideoformat.h"
64 #include "gstvaallocator.h"
65 #include "gstvacaps.h"
66 #include "gstvaprofile.h"
67 #include "gstvadisplay_priv.h"
68 #include "gstvapool.h"
/* Debug category backing GST_CAT_DEFAULT; the NULL fallback is used when
 * GStreamer is built without debug support.
 * NOTE(review): the #else/#endif of this conditional are not visible in
 * this excerpt. */
GST_DEBUG_CATEGORY_STATIC (gst_va_h264enc_debug);
#ifndef GST_DISABLE_GST_DEBUG
#define GST_CAT_DEFAULT gst_va_h264enc_debug
#define GST_CAT_DEFAULT NULL

/* Forward typedefs for the encoder element, its class, the per-frame
 * bookkeeping object and a row of the level-limits table. */
typedef struct _GstVaH264Enc GstVaH264Enc;
typedef struct _GstVaH264EncClass GstVaH264EncClass;
typedef struct _GstVaH264EncFrame GstVaH264EncFrame;
typedef struct _GstVaH264LevelLimits GstVaH264LevelLimits;

/* Plain cast helpers (no runtime type check). */
#define GST_VA_H264_ENC(obj) ((GstVaH264Enc *) obj)
#define GST_VA_H264_ENC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), G_TYPE_FROM_INSTANCE (obj), GstVaH264EncClass))
#define GST_VA_H264_ENC_CLASS(klass) ((GstVaH264EncClass *) klass)

/* Mini-object GType for GstVaH264EncFrame (defined further below via
 * GST_DEFINE_MINI_OBJECT_TYPE). */
static GType gst_va_h264_enc_frame_get_type (void);
#define GST_TYPE_VA_H264_ENC_FRAME (gst_va_h264_enc_frame_get_type())
#define GST_IS_VA_H264_ENC_FRAME(obj) (GST_IS_MINI_OBJECT_TYPE((obj), GST_TYPE_VA_H264_ENC_FRAME))
#define GST_VA_H264_ENC_FRAME(obj) ((GstVaH264EncFrame *)(obj))

/* Property enum entry (the enclosing enum is not visible in this
 * excerpt). */
PROP_TARGET_PERCENTAGE,

/* GParamSpec cache, indexed by the property enum above. */
static GParamSpec *properties[N_PROPERTIES];

/* Scale factor for bitrate (HRD bit_rate_scale: min = 6) */
/* Scale factor for CPB size (HRD cpb_size_scale: min = 4) */
#define SX_CPB_SIZE 4
/* Maximum sizes for common headers (in bits) */
#define MAX_SPS_HDR_SIZE 16473
#define MAX_VUI_PARAMS_SIZE 210
#define MAX_HRD_PARAMS_SIZE 4103
#define MAX_PPS_HDR_SIZE 101
#define MAX_SLICE_HDR_SIZE 397 + 2572 + 6670 + 2402

/* Upper bound for idr_period and the gop.frame_types[] map below. */
#define MAX_GOP_SIZE 1024

static GstObjectClass *parent_class = NULL;
/* Class data: the DRM render device path plus virtual methods that
 * implement reconfiguration and the frame reorder/encode queue. */
struct _GstVaH264EncClass
  GstVideoEncoderClass parent_class;

  gchar *render_device_path;

  /* Re-validates/re-negotiates the encoder configuration. */
  gboolean (*reconfig) (GstVaH264Enc * encoder);
  /* Queues @frame for encoding (further parameters not visible in this
   * excerpt). */
  gboolean (*push_frame) (GstVaH264Enc * encoder,
      GstVaH264EncFrame * frame,
  /* Dequeues the next frame in encode order into @out_frame. */
  gboolean (*pop_frame) (GstVaH264Enc * encoder,
      GstVaH264EncFrame ** out_frame);
  /* Submits @frame to the VA driver. */
  gboolean (*encode_frame) (GstVaH264Enc * encoder,
      GstVaH264EncFrame * frame);
/* Instance data.
 * NOTE(review): the "struct _GstVaH264Enc {" header and the braces of
 * the nested gop/rc sub-structs are not visible in this excerpt. */
GstVideoEncoder parent_instance;

GstVaDisplay *display;

VAEntrypoint entrypoint;

guint32 num_ref_frames;

gboolean use_trellis;

guint32 target_percentage;
guint32 target_usage;

/* Negotiated input/output caps states. */
GstVideoCodecState *input_state;
GstVideoCodecState *output_state;

GstVideoInfo in_info;
GstVideoInfo sinkpad_info;
GstBufferPool *raw_pool;

GstClockTime start_pts;
GstClockTime frame_duration;
/* Total frames we handled since reconfig. */
guint input_frame_count;
guint output_frame_count;

GstVaEncoder *encoder;

guint preferred_output_delay;

/* Current level as in _va_h264_level_limits (name string). */
const gchar *level_str;
/* Minimum Compression Ratio (A.3.1) */

gboolean use_trellis;

guint32 packed_headers;

/* frames between two IDR [idr, ...., idr) */
/* How many IDRs we have encoded */
guint32 total_idr_count;
/* frames between I/P and P frames [I, B, B, .., B, P) */
/* frames between I frames [I, B, B, .., B, P, ..., I), open GOP */
/* B frames between I/P and P. */
/* Use B pyramid structure in the GOP. */
/* Level 0 is the simple B not acting as ref. */
guint32 highest_pyramid_level;
/* If open GOP, I frames within a GOP. */
/* A map of all frames types within a GOP. */
guint8 pyramid_level;
/* Only for b pyramid */
gint left_ref_poc_diff;
gint right_ref_poc_diff;
} frame_types[MAX_GOP_SIZE];
/* current index in the frames types map. */
guint cur_frame_index;
/* Number of ref frames within current GOP. H264's frame num. */
/* Max frame num within a GOP. */
guint32 max_frame_num;
guint32 log2_max_frame_num;
/* Max poc within a GOP. */
guint32 max_pic_order_cnt;
guint32 log2_max_pic_order_cnt;

/* Total ref frames of list0 and list1. */
guint32 num_ref_frames;
guint32 ref_num_list0;
guint32 ref_num_list1;

guint num_reorder_frames;

/* Rate-control state (rc sub-struct). */
guint32 rc_ctrl_mode;

/* macroblock bitrate control */
guint target_bitrate;
guint target_percentage;

guint max_bitrate_bits;
guint target_bitrate_bits;
/* length of CPB buffer */
/* length of CPB buffer (bits) */
guint cpb_length_bits;

GstH264SPS sequence_hdr;
/* Per-frame bookkeeping attached to each GstVideoCodecFrame while it
 * travels through the reorder/encode queue. */
struct _GstVaH264EncFrame
  GstMiniObject parent;

  GstVideoCodecFrame *frame;
  GstVaEncodePicture *picture;
  GstH264SliceType type;

  /* Only for b pyramid */
  gint left_ref_poc_diff;
  gint right_ref_poc_diff;

  /* The pic_num will be marked as unused_for_reference, which is
   * replaced by this frame. -1 if we do not need to care about it. */
  gint unused_for_reference_pic_num;

  /* The total frame count we handled. */
  guint total_frame_count;

GST_DEFINE_MINI_OBJECT_TYPE (GstVaH264EncFrame, gst_va_h264_enc_frame);
330 * GstVaH264LevelLimits:
331 * @name: the level name
332 * @level_idc: the H.264 level_idc value
333 * @MaxMBPS: the maximum macroblock processing rate (MB/sec)
334 * @MaxFS: the maximum frame size (MBs)
 * @MaxDpbMbs: the maximum decoded picture buffer size (MBs)
336 * @MaxBR: the maximum video bit rate (kbps)
337 * @MaxCPB: the maximum CPB size (kbits)
338 * @MinCR: the minimum Compression Ratio
340 * The data structure that describes the limits of an H.264 level.
/* Struct tag for a level-limits row; see the gtk-doc comment above for
 * the meaning of each member (member declarations are not visible in
 * this excerpt). */
struct _GstVaH264LevelLimits

/* Table A-1 - Level limits */
/* Keep the rows ordered by capability: level selection walks the table
 * front to back and picks the first row satisfying all constraints
 * (see _calculate_level). */
static const GstVaH264LevelLimits _va_h264_level_limits[] = {
  /* level   idc   MaxMBPS    MaxFS    MaxDpbMbs  MaxBR    MaxCPB  MinCR */
  {"1", 10, 1485, 99, 396, 64, 175, 2},
  {"1b", 11, 1485, 99, 396, 128, 350, 2},
  {"1.1", 11, 3000, 396, 900, 192, 500, 2},
  {"1.2", 12, 6000, 396, 2376, 384, 1000, 2},
  {"1.3", 13, 11880, 396, 2376, 768, 2000, 2},
  {"2", 20, 11880, 396, 2376, 2000, 2000, 2},
  {"2.1", 21, 19800, 792, 4752, 4000, 4000, 2},
  {"2.2", 22, 20250, 1620, 8100, 4000, 4000, 2},
  {"3", 30, 40500, 1620, 8100, 10000, 10000, 2},
  {"3.1", 31, 108000, 3600, 18000, 14000, 14000, 4},
  {"3.2", 32, 216000, 5120, 20480, 20000, 20000, 4},
  {"4", 40, 245760, 8192, 32768, 20000, 25000, 4},
  {"4.1", 41, 245760, 8192, 32768, 50000, 62500, 2},
  {"4.2", 42, 522240, 8704, 34816, 50000, 62500, 2},
  {"5", 50, 589824, 22080, 110400, 135000, 135000, 2},
  {"5.1", 51, 983040, 36864, 184320, 240000, 240000, 2},
  {"5.2", 52, 2073600, 36864, 184320, 240000, 240000, 2},
  {"6", 60, 4177920, 139264, 696320, 240000, 240000, 2},
  {"6.1", 61, 8355840, 139264, 696320, 480000, 480000, 2},
  {"6.2", 62, 16711680, 139264, 696320, 800000, 800000, 2},
/* Maps a GstH264SliceType to a printable name for logging; asserts on
 * any other value. (Return type, switch header and return statements
 * are not visible in this excerpt.) */
_slice_type_name (GstH264SliceType type)
  case GST_H264_P_SLICE:
  case GST_H264_B_SLICE:
  case GST_H264_I_SLICE:
  g_assert_not_reached ();
/* Maps a VA_RC_* rate-control mode to a printable name for logging;
 * asserts on unknown modes. (Body mostly not visible in this
 * excerpt.) */
_rate_control_get_name (guint32 rc_mode)
  g_assert_not_reached ();
/**
 * GstVaH264EncRateControl:
 *
 * Enum GType backing the "rate-control" property. Registered exactly
 * once, thread-safely, via g_once_init_enter()/g_once_init_leave().
 */
gst_va_h264_enc_rate_control_get_type (void)
  static gsize type = 0;

  static const GEnumValue values[] = {
    {VA_RC_CBR, "Constant Bitrate", "cbr"},
    {VA_RC_VBR, "Variable Bitrate", "vbr"},
    {VA_RC_VCM, "Video Conferencing Mode (Non HRD compliant)", "vcm"},
    {VA_RC_CQP, "Constant Quantizer", "cqp"},

  if (g_once_init_enter (&type)) {
    _type = g_enum_register_static ("GstVaH264EncRateControl", values);
    g_once_init_leave (&type, _type);
/* Enum GType for the "mbbrc" (macroblock-level bitrate control)
 * tri-state property: auto / always enable / always disable.
 * Registered once, thread-safely, via g_once_init_enter()/leave(). */
gst_va_h264_enc_mbbrc_get_type (void)
  static gsize type = 0;

  static const GEnumValue values[] = {
    {0, "Auto choose", "auto"},
    {1, "Always enable", "enable"},
    {2, "Always disable", "disable"},

  if (g_once_init_enter (&type)) {
    _type = g_enum_register_static ("GstVaH264Mbbrc", values);
    g_once_init_leave (&type, _type);
/* Disposal helper for GstVaH264EncFrame: drops the VA encode picture
 * and the codec-frame reference.
 * NOTE(review): the freeing of @frame itself is not visible in this
 * excerpt. */
gst_va_enc_frame_free (GstVaH264EncFrame * frame)
  g_clear_pointer (&frame->picture, gst_va_encode_picture_free);
  g_clear_pointer (&frame->frame, gst_video_codec_frame_unref);
/* Normalizes bitrate (and CPB size) for HRD conformance */
_calculate_bitrate_hrd (GstVaH264Enc * self)
  guint bitrate_bits, cpb_bits_size;

  /* Round down bitrate. This is a hard limit mandated by the user */
  g_assert (SX_BITRATE >= 6);
  /* Clear the SX_BITRATE low bits so the value is expressible with the
   * HRD bit_rate_scale. */
  bitrate_bits = (self->rc.max_bitrate * 1000) & ~((1U << SX_BITRATE) - 1);
  GST_DEBUG_OBJECT (self, "Max bitrate: %u bits/sec", bitrate_bits);
  self->rc.max_bitrate_bits = bitrate_bits;

  bitrate_bits = (self->rc.target_bitrate * 1000) & ~((1U << SX_BITRATE) - 1);
  GST_DEBUG_OBJECT (self, "Target bitrate: %u bits/sec", bitrate_bits);
  self->rc.target_bitrate_bits = bitrate_bits;

  /* Discard a user CPB size smaller than half a second worth of max
   * bitrate; it will be recomputed below. */
  if (self->rc.cpb_size > 0 && self->rc.cpb_size < (self->rc.max_bitrate / 2)) {
    GST_INFO_OBJECT (self, "Too small cpb_size: %d", self->rc.cpb_size);
    self->rc.cpb_size = 0;

  if (self->rc.cpb_size == 0) {
    /* We cache 2 second coded data by default. */
    self->rc.cpb_size = self->rc.max_bitrate * 2;
    GST_INFO_OBJECT (self, "Adjust cpb_size to: %d", self->rc.cpb_size);

  /* Round up CPB size. This is an HRD compliance detail */
  g_assert (SX_CPB_SIZE >= 4);
  cpb_bits_size = (self->rc.cpb_size * 1000) & ~((1U << SX_CPB_SIZE) - 1);

  GST_DEBUG_OBJECT (self, "HRD CPB size: %u bits", cpb_bits_size);
  self->rc.cpb_length_bits = cpb_bits_size;
/* Estimates a good enough bitrate if none was supplied */
_ensure_rate_control (GstVaH264Enc * self)
  /* User can specify the properties of: "bitrate", "target-percentage",
   * "max-qp", "min-qp", "qpi", "qpp", "qpb", "mbbrc", "cpb-size",
   * "rate-control" and "target-usage" to control the RC behavior.
   *
   * "target-usage" is different from the others, it controls the encoding
   * speed and quality, while the others control encoding bit rate and
   * quality. The lower value has better quality(maybe bigger MV search
   * range) but slower speed, the higher value has faster speed but lower
   *
   * The possible composition to control the bit rate and quality:
   *
   * 1. CQP mode: "rate-control=cqp", then "qpi", "qpp" and "qpb"
   *    specify the QP of I/P/B frames respectively(within the
   *    "max-qp" and "min-qp" range). The QP will not change during
   *    the whole stream. Other properties are ignored.
   *
   * 2. CBR mode: "rate-control=CBR", then the "bitrate" specify the
   *    target bit rate and the "cpb-size" specifies the max coded
   *    picture buffer size to avoid overflow. If the "bitrate" is not
   *    set, it is calculated by the picture resolution and frame
   *    rate. If "cpb-size" is not set, it is set to the size of
   *    caching 2 second coded data. Encoder will try its best to make
   *    the QP with in the ["max-qp", "min-qp"] range. "mbbrc" can
   *    enable bit rate control in macro block level. Other parameters
   *
   * 3. VBR mode: "rate-control=VBR", then the "bitrate" specify the
   *    target bit rate, "target-percentage" is used to calculate the
   *    max bit rate of VBR mode by ("bitrate" * 100) /
   *    "target-percentage". It is also used by driver to calculate
   *    the min bit rate. The "cpb-size" specifies the max coded
   *    picture buffer size to avoid overflow. If the "bitrate" is not
   *    set, the target bit rate will be calculated by the picture
   *    resolution and frame rate. Encoder will try its best to make
   *    the QP with in the ["max-qp", "min-qp"] range. "mbbrc" can
   *    enable bit rate control in macro block level. Other parameters
   *
   * 4. VCM mode: "rate-control=VCM", then the "bitrate" specify the
   *    target bit rate, and encoder will try its best to make the QP
   *    with in the ["max-qp", "min-qp"] range. Other parameters are
   */
  guint32 quality_level;

  /* Clamp target-usage to what the driver advertises. */
  quality_level = gst_va_encoder_get_quality_level (self->encoder,
      self->profile, self->entrypoint);
  if (self->rc.target_usage > quality_level) {
    GST_INFO_OBJECT (self, "User setting target-usage: %d is not supported, "
        "fallback to %d", self->rc.target_usage, quality_level);
    self->rc.target_usage = quality_level;

  self->prop.target_usage = self->rc.target_usage;
  g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_TARGET_USAGE]);

  /* TODO: find a better heuristics to infer a nearer control mode */
  rc_mode = gst_va_encoder_get_rate_control_mode (self->encoder,
      self->profile, self->entrypoint);
  if (!(rc_mode & self->prop.rc_ctrl)) {
    /* NOTE(review): "race control" in the log message below looks like a
     * typo for "rate control" (left untouched here, it is runtime
     * output). */
    GST_INFO_OBJECT (self, "The race control mode %s is not supported, "
        "fallback to %s mode",
        _rate_control_get_name (self->prop.rc_ctrl),
        _rate_control_get_name (VA_RC_CQP));
    self->rc.rc_ctrl_mode = VA_RC_CQP;

  self->prop.rc_ctrl = self->rc.rc_ctrl_mode;
  g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_RATE_CONTROL]);

  if (self->rc.min_qp > self->rc.max_qp) {
    GST_INFO_OBJECT (self, "The min_qp %d is bigger than the max_qp %d, "
        "set it to the max_qp", self->rc.min_qp, self->rc.max_qp);
    self->rc.min_qp = self->rc.max_qp;

  self->prop.min_qp = self->rc.min_qp;
  g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_MIN_QP]);

  /* Make all the qp in the valid range */
  /* The "!= 26" checks keep the log quiet when the QP is still at its
   * default value. */
  if (self->rc.qp_i < self->rc.min_qp) {
    if (self->rc.qp_i != 26)
      GST_INFO_OBJECT (self, "The qp_i %d is smaller than the min_qp %d, "
          "set it to the min_qp", self->rc.qp_i, self->rc.min_qp);
    self->rc.qp_i = self->rc.min_qp;

  if (self->rc.qp_i > self->rc.max_qp) {
    if (self->rc.qp_i != 26)
      GST_INFO_OBJECT (self, "The qp_i %d is bigger than the max_qp %d, "
          "set it to the max_qp", self->rc.qp_i, self->rc.max_qp);
    self->rc.qp_i = self->rc.max_qp;

  if (self->rc.qp_p < self->rc.min_qp) {
    if (self->rc.qp_p != 26)
      GST_INFO_OBJECT (self, "The qp_p %d is smaller than the min_qp %d, "
          "set it to the min_qp", self->rc.qp_p, self->rc.min_qp);
    self->rc.qp_p = self->rc.min_qp;

  if (self->rc.qp_p > self->rc.max_qp) {
    if (self->rc.qp_p != 26)
      GST_INFO_OBJECT (self, "The qp_p %d is bigger than the max_qp %d, "
          "set it to the max_qp", self->rc.qp_p, self->rc.max_qp);
    self->rc.qp_p = self->rc.max_qp;

  if (self->rc.qp_b < self->rc.min_qp) {
    if (self->rc.qp_b != 26)
      GST_INFO_OBJECT (self, "The qp_b %d is smaller than the min_qp %d, "
          "set it to the min_qp", self->rc.qp_b, self->rc.min_qp);
    self->rc.qp_b = self->rc.min_qp;

  if (self->rc.qp_b > self->rc.max_qp) {
    if (self->rc.qp_b != 26)
      GST_INFO_OBJECT (self, "The qp_b %d is bigger than the max_qp %d, "
          "set it to the max_qp", self->rc.qp_b, self->rc.max_qp);
    self->rc.qp_b = self->rc.max_qp;

  bitrate = self->prop.bitrate;
  /* Calculate a bitrate if it is not set. */
  if ((self->rc.rc_ctrl_mode == VA_RC_CBR || self->rc.rc_ctrl_mode == VA_RC_VBR
          || self->rc.rc_ctrl_mode == VA_RC_VCM) && bitrate == 0) {
    /* Default compression: 48 bits per macroblock in "high-compression" mode */
    guint bits_per_mb = 48;

    /* According to the literature and testing, CABAC entropy coding
     * mode could provide for +10% to +18% improvement in general,
     * thus estimating +15% here ; and using adaptive 8x8 transforms
     * in I-frames could bring up to +10% improvement. */
    if (!self->use_cabac)
      bits_per_mb += (bits_per_mb * 15) / 100;
    if (!self->use_dct8x8)
      bits_per_mb += (bits_per_mb * 10) / 100;

    factor = (guint64) self->mb_width * self->mb_height * bits_per_mb;
    bitrate = gst_util_uint64_scale (factor,
        GST_VIDEO_INFO_FPS_N (&self->in_info),
        GST_VIDEO_INFO_FPS_D (&self->in_info)) / 1000;
    GST_INFO_OBJECT (self, "target bitrate computed to %u kbps", bitrate);

  /* Adjust the setting based on RC mode. */
  /* NOTE(review): the case labels of this switch are not visible in
   * this excerpt; the branches appear to be CQP, CBR, VBR and VCM in
   * that order — confirm against the full file. */
  switch (self->rc.rc_ctrl_mode) {
      self->rc.max_bitrate = 0;
      self->rc.target_bitrate = 0;
      self->rc.target_percentage = 0;
      self->rc.cpb_size = 0;

      self->rc.max_bitrate = bitrate;
      self->rc.target_bitrate = bitrate;
      self->rc.target_percentage = 100;
      self->rc.qp_i = self->rc.qp_p = self->rc.qp_b = 26;

      g_assert (self->rc.target_percentage >= 10);
      self->rc.max_bitrate = (guint) gst_util_uint64_scale_int (bitrate,
          100, self->rc.target_percentage);
      self->rc.target_bitrate = bitrate;
      self->rc.qp_i = self->rc.qp_p = self->rc.qp_b = 26;

      self->rc.max_bitrate = bitrate;
      self->rc.target_bitrate = bitrate;
      self->rc.target_percentage = 0;
      self->rc.qp_i = self->rc.qp_p = self->rc.qp_b = 26;
      self->rc.cpb_size = 0;

      if (self->gop.num_bframes > 0) {
        GST_INFO_OBJECT (self, "VCM mode just support I/P mode, no B frame");
        self->gop.num_bframes = 0;
        self->gop.b_pyramid = FALSE;

  if (self->rc.rc_ctrl_mode != VA_RC_CQP)
    _calculate_bitrate_hrd (self);

  /* Notify the properties whose effective value may have changed. */
  if (self->rc.cpb_size != self->prop.cpb_size) {
    self->prop.cpb_size = self->rc.cpb_size;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_CPB_SIZE]);

  if (self->prop.target_percentage != self->rc.target_percentage) {
    self->prop.target_percentage = self->rc.target_percentage;
    g_object_notify_by_pspec (G_OBJECT (self),
        properties[PROP_TARGET_PERCENTAGE]);

  if (self->prop.qp_i != self->rc.qp_i) {
    self->prop.qp_i = self->rc.qp_i;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_QP_I]);

  if (self->prop.qp_p != self->rc.qp_p) {
    self->prop.qp_p = self->rc.qp_p;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_QP_P]);

  if (self->prop.qp_b != self->rc.qp_b) {
    self->prop.qp_b = self->rc.qp_b;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_QP_B]);
/* Returns the cpb/nal scale factor applied to MaxBR/MaxCPB for the
 * given profile; 1500 for MVC/stereo comes from H.264 spec H.10.2.1.
 * (The other case values are not visible in this excerpt.) */
_get_h264_cpb_nal_factor (VAProfile profile)
  case VAProfileH264High:

  case VAProfileH264ConstrainedBaseline:
  case VAProfileH264Main:

  case VAProfileH264MultiviewHigh:
  case VAProfileH264StereoHigh:
    f = 1500;                   /* H.10.2.1 (r) */
  g_assert_not_reached ();
/* Derives the level from the currently set limits */
_calculate_level (GstVaH264Enc * self)
  const guint cpb_factor = _get_h264_cpb_nal_factor (self->profile);
  guint i, PicSizeMbs, MaxDpbMbs, MaxMBPS;

  /* Stream requirements in Table A-1 terms. */
  PicSizeMbs = self->mb_width * self->mb_height;
  MaxDpbMbs = PicSizeMbs * (self->gop.num_ref_frames + 1);
  MaxMBPS = gst_util_uint64_scale_int_ceil (PicSizeMbs,
      GST_VIDEO_INFO_FPS_N (&self->in_info),
      GST_VIDEO_INFO_FPS_D (&self->in_info));

  /* Pick the first (i.e. lowest) level whose limits cover the stream;
   * bitrate/CPB checks are skipped when those values are zero. */
  for (i = 0; i < G_N_ELEMENTS (_va_h264_level_limits); i++) {
    const GstVaH264LevelLimits *const limits = &_va_h264_level_limits[i];
    if (PicSizeMbs <= limits->MaxFS && MaxDpbMbs <= limits->MaxDpbMbs
        && MaxMBPS <= limits->MaxMBPS && (!self->rc.max_bitrate_bits
            || self->rc.max_bitrate_bits <= (limits->MaxBR * 1000 * cpb_factor))
        && (!self->rc.cpb_length_bits
            || self->rc.cpb_length_bits <=
            (limits->MaxCPB * 1000 * cpb_factor))) {
      self->level_idc = _va_h264_level_limits[i].level_idc;
      self->level_str = _va_h264_level_limits[i].name;
      self->min_cr = _va_h264_level_limits[i].MinCR;

  GST_ERROR_OBJECT (self,
      "failed to find a suitable level matching codec config");
/* Clamps user-supplied parameters (num_slices, trellis) to driver and
 * stream limits, notifying properties whose value changed. */
_validate_parameters (GstVaH264Enc * self)
  /* Ensure the num_slices provided by the user not exceed the limit
   * of the number of slices permitted by the stream and by the
  g_assert (self->num_slices >= 1);
  max_slices = gst_va_encoder_get_max_slice_num (self->encoder,
      self->profile, self->entrypoint);
  if (self->num_slices > max_slices)
    self->num_slices = max_slices;
  /* The stream size limit. */
  if (self->num_slices > ((self->mb_width * self->mb_height + 1) / 2))
    self->num_slices = ((self->mb_width * self->mb_height + 1) / 2);

  if (self->prop.num_slices != self->num_slices) {
    self->prop.num_slices = self->num_slices;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_NUM_SLICES]);

  /* Ensure trellis. */
  if (self->use_trellis &&
      !gst_va_encoder_has_trellis (self->encoder, self->profile,
    GST_INFO_OBJECT (self, "The trellis is not supported");
    self->use_trellis = FALSE;

  if (self->prop.use_trellis != self->use_trellis) {
    self->prop.use_trellis = self->use_trellis;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_TRELLIS]);
/* Get log2_max_frame_num_minus4, log2_max_pic_order_cnt_lsb_minus4
 * value, shall be in the range of 0 to 12, inclusive.
 * (Most of the body is not visible in this excerpt.) */
_get_log2_max_num (guint num)
  /* shall be in the range of 0+4 to 12+4, inclusive. */
  } else if (ret > 16) {
/* Logs the computed GOP layout at INFO level; compiled out entirely
 * when GStreamer debug support is disabled, and an early bail-out when
 * the category threshold is below INFO. */
_print_gop_structure (GstVaH264Enc * self)
#ifndef GST_DISABLE_GST_DEBUG
  if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_INFO)

  str = g_string_new (NULL);

  g_string_append_printf (str, "[ ");

  for (i = 0; i < self->gop.idr_period; i++) {
      g_string_append_printf (str, "IDR");
      g_string_append_printf (str, ", ");

    g_string_append_printf (str, "%s",
        _slice_type_name (self->gop.frame_types[i].slice_type));

    /* B frames in a pyramid also show level and left/right POC
     * deltas. */
    if (self->gop.b_pyramid
        && self->gop.frame_types[i].slice_type == GST_H264_B_SLICE) {
      g_string_append_printf (str, "<L%d (%d, %d)>",
          self->gop.frame_types[i].pyramid_level,
          self->gop.frame_types[i].left_ref_poc_diff,
          self->gop.frame_types[i].right_ref_poc_diff);

    if (self->gop.frame_types[i].is_ref) {
      g_string_append_printf (str, "(ref)");

  g_string_append_printf (str, " ]");

  GST_INFO_OBJECT (self, "GOP size: %d, forward reference %d, backward"
      " reference %d, GOP structure: %s", self->gop.idr_period,
      self->gop.ref_num_list0, self->gop.ref_num_list1, str->str);

  g_string_free (str, TRUE);
/* Tail of struct PyramidInfo (the struct header is not visible in this
 * excerpt). */
gint left_ref_poc_diff;
gint right_ref_poc_diff;

/* Recursively assigns a pyramid level and left/right reference POC
 * deltas to @len consecutive B-frame slots, splitting the run around a
 * midpoint and recursing into both halves. POC deltas are in units of
 * 2 (one display step per frame). */
_set_pyramid_info (struct PyramidInfo *info, guint len, guint level)
  /* Base case: bottom level, or a single frame left. */
  if (level == 0 || len == 1) {
    for (index = 0; index < len; index++) {
      info[index].level = level;
      info[index].left_ref_poc_diff = (index + 1) * -2;
      info[index].right_ref_poc_diff = (len - index) * 2;

  info[index].level = level;
  info[index].left_ref_poc_diff = (index + 1) * -2;
  info[index].right_ref_poc_diff = (len - index) * 2;

  _set_pyramid_info (info, index, level);

  _set_pyramid_info (&info[index + 1], len - (index + 1), level);
/* Fills gop.frame_types[] for one full GOP: slot 0 is the IDR, B slots
 * take level/POC-deltas from the pyramid info, references are marked,
 * extra I frames may replace P frames (open GOP), and the final frame
 * is forced to P. */
_create_gop_frame_types (GstVaH264Enc * self)
  guint i_frames = self->gop.num_iframes;
  struct PyramidInfo pyramid_info[31] = { 0, };

  if (self->gop.highest_pyramid_level > 0) {
    g_assert (self->gop.num_bframes > 0);
    _set_pyramid_info (pyramid_info, self->gop.num_bframes,
        self->gop.highest_pyramid_level);

  g_assert (self->gop.idr_period <= MAX_GOP_SIZE);
  for (i = 0; i < self->gop.idr_period; i++) {
      self->gop.frame_types[i].slice_type = GST_H264_I_SLICE;
      self->gop.frame_types[i].is_ref = TRUE;

    /* Intra only stream. */
    if (self->gop.ip_period == 0) {
      self->gop.frame_types[i].slice_type = GST_H264_I_SLICE;
      self->gop.frame_types[i].is_ref = FALSE;

    /* Positions between anchors are B frames. */
    if (i % self->gop.ip_period) {
      guint pyramid_index =
          i % self->gop.ip_period - 1 /* The first P or IDR */ ;

      self->gop.frame_types[i].slice_type = GST_H264_B_SLICE;
      self->gop.frame_types[i].pyramid_level =
          pyramid_info[pyramid_index].level;
      /* Only B frames above level 0 act as references. */
      self->gop.frame_types[i].is_ref =
          (self->gop.frame_types[i].pyramid_level > 0);
      self->gop.frame_types[i].left_ref_poc_diff =
          pyramid_info[pyramid_index].left_ref_poc_diff;
      self->gop.frame_types[i].right_ref_poc_diff =
          pyramid_info[pyramid_index].right_ref_poc_diff;

    if (self->gop.i_period && i % self->gop.i_period == 0 && i_frames > 0) {
      /* Replace P with I. */
      self->gop.frame_types[i].slice_type = GST_H264_I_SLICE;
      self->gop.frame_types[i].is_ref = TRUE;

    self->gop.frame_types[i].slice_type = GST_H264_P_SLICE;
    self->gop.frame_types[i].is_ref = TRUE;

  /* Force the last one to be a P */
  if (self->gop.idr_period > 1 && self->gop.ip_period > 0) {
    self->gop.frame_types[self->gop.idr_period - 1].slice_type =
    self->gop.frame_types[self->gop.idr_period - 1].is_ref = TRUE;
/* Consider the idr_period, num_bframes, L0/L1 reference number.
 * TODO: Load some preset fixed GOP structure.
 * TODO: Skip this if in lookahead mode. */
_generate_gop_structure (GstVaH264Enc * self)
  guint32 list0, list1, gop_ref_num;

  /* If not set, generate an idr every second */
  if (self->gop.idr_period == 0) {
    self->gop.idr_period = (GST_VIDEO_INFO_FPS_N (&self->in_info)
        + GST_VIDEO_INFO_FPS_D (&self->in_info) - 1) /
        GST_VIDEO_INFO_FPS_D (&self->in_info);

  /* Do not use a too huge GOP size. */
  if (self->gop.idr_period > 1024) {
    self->gop.idr_period = 1024;
    GST_INFO_OBJECT (self, "Lowering the GOP size to %d", self->gop.idr_period);

  if (self->gop.idr_period != self->prop.key_int_max) {
    self->prop.key_int_max = self->gop.idr_period;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_KEY_INT_MAX]);

  /* Prefer have more than 1 refs for the GOP which is not very small. */
  if (self->gop.idr_period > 8) {
    if (self->gop.num_bframes > (self->gop.idr_period - 1) / 2) {
      self->gop.num_bframes = (self->gop.idr_period - 1) / 2;
      GST_INFO_OBJECT (self, "Lowering the number of num_bframes to %d",
          self->gop.num_bframes);

    /* begin and end should be ref */
    if (self->gop.num_bframes > self->gop.idr_period - 1 - 1) {
      if (self->gop.idr_period > 1) {
        self->gop.num_bframes = self->gop.idr_period - 1 - 1;
        self->gop.num_bframes = 0;
      GST_INFO_OBJECT (self, "Lowering the number of num_bframes to %d",
          self->gop.num_bframes);

  if (!gst_va_encoder_get_max_num_reference (self->encoder, self->profile,
          self->entrypoint, &list0, &list1)) {
    GST_INFO_OBJECT (self, "Failed to get the max num reference");

  /* Clamp the driver limits to the user-requested ref count. */
  if (list0 > self->gop.num_ref_frames)
    list0 = self->gop.num_ref_frames;
  if (list1 > self->gop.num_ref_frames)
    list1 = self->gop.num_ref_frames;

    GST_INFO_OBJECT (self,
        "No reference support, fallback to intra only stream");

    /* It does not make sense that if only the list1 exists. */
    self->gop.num_ref_frames = 0;

    self->gop.ip_period = 0;
    self->gop.num_bframes = 0;
    self->gop.b_pyramid = FALSE;
    self->gop.highest_pyramid_level = 0;
    self->gop.num_iframes = self->gop.idr_period - 1 /* The idr */ ;
    self->gop.ref_num_list0 = 0;
    self->gop.ref_num_list1 = 0;

  if (self->gop.num_ref_frames <= 1) {
    GST_INFO_OBJECT (self, "The number of reference frames is only %d,"
        " no B frame allowed, fallback to I/P mode", self->gop.num_ref_frames);
    self->gop.num_bframes = 0;

  /* b_pyramid needs at least 1 ref for B, besides the I/P */
  if (self->gop.b_pyramid && self->gop.num_ref_frames <= 2) {
    GST_INFO_OBJECT (self, "The number of reference frames is only %d,"
        " not enough for b_pyramid", self->gop.num_ref_frames);
    self->gop.b_pyramid = FALSE;

  if (list1 == 0 && self->gop.num_bframes > 0) {
    GST_INFO_OBJECT (self,
        "No hw reference support for list 1, fallback to I/P mode");
    self->gop.num_bframes = 0;
    self->gop.b_pyramid = FALSE;

  /* I/P mode, no list1 needed. */
  if (self->gop.num_bframes == 0)

  /* Not enough B frame, no need for b_pyramid. */
  if (self->gop.num_bframes <= 1)
    self->gop.b_pyramid = FALSE;

  /* b pyramid has only one backward ref. */
  if (self->gop.b_pyramid)

  if (self->gop.num_ref_frames > list0 + list1) {
    self->gop.num_ref_frames = list0 + list1;
    GST_INFO_OBJECT (self, "HW limits, lowering the number of reference"
        " frames to %d", self->gop.num_ref_frames);

  /* How many possible refs within a GOP. */
  gop_ref_num = (self->gop.idr_period + self->gop.num_bframes) /
      (self->gop.num_bframes + 1);

  if (self->gop.num_bframes > 0
      /* frame_num % (self->gop.num_bframes + 1) happens to be the end P */
      && (self->gop.idr_period % (self->gop.num_bframes + 1) != 1))

  /* Adjust reference num based on B frames and B pyramid. */
  if (self->gop.num_bframes == 0) {
    self->gop.b_pyramid = FALSE;
    self->gop.ref_num_list0 = self->gop.num_ref_frames;
    self->gop.ref_num_list1 = 0;
  } else if (self->gop.b_pyramid) {
    guint b_frames = self->gop.num_bframes;

    /* b pyramid has only one backward ref. */
    g_assert (list1 == 1);
    self->gop.ref_num_list1 = list1;
    self->gop.ref_num_list0 =
        self->gop.num_ref_frames - self->gop.ref_num_list1;

    b_frames = b_frames / 2;

      /* At least 1 B ref for each level, plus begin and end 2 P/I */
      if (b_refs + 2 > self->gop.num_ref_frames)

      self->gop.highest_pyramid_level++;
      b_frames = b_frames / 2;

    GST_INFO_OBJECT (self, "pyramid level is %d",
        self->gop.highest_pyramid_level);

    /* We prefer list0. Backward refs have more latency. */
    self->gop.ref_num_list1 = 1;
    self->gop.ref_num_list0 =
        self->gop.num_ref_frames - self->gop.ref_num_list1;
    /* Balance the forward and backward refs, but not cause a big latency. */
    while ((self->gop.num_bframes * self->gop.ref_num_list1 <= 16)
        && (self->gop.ref_num_list1 <= gop_ref_num)
        && (self->gop.ref_num_list1 < list1)
        && (self->gop.ref_num_list0 / self->gop.ref_num_list1 > 4)) {
      self->gop.ref_num_list0--;
      self->gop.ref_num_list1++;

  /* It's OK, keep slots for GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME frame. */
  if (self->gop.ref_num_list0 > gop_ref_num)
    GST_DEBUG_OBJECT (self, "num_ref_frames %d is bigger than gop_ref_num %d",
        self->gop.ref_num_list0, gop_ref_num);

  /* Include the ref picture itself. */
  self->gop.ip_period = 1 + self->gop.num_bframes;

  p_frames = gop_ref_num - 1 /* IDR */ ;

  if (self->gop.num_iframes > p_frames) {
    self->gop.num_iframes = p_frames;
    GST_INFO_OBJECT (self, "Too many I frames insertion, lowering it to %d",
        self->gop.num_iframes);

  if (self->gop.num_iframes > 0) {
    guint total_i_frames = self->gop.num_iframes + 1 /* IDR */ ;
    self->gop.i_period =
        (gop_ref_num / total_i_frames) * (self->gop.num_bframes + 1);

  /* init max_frame_num, max_poc */
  self->gop.log2_max_frame_num = _get_log2_max_num (self->gop.idr_period);
  self->gop.max_frame_num = (1 << self->gop.log2_max_frame_num);
  self->gop.log2_max_pic_order_cnt = self->gop.log2_max_frame_num + 1;
  self->gop.max_pic_order_cnt = (1 << self->gop.log2_max_pic_order_cnt);
  self->gop.num_reorder_frames = self->gop.b_pyramid ?
      self->gop.highest_pyramid_level * 2 + 1 /* the last P frame. */ :
      self->gop.ref_num_list1;
  /* Should not exceed the max ref num. */
  self->gop.num_reorder_frames =
      MIN (self->gop.num_reorder_frames, self->gop.num_ref_frames);
  self->gop.num_reorder_frames = MIN (self->gop.num_reorder_frames, 16);

  _create_gop_frame_types (self);
  _print_gop_structure (self);

  if (self->prop.num_ref_frames != self->gop.num_ref_frames) {
    self->prop.num_ref_frames = self->gop.num_ref_frames;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_NUM_REF_FRAMES]);

  if (self->prop.num_iframes != self->gop.num_iframes) {
    self->prop.num_iframes = self->gop.num_iframes;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_IFRAMES]);
/* Estimate an upper bound, in bytes, for one coded frame and store it in
 * self->codedbuf_size.  The bound follows the H.264 spec's per-macroblock
 * bit limits, then adds room for the packed SPS/PPS/slice headers. */
_calculate_coded_size (GstVaH264Enc * self)
  guint codedbuf_size = 0;

  if (self->profile == VAProfileH264High
      || self->profile == VAProfileH264MultiviewHigh
      || self->profile == VAProfileH264StereoHigh) {
    /* The number of bits of macroblock_layer( ) data for any macroblock
       is not greater than 128 + RawMbBits */
    guint RawMbBits = 0;
    guint BitDepthY = 8;
    guint BitDepthC = 8;
    guint MbHeightC = 8;

    /* Pick bit depths and chroma macroblock dimensions per rt_format. */
    switch (self->rt_format) {
      case VA_RT_FORMAT_YUV420:
      case VA_RT_FORMAT_YUV422:
      case VA_RT_FORMAT_YUV444:
      case VA_RT_FORMAT_YUV400:
      case VA_RT_FORMAT_YUV420_10:
      case VA_RT_FORMAT_YUV422_10:
      case VA_RT_FORMAT_YUV444_10:
        /* rt_format was validated earlier, so any other value is a bug. */
        g_assert_not_reached ();

    /* The variable RawMbBits is derived as
     * RawMbBits = 256 * BitDepthY + 2 * MbWidthC * MbHeightC * BitDepthC */
    RawMbBits = 256 * BitDepthY + 2 * MbWidthC * MbHeightC * BitDepthC;
    codedbuf_size = (self->mb_width * self->mb_height) * (128 + RawMbBits) / 8;

    /* The number of bits of macroblock_layer( ) data for any macroblock
     * is not greater than 3200 */
    codedbuf_size = (self->mb_width * self->mb_height) * (3200 / 8);

  /* Account for SPS header */
  /* XXX: exclude scaling lists, MVC/SVC extensions */
  codedbuf_size += 4 /* start code */ + GST_ROUND_UP_8 (MAX_SPS_HDR_SIZE +
      MAX_VUI_PARAMS_SIZE + 2 * MAX_HRD_PARAMS_SIZE) / 8;

  /* Account for PPS header */
  /* XXX: exclude slice groups, scaling lists, MVC/SVC extensions */
  codedbuf_size += 4 + GST_ROUND_UP_8 (MAX_PPS_HDR_SIZE) / 8;

  /* Account for slice header */
      self->num_slices * (4 + GST_ROUND_UP_8 (MAX_SLICE_HDR_SIZE) / 8);

  /* Add 5% for safety */
  self->codedbuf_size = (guint) ((gfloat) codedbuf_size * 1.05);

  GST_DEBUG_OBJECT (self, "Calculate codedbuf size: %u", self->codedbuf_size);
/* Map a GstVideoFormat to a VA rtformat (chroma sampling) and verify it is
 * supported.  Only 4:2:0 is currently accepted; anything else is rejected
 * with an error. */
_get_rtformat (GstVaH264Enc * self, GstVideoFormat format)
  chroma = gst_va_chroma_from_video_format (format);

  /* Check whether the rtformat is supported. */
  if (chroma != VA_RT_FORMAT_YUV420) {
    GST_ERROR_OBJECT (self, "Unsupported chroma for video format: %s",
        gst_video_format_to_string (format));
/* Query the driver for the packed headers it accepts for the selected
 * profile/entrypoint, and keep the intersection with the headers we want
 * to write ourselves (SPS, PPS, slice headers and raw data like SEI/AUD)
 * in self->packed_headers. */
_init_packed_headers (GstVaH264Enc * self)
  guint32 packed_headers;
  guint32 desired_packed_headers = VA_ENC_PACKED_HEADER_SEQUENCE /* SPS */
      | VA_ENC_PACKED_HEADER_PICTURE /* PPS */
      | VA_ENC_PACKED_HEADER_SLICE /* Slice headers */
      | VA_ENC_PACKED_HEADER_RAW_DATA;  /* SEI, AUD, etc. */

  self->packed_headers = 0;

  packed_headers = gst_va_encoder_get_packed_headers (self->encoder,
      self->profile, self->entrypoint);

  /* Driver reports no packed-header support at all. */
  if (packed_headers == 0)

  /* Not fatal: we just won't emit the headers the driver refuses. */
  if (desired_packed_headers & ~packed_headers) {
    GST_INFO_OBJECT (self, "Driver does not support some wanted packed headers "
        "(wanted %#x, found %#x)", desired_packed_headers, packed_headers);

  self->packed_headers = desired_packed_headers & packed_headers;
/* Choose the VA profile, entrypoint and rtformat to encode with.
 *
 * Two passes over the profiles allowed by downstream caps:
 *   1. find a profile that satisfies all enabled features (dct8x8, cabac,
 *      B frames) and is supported by the hardware;
 *   2. failing that, take the first hardware-supported profile and disable
 *      the features it cannot provide (notifying the changed properties).
 *
 * Returns FALSE if no usable profile is found. */
_decide_profile (GstVaH264Enc * self)
  gboolean ret = FALSE;
  GstVideoFormat in_format;
  GstCaps *allowed_caps = NULL;
  guint num_structures, i;
  GstStructure *structure;
  const GValue *v_profile;
  GPtrArray *candidates = NULL;
  gchar *profile_name;

  /* Owns the duplicated profile-name strings. */
  candidates = g_ptr_array_new_with_free_func (g_free);

  /* First, check whether the downstream requires a specified profile. */
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (self));
    allowed_caps = gst_pad_query_caps (GST_VIDEO_ENCODER_SRC_PAD (self), NULL);

  /* Collect every profile name downstream accepts, whether it appears as a
   * plain string or as a list of strings. */
  if (allowed_caps && !gst_caps_is_empty (allowed_caps)) {
    num_structures = gst_caps_get_size (allowed_caps);
    for (i = 0; i < num_structures; i++) {
      structure = gst_caps_get_structure (allowed_caps, i);
      v_profile = gst_structure_get_value (structure, "profile");

      if (G_VALUE_HOLDS_STRING (v_profile)) {
        profile_name = g_strdup (g_value_get_string (v_profile));
        g_ptr_array_add (candidates, profile_name);
      } else if (GST_VALUE_HOLDS_LIST (v_profile)) {
        for (j = 0; j < gst_value_list_get_size (v_profile); j++) {
          const GValue *p = gst_value_list_get_value (v_profile, j);
          profile_name = g_strdup (g_value_get_string (p));
          g_ptr_array_add (candidates, profile_name);

  if (candidates->len == 0) {
    GST_ERROR_OBJECT (self, "No available profile in caps");

  in_format = GST_VIDEO_INFO_FORMAT (&self->in_info);
  rt_format = _get_rtformat (self, in_format);
    GST_ERROR_OBJECT (self, "unsupported video format %s",
        gst_video_format_to_string (in_format));

  /* Find the suitable profile by features and check the HW
  for (i = 0; i < candidates->len; i++) {
    profile_name = g_ptr_array_index (candidates, i);

    /* dct8x8 require at least high profile. */
    if (self->use_dct8x8) {
      if (!g_strstr_len (profile_name, -1, "high"))

    /* cabac require at least main profile. */
    if (self->use_cabac) {
      if (!g_strstr_len (profile_name, -1, "main")
          && !g_strstr_len (profile_name, -1, "high"))

    /* baseline only support I/P mode. */
    if (self->gop.num_bframes > 0) {
      if (g_strstr_len (profile_name, -1, "baseline"))

    profile = gst_va_profile_from_name (H264, profile_name);
    if (profile == VAProfileNone)

    /* The hardware must expose the profile with the EncSlice entrypoint. */
    if (!gst_va_encoder_has_profile_and_entrypoint (self->encoder,
            profile, VAEntrypointEncSlice))

    /* ... and support the input chroma format for it. */
    if ((rt_format & gst_va_encoder_get_rtformat (self->encoder,
                profile, VAEntrypointEncSlice)) == 0)

    self->profile = profile;
    self->entrypoint = VAEntrypointEncSlice;
    self->rt_format = rt_format;

  /* Just use the first HW available profile and disable features if
  profile_name = NULL;
  for (i = 0; i < candidates->len; i++) {
    profile_name = g_ptr_array_index (candidates, i);
    profile = gst_va_profile_from_name (H264, profile_name);
    if (profile == VAProfileNone)

    if (!gst_va_encoder_has_profile_and_entrypoint (self->encoder,
            profile, VAEntrypointEncSlice))

    if ((rt_format & gst_va_encoder_get_rtformat (self->encoder,
                profile, VAEntrypointEncSlice)) == 0)

    self->profile = profile;
    self->entrypoint = VAEntrypointEncSlice;
    self->rt_format = rt_format;

  /* Fallback path: drop the features the selected profile cannot do and
   * notify the corresponding properties so the app sees the change. */
  if (self->use_dct8x8 && !g_strstr_len (profile_name, -1, "high")) {
    GST_INFO_OBJECT (self, "Disable dct8x8, profile %s does not support it",
        gst_va_profile_name (self->profile));
    self->use_dct8x8 = FALSE;
    self->prop.use_dct8x8 = FALSE;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_DCT8X8]);

  if (self->use_cabac && (!g_strstr_len (profile_name, -1, "main")
          && !g_strstr_len (profile_name, -1, "high"))) {
    GST_INFO_OBJECT (self, "Disable cabac, profile %s does not support it",
        gst_va_profile_name (self->profile));
    self->use_cabac = FALSE;
    self->prop.use_cabac = FALSE;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_CABAC]);

  if (self->gop.num_bframes > 0 && g_strstr_len (profile_name, -1, "baseline")) {
    GST_INFO_OBJECT (self, "No B frames, profile %s does not support it",
        gst_va_profile_name (self->profile));
    self->gop.num_bframes = 0;
    self->gop.b_pyramid = 0;

  g_clear_pointer (&candidates, g_ptr_array_unref);
  g_clear_pointer (&allowed_caps, gst_caps_unref);

  GST_INFO_OBJECT (self, "Select the profile %s",
      gst_va_profile_name (profile));

  GST_ERROR_OBJECT (self, "Failed to find an available profile");
1529 /* Clear all the info of last reconfig and set the fields based on
1530 * property. The reconfig may change these fields because of the
1531 * profile/level and HW limitation. */
gst_va_h264_enc_reset_state (GstVaH264Enc * self)
  /* Negotiated encoding configuration. */
  self->profile = VAProfileNone;
  self->entrypoint = 0;
  self->rt_format = 0;
  self->codedbuf_size = 0;

  /* Timing / frame accounting. */
  self->frame_duration = GST_CLOCK_TIME_NONE;
  self->input_frame_count = 0;
  self->output_frame_count = 0;

  self->level_idc = 0;
  self->level_str = NULL;
  self->mb_height = 0;

  /* Feature toggles are re-seeded from the user-set properties; reconfig
   * may still lower them if the profile or HW does not support them. */
  self->use_cabac = self->prop.use_cabac;
  self->use_dct8x8 = self->prop.use_dct8x8;
  self->use_trellis = self->prop.use_trellis;
  self->num_slices = self->prop.num_slices;

  /* GOP state: property-driven fields from self->prop, derived fields to
   * zero so _generate_gop_structure() recomputes them. */
  self->gop.idr_period = self->prop.key_int_max;
  self->gop.i_period = 0;
  self->gop.total_idr_count = 0;
  self->gop.ip_period = 0;
  self->gop.num_bframes = self->prop.num_bframes;
  self->gop.b_pyramid = self->prop.b_pyramid;
  self->gop.highest_pyramid_level = 0;
  self->gop.num_iframes = self->prop.num_iframes;
  memset (self->gop.frame_types, 0, sizeof (self->gop.frame_types));
  self->gop.cur_frame_index = 0;
  self->gop.cur_frame_num = 0;
  self->gop.max_frame_num = 0;
  self->gop.log2_max_frame_num = 0;
  self->gop.max_pic_order_cnt = 0;
  self->gop.log2_max_pic_order_cnt = 0;
  self->gop.num_ref_frames = self->prop.num_ref_frames;
  self->gop.ref_num_list0 = 0;
  self->gop.ref_num_list1 = 0;
  self->gop.num_reorder_frames = 0;

  /* Rate control state, likewise seeded from properties. */
  self->rc.rc_ctrl_mode = self->prop.rc_ctrl;
  self->rc.min_qp = self->prop.min_qp;
  self->rc.max_qp = self->prop.max_qp;
  self->rc.qp_i = self->prop.qp_i;
  self->rc.qp_p = self->prop.qp_p;
  self->rc.qp_b = self->prop.qp_b;
  self->rc.mbbrc = self->prop.mbbrc;
  self->rc.max_bitrate = 0;
  self->rc.target_bitrate = 0;
  self->rc.target_percentage = self->prop.target_percentage;
  self->rc.target_usage = self->prop.target_usage;
  self->rc.max_bitrate_bits = 0;
  self->rc.target_bitrate_bits = 0;
  self->rc.cpb_size = self->prop.cpb_size;
  self->rc.cpb_length_bits = 0;

  memset (&self->sequence_hdr, 0, sizeof (GstH264SPS));
/* Recompute the whole encoding configuration from the input caps and the
 * current property values: profile, level, rate control, GOP structure,
 * coded buffer size and packed headers.  Called on (re)negotiation. */
gst_va_h264_enc_reconfig (GstVaH264Enc * self)
  gst_va_h264_enc_reset_state (self);

  self->width = GST_VIDEO_INFO_WIDTH (&self->in_info);
  self->height = GST_VIDEO_INFO_HEIGHT (&self->in_info);

  /* Dimensions rounded up to whole 16x16 macroblocks. */
  self->mb_width = GST_ROUND_UP_16 (self->width) / 16;
  self->mb_height = GST_ROUND_UP_16 (self->height) / 16;

  /* Frame rate is needed for rate control and PTS setting. */
  if (GST_VIDEO_INFO_FPS_N (&self->in_info) == 0
      || GST_VIDEO_INFO_FPS_D (&self->in_info) == 0) {
    GST_INFO_OBJECT (self, "Unknown framerate, just set to 30 fps");
    GST_VIDEO_INFO_FPS_N (&self->in_info) = 30;
    GST_VIDEO_INFO_FPS_D (&self->in_info) = 1;

  self->frame_duration = gst_util_uint64_scale (GST_SECOND,
      GST_VIDEO_INFO_FPS_D (&self->in_info),
      GST_VIDEO_INFO_FPS_N (&self->in_info));

  GST_DEBUG_OBJECT (self, "resolution:%dx%d, MB size: %dx%d,"
      " frame duration is %" GST_TIME_FORMAT,
      self->width, self->height, self->mb_width, self->mb_height,
      GST_TIME_ARGS (self->frame_duration));

  if (!_decide_profile (self))

  _validate_parameters (self);

  _ensure_rate_control (self);

  if (!_calculate_level (self))

  _generate_gop_structure (self);
  _calculate_coded_size (self);

  /* num_bframes are modified several times before */
  if (self->prop.num_bframes != self->gop.num_bframes) {
    self->prop.num_bframes = self->gop.num_bframes;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_BFRAMES]);

  if (self->prop.b_pyramid != self->gop.b_pyramid) {
    self->prop.b_pyramid = self->gop.b_pyramid;
    g_object_notify_by_pspec (G_OBJECT (self), properties[PROP_B_PYRAMID]);

  if (!_init_packed_headers (self))
/* Append @frame to the reorder queue in display order, assigning it a POC
 * and a slice type from the precomputed GOP pattern.  If @last is TRUE the
 * current GOP is terminated: the tail frame is promoted from B to P so the
 * GOP never ends on a non-reference B frame. */
gst_va_h264_enc_push_frame (GstVaH264Enc * self,
    GstVaH264EncFrame * frame, gboolean last)
  g_return_val_if_fail (self->gop.cur_frame_index <= self->gop.idr_period,

  /* Begin a new GOP, should have an empty reorder_list. */
  if (self->gop.cur_frame_index == self->gop.idr_period) {
    g_assert (g_queue_is_empty (&self->reorder_list));
    self->gop.cur_frame_index = 0;
    self->gop.cur_frame_num = 0;

      ((self->gop.cur_frame_index * 2) % self->gop.max_pic_order_cnt);

  if (self->gop.cur_frame_index == 0) {
    g_assert (frame->poc == 0);
    GST_LOG_OBJECT (self, "system_frame_number: %d, an IDR frame, starts"
        " a new GOP", frame->frame->system_frame_number);

    /* An IDR invalidates every previous reference. */
    g_queue_clear_full (&self->ref_list,
        (GDestroyNotify) gst_mini_object_unref);

  /* Copy the per-position attributes from the precomputed GOP table. */
  frame->type = self->gop.frame_types[self->gop.cur_frame_index].slice_type;
  frame->is_ref = self->gop.frame_types[self->gop.cur_frame_index].is_ref;
  frame->pyramid_level =
      self->gop.frame_types[self->gop.cur_frame_index].pyramid_level;
  frame->left_ref_poc_diff =
      self->gop.frame_types[self->gop.cur_frame_index].left_ref_poc_diff;
  frame->right_ref_poc_diff =
      self->gop.frame_types[self->gop.cur_frame_index].right_ref_poc_diff;

  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame->frame)) {
    GST_DEBUG_OBJECT (self, "system_frame_number: %d, a force key frame,"
        " promote its type from %s to %s", frame->frame->system_frame_number,
        _slice_type_name (frame->type), _slice_type_name (GST_H264_I_SLICE));
    frame->type = GST_H264_I_SLICE;
    frame->is_ref = TRUE;

  GST_LOG_OBJECT (self, "Push frame, system_frame_number: %d, poc %d, "
      "frame type %s", frame->frame->system_frame_number, frame->poc,
      _slice_type_name (frame->type));

  self->gop.cur_frame_index++;
  g_queue_push_tail (&self->reorder_list, frame);

  /* Ensure the last one is a non-B and end the GOP. */
  if (last && self->gop.cur_frame_index < self->gop.idr_period) {
    GstVaH264EncFrame *last_frame;

    /* Ensure next push will start a new GOP. */
    self->gop.cur_frame_index = self->gop.idr_period;

    if (!g_queue_is_empty (&self->reorder_list)) {
      last_frame = g_queue_peek_tail (&self->reorder_list);
      if (last_frame->type == GST_H264_B_SLICE) {
        last_frame->type = GST_H264_P_SLICE;
        last_frame->is_ref = TRUE;
/* Accumulator for counting, relative to a given POC, how many reference
 * frames lie after it (backward refs).  Filled by _count_backward_ref_num. */
struct RefFramesCount

/* GFunc for g_queue_foreach() over the reference list: bump the counter for
 * every reference whose POC is greater than count->poc. */
_count_backward_ref_num (gpointer data, gpointer user_data)
  GstVaH264EncFrame *frame = (GstVaH264EncFrame *) data;
  struct RefFramesCount *count = (struct RefFramesCount *) user_data;

  /* The probed frame itself must not be in the reference list. */
  g_assert (frame->poc != count->poc);
  if (frame->poc > count->poc)
static GstVaH264EncFrame *
/* In B-pyramid mode, pick the next B frame that is ready to encode: the one
 * with the lowest pyramid level and, among those, the smallest POC, whose
 * two anchor references have already left the reorder queue and are present
 * as backward references.  Returns NULL-equivalent when none is ready
 * (elided path not visible here). */
_pop_pyramid_b_frame (GstVaH264Enc * self)
  GstVaH264EncFrame *b_frame;
  struct RefFramesCount count;

  /* Pyramid mode always uses exactly one backward reference. */
  g_assert (self->gop.ref_num_list1 == 1);

  /* Find the lowest level with smallest poc. */
  for (i = 0; i < g_queue_get_length (&self->reorder_list); i++) {
    GstVaH264EncFrame *f;

    f = g_queue_peek_nth (&self->reorder_list, i);

    if (b_frame->pyramid_level > f->pyramid_level) {

    if (b_frame->poc > f->poc) {

  /* Check whether its refs are already popped. */
  g_assert (b_frame->left_ref_poc_diff != 0);
  g_assert (b_frame->right_ref_poc_diff != 0);
  for (i = 0; i < g_queue_get_length (&self->reorder_list); i++) {
    GstVaH264EncFrame *f;

    f = g_queue_peek_nth (&self->reorder_list, i);

    /* An anchor of the candidate is still queued: not ready yet. */
    if (f->poc == b_frame->poc + b_frame->left_ref_poc_diff ||
        f->poc == b_frame->poc + b_frame->right_ref_poc_diff) {

  /* Ensure we already have enough backward refs */
  count.poc = b_frame->poc;
  g_queue_foreach (&self->ref_list, (GFunc) _count_backward_ref_num, &count);
  if (count.num >= self->gop.ref_num_list1) {
    GstVaH264EncFrame *f;

    f = g_queue_pop_nth (&self->reorder_list, index);
    g_assert (f == b_frame);
/* Pop the next frame to encode (in coding order) from the reorder queue
 * into @out_frame, assigning its frame_num.  Non-B frames go out
 * immediately; B frames wait until enough backward references exist. */
gst_va_h264_enc_pop_frame (GstVaH264Enc * self, GstVaH264EncFrame ** out_frame)
  GstVaH264EncFrame *frame;
  struct RefFramesCount count;

  g_return_val_if_fail (self->gop.cur_frame_index <= self->gop.idr_period,

  if (g_queue_is_empty (&self->reorder_list))

  /* Return the last pushed non-B immediately. */
  frame = g_queue_peek_tail (&self->reorder_list);
  if (frame->type != GST_H264_B_SLICE) {
    *out_frame = g_queue_pop_tail (&self->reorder_list);

  if (self->gop.b_pyramid) {
    frame = _pop_pyramid_b_frame (self);

  g_assert (self->gop.ref_num_list1 > 0);

  /* If GOP end, pop anyway. */
  if (self->gop.cur_frame_index == self->gop.idr_period) {
    *out_frame = g_queue_pop_head (&self->reorder_list);

  /* Ensure we already have enough backward refs */
  frame = g_queue_peek_head (&self->reorder_list);
  count.poc = frame->poc;
  g_queue_foreach (&self->ref_list, (GFunc) _count_backward_ref_num, &count);
  if (count.num >= self->gop.ref_num_list1) {
    *out_frame = g_queue_pop_head (&self->reorder_list);

  /* frame_num must wrap before reaching max_frame_num. */
  g_assert (self->gop.cur_frame_num < self->gop.max_frame_num);

  (*out_frame)->frame_num = self->gop.cur_frame_num;

  /* Add the frame number for ref frames. */
  if ((*out_frame)->is_ref)
    self->gop.cur_frame_num++;

  /* frame_num 0 marks an IDR. */
  if ((*out_frame)->frame_num == 0)
    self->gop.total_idr_count++;

  if (self->gop.b_pyramid && (*out_frame)->type == GST_H264_B_SLICE) {
    GST_LOG_OBJECT (self, "pop a pyramid B frame with system_frame_number:"
        " %d, poc: %d, frame num: %d, is_ref: %s, level %d",
        (*out_frame)->frame->system_frame_number, (*out_frame)->poc,
        (*out_frame)->frame_num, (*out_frame)->is_ref ? "true" : "false",
        (*out_frame)->pyramid_level);

    GST_LOG_OBJECT (self, "pop a frame with system_frame_number: %d,"
        " frame type: %s, poc: %d, frame num: %d, is_ref: %s",
        (*out_frame)->frame->system_frame_number,
        _slice_type_name ((*out_frame)->type),
        (*out_frame)->poc, (*out_frame)->frame_num,
        (*out_frame)->is_ref ? "true" : "false");
static inline gboolean
/* Build self->sequence_hdr (a GstH264SPS) from the VA sequence parameter
 * buffer so the SPS can later be bitwritten as a packed header.  Also
 * derives the profile_idc, constraint flags and max_dec_frame_buffering. */
_fill_sps (GstVaH264Enc * self, VAEncSequenceParameterBufferH264 * seq_param)
  GstH264Profile profile;
  guint32 constraint_set0_flag, constraint_set1_flag;
  guint32 constraint_set2_flag, constraint_set3_flag;
  guint32 max_dec_frame_buffering;

  /* let max_num_ref_frames <= MaxDpbFrames. */
  max_dec_frame_buffering =
      MIN (self->gop.num_ref_frames + 1 /* Last frame before bump */ ,
      16 /* DPB_MAX_SIZE */ );

  constraint_set0_flag = 0;
  constraint_set1_flag = 0;
  constraint_set2_flag = 0;
  constraint_set3_flag = 0;

  /* Map the VA profile to the H.264 profile_idc and its constraint flags. */
  switch (self->profile) {
    case VAProfileH264ConstrainedBaseline:
      profile = GST_H264_PROFILE_BASELINE;
      /* A.2.1 (baseline profile constraints) */
      constraint_set0_flag = 1;
      constraint_set1_flag = 1;
    case VAProfileH264Main:
      profile = GST_H264_PROFILE_MAIN;
      /* A.2.2 (main profile constraints) */
      constraint_set1_flag = 1;
    case VAProfileH264High:
    case VAProfileH264MultiviewHigh:
    case VAProfileH264StereoHigh:
      profile = GST_H264_PROFILE_HIGH;

  /* seq_scaling_matrix_present_flag not supported now */
  g_assert (seq_param->seq_fields.bits.seq_scaling_matrix_present_flag == 0);
  /* pic_order_cnt_type only support 0 now */
  g_assert (seq_param->seq_fields.bits.pic_order_cnt_type == 0);
  /* only progressive frames encoding is supported now */
  g_assert (seq_param->seq_fields.bits.frame_mbs_only_flag);

  GST_DEBUG_OBJECT (self, "filling SPS");
  self->sequence_hdr = (GstH264SPS) {
    .profile_idc = profile,
    .constraint_set0_flag = constraint_set0_flag,
    .constraint_set1_flag = constraint_set1_flag,
    .constraint_set2_flag = constraint_set2_flag,
    .constraint_set3_flag = constraint_set3_flag,
    .level_idc = self->level_idc,

    .chroma_format_idc = seq_param->seq_fields.bits.chroma_format_idc,
    .bit_depth_luma_minus8 = seq_param->bit_depth_luma_minus8,
    .bit_depth_chroma_minus8 = seq_param->bit_depth_chroma_minus8,

    .log2_max_frame_num_minus4 =
        seq_param->seq_fields.bits.log2_max_frame_num_minus4,
    .pic_order_cnt_type = seq_param->seq_fields.bits.pic_order_cnt_type,
    .log2_max_pic_order_cnt_lsb_minus4 =
        seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4,

    .num_ref_frames = seq_param->max_num_ref_frames,
    .gaps_in_frame_num_value_allowed_flag = 0,
    .pic_width_in_mbs_minus1 = seq_param->picture_width_in_mbs - 1,
    .pic_height_in_map_units_minus1 =
        (seq_param->seq_fields.bits.frame_mbs_only_flag ?
        seq_param->picture_height_in_mbs - 1 :
        seq_param->picture_height_in_mbs / 2 - 1),
    .frame_mbs_only_flag = seq_param->seq_fields.bits.frame_mbs_only_flag,
    .mb_adaptive_frame_field_flag = 0,
    .direct_8x8_inference_flag =
        seq_param->seq_fields.bits.direct_8x8_inference_flag,
    .frame_cropping_flag = seq_param->frame_cropping_flag,
    .frame_crop_left_offset = seq_param->frame_crop_left_offset,
    .frame_crop_right_offset = seq_param->frame_crop_right_offset,
    .frame_crop_top_offset = seq_param->frame_crop_top_offset,
    .frame_crop_bottom_offset = seq_param->frame_crop_bottom_offset,

    .vui_parameters_present_flag = seq_param->vui_parameters_present_flag,

    /* VUI sub-structure. */
    .aspect_ratio_info_present_flag =
        seq_param->vui_fields.bits.aspect_ratio_info_present_flag,
    .aspect_ratio_idc = seq_param->aspect_ratio_idc,
    .sar_width = seq_param->sar_width,
    .sar_height = seq_param->sar_height,
    .overscan_info_present_flag = 0,
    .overscan_appropriate_flag = 0,
    .chroma_loc_info_present_flag = 0,
    .timing_info_present_flag =
        seq_param->vui_fields.bits.timing_info_present_flag,
    .num_units_in_tick = seq_param->num_units_in_tick,
    .time_scale = seq_param->time_scale,
    .fixed_frame_rate_flag = seq_param->vui_fields.bits.fixed_frame_rate_flag,

    /* We do not write hrd and no need for buffering period SEI. */
    .nal_hrd_parameters_present_flag = 0,
    .vcl_hrd_parameters_present_flag = 0,

    .low_delay_hrd_flag = seq_param->vui_fields.bits.low_delay_hrd_flag,
    .pic_struct_present_flag = 1,
    .bitstream_restriction_flag =
        seq_param->vui_fields.bits.bitstream_restriction_flag,
    .motion_vectors_over_pic_boundaries_flag =
        seq_param->vui_fields.bits.motion_vectors_over_pic_boundaries_flag,
    .max_bytes_per_pic_denom = 2,
    .max_bits_per_mb_denom = 1,
    .log2_max_mv_length_horizontal =
        seq_param->vui_fields.bits.log2_max_mv_length_horizontal,
    .log2_max_mv_length_vertical =
        seq_param->vui_fields.bits.log2_max_mv_length_vertical,
    .num_reorder_frames = self->gop.num_reorder_frames,
    .max_dec_frame_buffering = max_dec_frame_buffering,
/* Bitwrite the SPS stored in self->sequence_hdr and attach it to @frame's
 * picture as a packed sequence header.  Returns FALSE on writer or driver
 * failure. */
_add_sequence_header (GstVaH264Enc * self, GstVaH264EncFrame * frame)
/* Worst-case packed SPS size: start code + SPS + VUI + two HRD blocks. */
#define SPS_SIZE 4 + GST_ROUND_UP_8 (MAX_SPS_HDR_SIZE + MAX_VUI_PARAMS_SIZE + \
    2 * MAX_HRD_PARAMS_SIZE) / 8
  guint8 packed_sps[SPS_SIZE] = { 0, };

  size = sizeof (packed_sps);
  if (gst_h264_bit_writer_sps (&self->sequence_hdr, TRUE, packed_sps,
          &size) != GST_H264_BIT_WRITER_OK) {
    GST_ERROR_OBJECT (self, "Failed to generate the sequence header");

  if (!gst_va_encoder_add_packed_header (self->encoder, frame->picture,
          VAEncPackedHeaderSequence, packed_sps, size, FALSE)) {
    GST_ERROR_OBJECT (self, "Failed to add the packed sequence header");
/* Fill the VA sequence parameter buffer from the negotiated configuration:
 * GOP periods, rate control target, geometry, VUI timing/aspect info, and
 * the frame cropping window when the size is not macroblock aligned. */
_fill_sequence_param (GstVaH264Enc * self,
    VAEncSequenceParameterBufferH264 * sequence)
  gboolean direct_8x8_inference_flag = TRUE;

  g_assert (self->gop.log2_max_frame_num >= 4);
  g_assert (self->gop.log2_max_pic_order_cnt >= 4);

  /* A.2.3 Extended profile:
   * Sequence parameter sets shall have direct_8x8_inference_flag
   * A.3.3 Profile-specific level limits:
   * direct_8x8_inference_flag is not relevant to the Baseline,
   * Constrained Baseline, Constrained High, High 10 Intra, High 4:2:2
   * Intra, High 4:4:4 Intra, and CAVLC 4:4:4 Intra profiles as these
   * profiles do not allow B slice types, and
   * direct_8x8_inference_flag is equal to 1 for all levels of the
   * Extended profile. Table A-4. We only have constrained baseline
  if (self->profile == VAProfileH264ConstrainedBaseline)
    direct_8x8_inference_flag = FALSE;

  *sequence = (VAEncSequenceParameterBufferH264) {
    .seq_parameter_set_id = 0,
    .level_idc = self->level_idc,
        self->gop.i_period > 0 ? self->gop.i_period : self->gop.idr_period,
    .intra_idr_period = self->gop.idr_period,
    .ip_period = self->gop.ip_period,
    .bits_per_second = self->rc.target_bitrate_bits,
    .max_num_ref_frames = self->gop.num_ref_frames,
    .picture_width_in_mbs = self->mb_width,
    .picture_height_in_mbs = self->mb_height,

    .seq_fields.bits = {
      /* Only support 4:2:0 now. */
      .chroma_format_idc = 1,
      .frame_mbs_only_flag = 1,
      .mb_adaptive_frame_field_flag = FALSE,
      .seq_scaling_matrix_present_flag = FALSE,
      .direct_8x8_inference_flag = direct_8x8_inference_flag,
      .log2_max_frame_num_minus4 = self->gop.log2_max_frame_num - 4,
      .pic_order_cnt_type = 0,
      .log2_max_pic_order_cnt_lsb_minus4 = self->gop.log2_max_pic_order_cnt - 4,

    .bit_depth_luma_minus8 = 0,
    .bit_depth_chroma_minus8 = 0,

    .vui_parameters_present_flag = TRUE,
    .vui_fields.bits = {
      .aspect_ratio_info_present_flag = TRUE,
      .timing_info_present_flag = TRUE,
      .bitstream_restriction_flag = TRUE,
      .log2_max_mv_length_horizontal = 15,
      .log2_max_mv_length_vertical = 15,
      .fixed_frame_rate_flag = 1,
      .low_delay_hrd_flag = 0,
      .motion_vectors_over_pic_boundaries_flag = TRUE,

    /* Extended SAR: exact sample aspect ratio follows in sar_width/height. */
    .aspect_ratio_idc = 0xff,
    /* FIXME: what if no framerate info is provided */
    .sar_width = GST_VIDEO_INFO_PAR_N (&self->in_info),
    .sar_height = GST_VIDEO_INFO_PAR_D (&self->in_info),
    .num_units_in_tick = GST_VIDEO_INFO_FPS_D (&self->in_info),
    /* Field-based timing: time_scale is twice the frame rate numerator. */
    .time_scale = GST_VIDEO_INFO_FPS_N (&self->in_info) * 2,

  /* frame_cropping_flag */
  if (self->width & 15 || self->height & 15) {
    static const guint SubWidthC[] = { 1, 2, 2, 1 };
    static const guint SubHeightC[] = { 1, 2, 1, 1 };
    const guint CropUnitX =
        SubWidthC[sequence->seq_fields.bits.chroma_format_idc];
    const guint CropUnitY =
        SubHeightC[sequence->seq_fields.bits.chroma_format_idc] *
        (2 - sequence->seq_fields.bits.frame_mbs_only_flag);

    sequence->frame_cropping_flag = 1;
    sequence->frame_crop_left_offset = 0;
    sequence->frame_crop_right_offset = (16 * self->mb_width -
        self->width) / CropUnitX;
    sequence->frame_crop_top_offset = 0;
    sequence->frame_crop_bottom_offset = (16 * self->mb_height -
        self->height) / CropUnitY;
/* Submit the VA sequence parameter buffer for @picture.  Returns FALSE on
 * driver failure. */
_add_sequence_parameter (GstVaH264Enc * self, GstVaEncodePicture * picture,
    VAEncSequenceParameterBufferH264 * sequence)
  if (!gst_va_encoder_add_param (self->encoder, picture,
          VAEncSequenceParameterBufferType, sequence, sizeof (*sequence))) {
    GST_ERROR_OBJECT (self, "Failed to create the sequence parameter");
2145 _add_rate_control_parameter (GstVaH264Enc * self, GstVaEncodePicture * picture)
2147 uint32_t window_size;
2148 struct VAEncMiscParameterRateControlWrap
2150 VAEncMiscParameterType type;
2151 VAEncMiscParameterRateControl rate_control;
2154 if (self->rc.rc_ctrl_mode == VA_RC_CQP)
2157 window_size = self->rc.rc_ctrl_mode == VA_RC_VBR ?
2158 self->rc.max_bitrate_bits / 2 : self->rc.max_bitrate_bits;
2161 rate_control = (struct VAEncMiscParameterRateControlWrap) {
2162 .type = VAEncMiscParameterTypeRateControl,
2164 .bits_per_second = self->rc.max_bitrate_bits,
2165 .target_percentage = self->rc.target_percentage,
2166 .window_size = window_size,
2167 .initial_qp = self->rc.qp_i,
2168 .min_qp = self->rc.min_qp,
2169 .max_qp = self->rc.max_qp,
2170 .rc_flags.bits.mb_rate_control = self->rc.mbbrc,
2171 .quality_factor = 0,
2176 if (!gst_va_encoder_add_param (self->encoder, picture,
2177 VAEncMiscParameterBufferType, &rate_control, sizeof (rate_control))) {
2178 GST_ERROR_OBJECT (self, "Failed to create the race control parameter");
/* Submit the HRD (hypothetical reference decoder) misc parameter with the
 * CPB size and initial fullness.  Skipped for CQP and VCM modes which do
 * not use the HRD model. */
_add_hrd_parameter (GstVaH264Enc * self, GstVaEncodePicture * picture)
    VAEncMiscParameterType type;
    VAEncMiscParameterHRD hrd;
    .type = VAEncMiscParameterTypeHRD,
      .buffer_size = self->rc.cpb_length_bits,
      /* Start the coded picture buffer half full. */
      .initial_buffer_fullness = self->rc.cpb_length_bits / 2,

  if (self->rc.rc_ctrl_mode == VA_RC_CQP || self->rc.rc_ctrl_mode == VA_RC_VCM)

  g_assert (self->rc.max_bitrate_bits > 0);

  if (!gst_va_encoder_add_param (self->encoder, picture,
          VAEncMiscParameterBufferType, &hrd, sizeof (hrd))) {
    GST_ERROR_OBJECT (self, "Failed to create the HRD parameter");
/* Submit the quality-level (target usage) misc parameter.  A target_usage
 * of 0 means "driver default", so nothing is sent in that case. */
_add_quality_level_parameter (GstVaH264Enc * self, GstVaEncodePicture * picture)
    VAEncMiscParameterType type;
    VAEncMiscParameterBufferQualityLevel ql;
    .type = VAEncMiscParameterTypeQualityLevel,
    .ql.quality_level = self->rc.target_usage,

  if (self->rc.target_usage == 0)

  if (!gst_va_encoder_add_param (self->encoder, picture,
          VAEncMiscParameterBufferType, &quality_level,
          sizeof (quality_level))) {
    GST_ERROR_OBJECT (self, "Failed to create the quality level parameter");
/* Submit the frame rate misc parameter.  VA packs the fraction into one
 * 32-bit word: numerator in the low 16 bits, denominator in the high 16. */
_add_frame_rate_parameter (GstVaH264Enc * self, GstVaEncodePicture * picture)
    VAEncMiscParameterType type;
    VAEncMiscParameterFrameRate fr;
    .type = VAEncMiscParameterTypeFrameRate,
    /* denominator = framerate >> 16 & 0xffff;
     * numerator = framerate & 0xffff; */
    .fr.framerate = (GST_VIDEO_INFO_FPS_N (&self->in_info) & 0xffff) |
        ((GST_VIDEO_INFO_FPS_D (&self->in_info) & 0xffff) << 16)

  if (!gst_va_encoder_add_param (self->encoder, picture,
          VAEncMiscParameterBufferType, &framerate, sizeof (framerate))) {
    GST_ERROR_OBJECT (self, "Failed to create the frame rate parameter");
/* Submit the quantization misc parameter enabling trellis quantization for
 * I, P and B slices.  No-op unless the trellis property is enabled. */
_add_trellis_parameter (GstVaH264Enc * self, GstVaEncodePicture * picture)
    VAEncMiscParameterType type;
    VAEncMiscParameterQuantization tr;
    .type = VAEncMiscParameterTypeQuantization,
    .tr.quantization_flags.bits = {
      .disable_trellis = 0,
      .enable_trellis_I = 1,
      .enable_trellis_B = 1,
      .enable_trellis_P = 1,

  if (!self->use_trellis)

  if (!gst_va_encoder_add_param (self->encoder, picture,
          VAEncMiscParameterBufferType, &trellis, sizeof (trellis))) {
    GST_ERROR_OBJECT (self, "Failed to create the trellis parameter");
static inline gboolean
/* Fill the VA picture parameter buffer for @frame: current reconstructed
 * surface, coded buffer, per-picture flags, and — for P/B frames — the
 * ReferenceFrames[] list taken from self->ref_list.  Returns FALSE when a
 * non-I frame has no references available. */
_fill_picture_parameter (GstVaH264Enc * self, GstVaH264EncFrame * frame,
    VAEncPictureParameterBufferH264 * pic_param)
  *pic_param = (VAEncPictureParameterBufferH264) {
    .CurrPic.picture_id = gst_va_encode_picture_get_reconstruct_surface (frame->picture),
    .CurrPic.TopFieldOrderCnt = frame->poc,
    .coded_buf = frame->picture->coded_buffer,
    /* Only support one sps and pps now. */
    .pic_parameter_set_id = 0,
    .seq_parameter_set_id = 0,
    /* means last encoding picture, EOS nal added. */
    .last_picture = frame->last_frame,
    .frame_num = frame->frame_num,

    .pic_init_qp = self->rc.qp_i,
    /* Use slice's these fields to control ref num. */
    .num_ref_idx_l0_active_minus1 = 0,
    .num_ref_idx_l1_active_minus1 = 0,
    .chroma_qp_index_offset = 0,
    .second_chroma_qp_index_offset = 0,
    /* picture fields */
    .pic_fields.bits.idr_pic_flag = (frame->frame_num == 0),
    .pic_fields.bits.reference_pic_flag = frame->is_ref,
    .pic_fields.bits.entropy_coding_mode_flag = self->use_cabac,
    .pic_fields.bits.weighted_pred_flag = 0,
    .pic_fields.bits.weighted_bipred_idc = 0,
    .pic_fields.bits.constrained_intra_pred_flag = 0,
    .pic_fields.bits.transform_8x8_mode_flag = self->use_dct8x8,
    /* enable deblocking */
    .pic_fields.bits.deblocking_filter_control_present_flag = 1,
    .pic_fields.bits.redundant_pic_cnt_present_flag = 0,
    /* bottom_field_pic_order_in_frame_present_flag */
    .pic_fields.bits.pic_order_present_flag = 0,
    .pic_fields.bits.pic_scaling_matrix_present_flag = 0,

  /* Non I frame, construct reference list. */
  if (frame->type != GST_H264_I_SLICE) {
    GstVaH264EncFrame *f;

    if (g_queue_is_empty (&self->ref_list)) {
      GST_ERROR_OBJECT (self, "No reference found for frame type %s",
          _slice_type_name (frame->type));

    g_assert (g_queue_get_length (&self->ref_list) <= self->gop.num_ref_frames);

    /* ref frames in queue are already sorted by frame_num. */
    for (; i < g_queue_get_length (&self->ref_list); i++) {
      f = g_queue_peek_nth (&self->ref_list, i);

      pic_param->ReferenceFrames[i].picture_id =
          gst_va_encode_picture_get_reconstruct_surface (f->picture);
      pic_param->ReferenceFrames[i].TopFieldOrderCnt = f->poc;
      pic_param->ReferenceFrames[i].flags =
          VA_PICTURE_H264_SHORT_TERM_REFERENCE;
      pic_param->ReferenceFrames[i].frame_idx = f->frame_num;

    /* Terminate the list with an invalid entry. */
    pic_param->ReferenceFrames[i].picture_id = VA_INVALID_ID;
/* Submits the filled VAEncPictureParameterBufferH264 to the VA encoder for
 * this frame's picture. */
2374 _add_picture_parameter (GstVaH264Enc * self, GstVaH264EncFrame * frame,
2375 VAEncPictureParameterBufferH264 * pic_param)
2377 if (!gst_va_encoder_add_param (self->encoder, frame->picture,
2378 VAEncPictureParameterBufferType, pic_param,
2379 sizeof (VAEncPictureParameterBufferH264))) {
2380 GST_ERROR_OBJECT (self, "Failed to create the picture parameter");
/* Derives a GstH264PPS from the VA picture parameter so the packed PPS and
 * slice headers written by the bit writer agree with what the driver was
 * told.  Fields not expressible here (qs offset, scaling lists) are zeroed. */
2388 _fill_pps (VAEncPictureParameterBufferH264 * pic_param, GstH264SPS * sps,
2392 *pps = (GstH264PPS) {
2395 .entropy_coding_mode_flag =
2396 pic_param->pic_fields.bits.entropy_coding_mode_flag,
2397 .pic_order_present_flag =
2398 pic_param->pic_fields.bits.pic_order_present_flag,
2399 .num_slice_groups_minus1 = 0,
2401 .num_ref_idx_l0_active_minus1 = pic_param->num_ref_idx_l0_active_minus1,
2402 .num_ref_idx_l1_active_minus1 = pic_param->num_ref_idx_l1_active_minus1,
2404 .weighted_pred_flag = pic_param->pic_fields.bits.weighted_pred_flag,
2405 .weighted_bipred_idc = pic_param->pic_fields.bits.weighted_bipred_idc,
2406 .pic_init_qp_minus26 = pic_param->pic_init_qp - 26,
2407 .pic_init_qs_minus26 = 0,
2408 .chroma_qp_index_offset = pic_param->chroma_qp_index_offset,
2409 .deblocking_filter_control_present_flag =
2410 pic_param->pic_fields.bits.deblocking_filter_control_present_flag,
2411 .constrained_intra_pred_flag =
2412 pic_param->pic_fields.bits.constrained_intra_pred_flag,
2413 .redundant_pic_cnt_present_flag =
2414 pic_param->pic_fields.bits.redundant_pic_cnt_present_flag,
2415 .transform_8x8_mode_flag =
2416 pic_param->pic_fields.bits.transform_8x8_mode_flag,
2417 /* scaling lists are not supported */
2418 .pic_scaling_matrix_present_flag = 0,
2419 .second_chroma_qp_index_offset = pic_param->second_chroma_qp_index_offset,
/* Serializes the PPS with the H.264 bit writer into a small stack buffer
 * and attaches it to the picture as a packed picture header. */
2425 _add_picture_header (GstVaH264Enc * self, GstVaH264EncFrame * frame,
/* 4 bytes of start code + the rounded-up maximum PPS payload. */
2428 #define PPS_SIZE 4 + GST_ROUND_UP_8 (MAX_PPS_HDR_SIZE) / 8
2429 guint8 packed_pps[PPS_SIZE] = { 0, };
2433 size = sizeof (packed_pps);
2434 if (gst_h264_bit_writer_pps (pps, TRUE, packed_pps,
2435 &size) != GST_H264_BIT_WRITER_OK) {
2436 GST_ERROR_OBJECT (self, "Failed to generate the picture header");
2440 if (!gst_va_encoder_add_packed_header (self->encoder, frame->picture,
2441 VAEncPackedHeaderPicture, packed_pps, size, FALSE)) {
2442 GST_ERROR_OBJECT (self, "Failed to add the packed picture header");
/* Builds one VAEncSliceParameterBufferH264 covering @mb_size macroblocks
 * starting at @start_mb and submits it to the encoder.  In CQP mode the
 * per-slice QP delta encodes the P/B offset relative to the I-frame QP.
 * RefPicList0/1 are filled from @list0/@list1 and the unused tail entries
 * are invalidated. */
2450 _add_one_slice (GstVaH264Enc * self, GstVaH264EncFrame * frame,
2451 gint start_mb, gint mb_size,
2452 VAEncSliceParameterBufferH264 * slice,
2453 GstVaH264EncFrame * list0[16], guint list0_num,
2454 GstVaH264EncFrame * list1[16], guint list1_num)
2456 int8_t slice_qp_delta = 0;
/* CQP: express qp_p/qp_b as deltas from pic_init_qp (== qp_i). */
2460 if (self->rc.rc_ctrl_mode == VA_RC_CQP) {
2461 if (frame->type == GST_H264_P_SLICE) {
2462 slice_qp_delta = self->rc.qp_p - self->rc.qp_i;
2463 } else if (frame->type == GST_H264_B_SLICE) {
2464 slice_qp_delta = (int8_t) (self->rc.qp_b - self->rc.qp_i);
2466 g_assert (slice_qp_delta <= 51 && slice_qp_delta >= -51);
2469 *slice = (VAEncSliceParameterBufferH264) {
2470 .macroblock_address = start_mb,
2471 .num_macroblocks = mb_size,
2472 .macroblock_info = VA_INVALID_ID,
2473 .slice_type = (uint8_t) frame->type,
2474 /* Only one parameter set supported now. */
2475 .pic_parameter_set_id = 0,
2476 .idr_pic_id = self->gop.total_idr_count,
2477 .pic_order_cnt_lsb = frame->poc,
2478 /* Not support top/bottom. */
2479 .delta_pic_order_cnt_bottom = 0,
2480 .delta_pic_order_cnt[0] = 0,
2481 .delta_pic_order_cnt[1] = 0,
2483 .direct_spatial_mv_pred_flag = TRUE,
2484 /* .num_ref_idx_active_override_flag = , */
2485 /* .num_ref_idx_l0_active_minus1 = , */
2486 /* .num_ref_idx_l1_active_minus1 = , */
2487 /* Set the reference list later. */
/* No weighted prediction: all weight flags/denoms stay zero. */
2489 .luma_log2_weight_denom = 0,
2490 .chroma_log2_weight_denom = 0,
2491 .luma_weight_l0_flag = 0,
2492 .chroma_weight_l0_flag = 0,
2493 .luma_weight_l1_flag = 0,
2494 .chroma_weight_l1_flag = 0,
2496 .cabac_init_idc = 0,
2497 /* Just use picture default setting. */
2498 .slice_qp_delta = slice_qp_delta,
2500 .disable_deblocking_filter_idc = 0,
2501 .slice_alpha_c0_offset_div2 = 2,
2502 .slice_beta_offset_div2 = 2,
/* Override the active ref counts only for predicted slices. */
2506 if (frame->type == GST_H264_B_SLICE || frame->type == GST_H264_P_SLICE) {
2507 slice->num_ref_idx_active_override_flag = (list0_num > 0 || list1_num > 0);
2508 slice->num_ref_idx_l0_active_minus1 = list0_num > 0 ? list0_num - 1 : 0;
2509 if (frame->type == GST_H264_B_SLICE)
2510 slice->num_ref_idx_l1_active_minus1 = list1_num > 0 ? list1_num - 1 : 0;
/* List0 (forward refs) for P and B slices. */
2514 if (frame->type != GST_H264_I_SLICE) {
2515 for (; i < list0_num; i++) {
2516 slice->RefPicList0[i].picture_id =
2517 gst_va_encode_picture_get_reconstruct_surface (list0[i]->picture);
2518 slice->RefPicList0[i].TopFieldOrderCnt = list0[i]->poc;
2519 slice->RefPicList0[i].flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
2520 slice->RefPicList0[i].frame_idx = list0[i]->frame_num;
2523 for (; i < G_N_ELEMENTS (slice->RefPicList0); ++i) {
2524 slice->RefPicList0[i].picture_id = VA_INVALID_SURFACE;
2525 slice->RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
/* List1 (backward refs) only for B slices. */
2529 if (frame->type == GST_H264_B_SLICE) {
2530 for (; i < list1_num; i++) {
2531 slice->RefPicList1[i].picture_id =
2532 gst_va_encode_picture_get_reconstruct_surface (list1[i]->picture);
2533 slice->RefPicList1[i].TopFieldOrderCnt = list1[i]->poc;
2534 slice->RefPicList1[i].flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
2535 slice->RefPicList1[i].frame_idx = list1[i]->frame_num;
2538 for (; i < G_N_ELEMENTS (slice->RefPicList1); ++i) {
2539 slice->RefPicList1[i].picture_id = VA_INVALID_SURFACE;
2540 slice->RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
2543 if (!gst_va_encoder_add_param (self->encoder, frame->picture,
2544 VAEncSliceParameterBufferType, slice,
2545 sizeof (VAEncSliceParameterBufferH264))) {
2546 GST_ERROR_OBJECT (self, "Failed to create the slice parameter");
/* qsort comparator: sort frames by ascending POC. */
2554 _poc_asc_compare (const GstVaH264EncFrame ** a, const GstVaH264EncFrame ** b)
2556 return (*a)->poc - (*b)->poc;
/* qsort comparator: sort frames by descending POC. */
2560 _poc_des_compare (const GstVaH264EncFrame ** a, const GstVaH264EncFrame ** b)
2562 return (*b)->poc - (*a)->poc;
/* qsort comparator: sort frames by ascending frame_num. */
2566 _frame_num_asc_compare (const GstVaH264EncFrame ** a,
2567 const GstVaH264EncFrame ** b)
2569 return (*a)->frame_num - (*b)->frame_num;
/* qsort comparator: sort frames by descending frame_num. */
2573 _frame_num_des_compare (const GstVaH264EncFrame ** a,
2574 const GstVaH264EncFrame ** b)
2576 return (*b)->frame_num - (*a)->frame_num;
/* Returns whether @list's frame_num sequence deviates from the expected
 * monotonic order (@is_asc selects ascending vs descending), i.e. whether a
 * ref_pic_list_modification must be written into the slice header. */
2579 /* If all the pic_num in the same order, OK. */
2581 _ref_list_need_reorder (GstVaH264EncFrame * list[16], guint list_num,
2590 for (i = 1; i < list_num; i++) {
2591 pic_num_diff = list[i]->frame_num - list[i - 1]->frame_num;
/* Duplicate frame_num in one list would be an encoder-state bug. */
2592 g_assert (pic_num_diff != 0);
2594 if (pic_num_diff > 0 && !is_asc)
2597 if (pic_num_diff < 0 && is_asc)
/* Writes the ref_pic_list_modification syntax into @slice_hdr so the decoder
 * rebuilds the reference list in the order given by @list.  The modification
 * operations are expressed as abs_diff_pic_num deltas against a running
 * predictor, per the H.264 list-modification process; a final idc==3 entry
 * terminates the loop.  @is_asc must match the default order the decoder
 * would produce for this list. */
2605 _insert_ref_pic_list_modification (GstH264SliceHdr * slice_hdr,
2606 GstVaH264EncFrame * list[16], guint list_num, gboolean is_asc)
2608 GstVaH264EncFrame *list_by_pic_num[16] = { };
2609 guint modification_num, i;
2610 GstH264RefPicListModification *ref_pic_list_modification = NULL;
2611 gint pic_num_diff, pic_num_lx_pred;
/* Sort a copy by frame_num to obtain the decoder's default order. */
2613 memcpy (list_by_pic_num, list, sizeof (GstVaH264EncFrame *) * list_num);
2616 g_qsort_with_data (list_by_pic_num, list_num, sizeof (gpointer),
2617 (GCompareDataFunc) _frame_num_asc_compare, NULL);
2619 g_qsort_with_data (list_by_pic_num, list_num, sizeof (gpointer),
2620 (GCompareDataFunc) _frame_num_des_compare, NULL);
/* Modify entries only up to the last position that differs. */
2623 modification_num = 0;
2624 for (i = 0; i < list_num; i++) {
2625 if (list_by_pic_num[i]->poc != list[i]->poc)
2626 modification_num = i + 1;
2628 g_assert (modification_num > 0);
2631 slice_hdr->ref_pic_list_modification_flag_l1 = 1;
2632 slice_hdr->n_ref_pic_list_modification_l1 =
2633 modification_num + 1 /* The end operation. */ ;
2634 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2636 slice_hdr->ref_pic_list_modification_flag_l0 = 1;
2637 slice_hdr->n_ref_pic_list_modification_l0 =
2638 modification_num + 1 /* The end operation. */ ;
2639 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
/* The predictor starts at the current picture's frame_num. */
2642 pic_num_lx_pred = slice_hdr->frame_num;
2643 for (i = 0; i < modification_num; i++) {
2644 pic_num_diff = list[i]->frame_num - pic_num_lx_pred;
2645 /* For the next loop. */
2646 pic_num_lx_pred = list[i]->frame_num;
2648 g_assert (pic_num_diff != 0);
/* idc 1: add abs_diff; idc 0: subtract abs_diff. */
2650 if (pic_num_diff > 0) {
2651 ref_pic_list_modification->modification_of_pic_nums_idc = 1;
2652 ref_pic_list_modification->value.abs_diff_pic_num_minus1 =
2655 ref_pic_list_modification->modification_of_pic_nums_idc = 0;
2656 ref_pic_list_modification->value.abs_diff_pic_num_minus1 =
2657 (-pic_num_diff) - 1;
2660 ref_pic_list_modification++;
/* idc 3 terminates the modification loop. */
2663 ref_pic_list_modification->modification_of_pic_nums_idc = 3;
/* Adds adaptive dec_ref_pic_marking to @slice_hdr so the decoder explicitly
 * marks the picture with @unused_frame_num as "unused for reference"
 * (memory_management_control_operation == 1), followed by the mandatory
 * end-of-marking operation (op == 0). */
2667 _insert_ref_pic_marking_for_unused_frame (GstH264SliceHdr * slice_hdr,
2668 gint cur_frame_num, gint unused_frame_num)
2670 GstH264RefPicMarking *refpicmarking;
2672 slice_hdr->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag = 1;
2673 slice_hdr->dec_ref_pic_marking.n_ref_pic_marking = 2;
2675 refpicmarking = &slice_hdr->dec_ref_pic_marking.ref_pic_marking[0];
2677 refpicmarking->memory_management_control_operation = 1;
2678 refpicmarking->difference_of_pic_nums_minus1 =
2679 cur_frame_num - unused_frame_num - 1;
2681 refpicmarking = &slice_hdr->dec_ref_pic_marking.ref_pic_marking[1];
2682 refpicmarking->memory_management_control_operation = 0;
/* Builds the packed H.264 slice header matching the VA slice parameter
 * (@slice) and @pps, then attaches it to the picture.  Inserts
 * ref_pic_list_modification syntax when list0/list1 are not already in
 * decoder-default pic_num order, and adds explicit MMCO marking when this
 * frame replaces a reference (unused_for_reference_pic_num >= 0).
 *
 * FIX(review): the guard for the list1 reordering below tested list0_num
 * (copy/paste from the list0 branch), so list1 reordering was gated on the
 * wrong list's length; it must test list1_num. */
2686 _add_slice_header (GstVaH264Enc * self, GstVaH264EncFrame * frame,
2687 GstH264PPS * pps, VAEncSliceParameterBufferH264 * slice,
2688 GstVaH264EncFrame * list0[16], guint list0_num,
2689 GstVaH264EncFrame * list1[16], guint list1_num)
2691 GstH264SliceHdr slice_hdr;
2693 GstH264NalUnitType nal_type = GST_H264_NAL_SLICE;
2694 #define SLICE_HDR_SIZE 4 + GST_ROUND_UP_8 (MAX_SLICE_HDR_SIZE) / 8
2695 guint8 packed_slice_hdr[SLICE_HDR_SIZE] = { 0, };
2696 #undef SLICE_HDR_SIZE
/* frame_num == 0 marks an IDR picture. */
2698 if (frame->frame_num == 0)
2699 nal_type = GST_H264_NAL_SLICE_IDR;
2702 slice_hdr = (GstH264SliceHdr) {
2703 .first_mb_in_slice = slice->macroblock_address,
2704 .type = slice->slice_type,
2706 .frame_num = frame->frame_num,
2707 /* interlaced not supported now. */
2708 .field_pic_flag = 0,
2709 .bottom_field_flag = 0,
2710 .idr_pic_id = (frame->frame_num == 0 ? slice->idr_pic_id : 0),
2711 /* only pic_order_cnt_type 1 is supported now. */
2712 .pic_order_cnt_lsb = slice->pic_order_cnt_lsb,
2713 .delta_pic_order_cnt_bottom = slice->delta_pic_order_cnt_bottom,
2714 /* Only for B frame. */
2715 .direct_spatial_mv_pred_flag =
2716 (frame->type == GST_H264_B_SLICE ?
2717 slice->direct_spatial_mv_pred_flag : 0),
2719 .num_ref_idx_active_override_flag = slice->num_ref_idx_active_override_flag,
2720 .num_ref_idx_l0_active_minus1 = slice->num_ref_idx_l0_active_minus1,
2721 .num_ref_idx_l1_active_minus1 = slice->num_ref_idx_l1_active_minus1,
2722 /* Calculate it later. */
2723 .ref_pic_list_modification_flag_l0 = 0,
2724 .ref_pic_list_modification_flag_l1 = 0,
2725 /* We have weighted_pred_flag and weighted_bipred_idc 0 here, no
2726 * need weight_table. */
2728 .dec_ref_pic_marking = {
2729 .no_output_of_prior_pics_flag = 0,
2730 .long_term_reference_flag = 0,
2731 /* If not sliding_window, we set it later. */
2732 .adaptive_ref_pic_marking_mode_flag = 0,
2735 .cabac_init_idc = slice->cabac_init_idc,
2736 .slice_qp_delta = slice->slice_qp_delta,
2738 .disable_deblocking_filter_idc = slice->disable_deblocking_filter_idc,
2739 .slice_alpha_c0_offset_div2 = slice->slice_alpha_c0_offset_div2,
2740 .slice_beta_offset_div2 = slice->slice_beta_offset_div2,
2744 /* Reorder the ref lists if needed. */
2745 if (list0_num > 1) {
2746 /* list0 is in poc descend order now. */
2747 if (_ref_list_need_reorder (list0, list0_num, FALSE))
2748 _insert_ref_pic_list_modification (&slice_hdr, list0, list0_num, FALSE);
2751 if (list1_num > 1) {
2752 /* list1 is in poc ascend order now. */
2753 if (_ref_list_need_reorder (list1, list1_num, TRUE)) {
2754 _insert_ref_pic_list_modification (&slice_hdr, list1, list1_num, TRUE);
2758 /* Mark the unused reference explicitly which this frame replaces. */
2759 if (frame->unused_for_reference_pic_num >= 0) {
2760 g_assert (frame->is_ref);
2761 _insert_ref_pic_marking_for_unused_frame (&slice_hdr, frame->frame_num,
2762 frame->unused_for_reference_pic_num);
2765 size = sizeof (packed_slice_hdr);
2766 if (gst_h264_bit_writer_slice_hdr (&slice_hdr, TRUE, nal_type, frame->is_ref,
2767 packed_slice_hdr, &size) != GST_H264_BIT_WRITER_OK) {
2768 GST_ERROR_OBJECT (self, "Failed to generate the slice header");
2772 if (!gst_va_encoder_add_packed_header (self->encoder, frame->picture,
2773 VAEncPackedHeaderSlice, packed_slice_hdr, size, FALSE)) {
2774 GST_ERROR_OBJECT (self, "Failed to add the packed slice header");
/* Writes an Access Unit Delimiter NAL for @frame and attaches it as packed
 * raw data.  primary_pic_type per the AUD syntax: 0 = I, 1 = I/P, 2 = I/P/B. */
2782 _add_aud (GstVaH264Enc * self, GstVaH264EncFrame * frame)
2784 guint8 aud_data[8] = { };
2786 guint8 primary_pic_type = 0;
2788 switch (frame->type) {
2789 case GST_H264_I_SLICE:
2790 primary_pic_type = 0;
2792 case GST_H264_P_SLICE:
2793 primary_pic_type = 1;
2795 case GST_H264_B_SLICE:
2796 primary_pic_type = 2;
/* Only I/P/B slice types are produced by this encoder. */
2799 g_assert_not_reached ();
2803 size = sizeof (aud_data);
2804 if (gst_h264_bit_writer_aud (primary_pic_type, TRUE, aud_data,
2805 &size) != GST_H264_BIT_WRITER_OK) {
2806 GST_ERROR_OBJECT (self, "Failed to generate the AUD");
2810 if (!gst_va_encoder_add_packed_header (self->encoder, frame->picture,
2811 VAEncPackedHeaderRawData, aud_data, size, FALSE)) {
2812 GST_ERROR_OBJECT (self, "Failed to add the AUD");
/* Encodes one frame: on IDR (poc == 0) re-submits all misc parameters, SPS
 * and packed sequence header; optionally adds an AUD; builds list0 (forward,
 * nearest-first) and list1 (backward, for B frames) from ref_list; submits
 * the picture parameter and packed PPS; splits the picture into num_slices
 * slices and submits each (plus its packed header); finally triggers
 * gst_va_encoder_encode(). */
2820 gst_va_h264_enc_encode_frame (GstVaH264Enc * self, GstVaH264EncFrame * frame)
2822 VAEncPictureParameterBufferH264 pic_param;
2824 GstVaH264EncFrame *list0[16] = { NULL, };
2825 guint list0_num = 0;
2826 GstVaH264EncFrame *list1[16] = { NULL, };
2827 guint list1_num = 0;
2828 guint slice_of_mbs, slice_mod_mbs, slice_start_mb, slice_mbs;
2831 /* Repeat the SPS for IDR. */
2832 if (frame->poc == 0) {
2833 VAEncSequenceParameterBufferH264 sequence;
2835 if (!_add_rate_control_parameter (self, frame->picture))
2838 if (!_add_quality_level_parameter (self, frame->picture))
2841 if (!_add_frame_rate_parameter (self, frame->picture))
2844 if (!_add_hrd_parameter (self, frame->picture))
2847 if (!_add_trellis_parameter (self, frame->picture))
2850 _fill_sequence_param (self, &sequence);
2851 if (!_fill_sps (self, &sequence))
2854 if (!_add_sequence_parameter (self, frame->picture, &sequence))
2857 if ((self->packed_headers & VA_ENC_PACKED_HEADER_SEQUENCE)
2858 && !_add_sequence_header (self, frame))
/* AUD is only emitted when the driver accepts packed raw data. */
2862 if (self->prop.aud) {
2863 if ((self->packed_headers & VA_ENC_PACKED_HEADER_RAW_DATA)
2864 && !_add_aud (self, frame))
2868 /* Non I frame, construct reference list. */
2869 if (frame->type != GST_H264_I_SLICE) {
2870 GstVaH264EncFrame *f;
/* Forward refs: every queued frame with POC below the current one. */
2872 for (i = g_queue_get_length (&self->ref_list) - 1; i >= 0; i--) {
2873 f = g_queue_peek_nth (&self->ref_list, i);
2874 if (f->poc > frame->poc)
2877 list0[list0_num] = f;
2881 /* reorder to select the most nearest forward frames. */
2882 g_qsort_with_data (list0, list0_num, sizeof (gpointer),
2883 (GCompareDataFunc) _poc_des_compare, NULL);
2885 if (list0_num > self->gop.ref_num_list0)
2886 list0_num = self->gop.ref_num_list0;
/* Backward refs (list1) only needed for B frames. */
2889 if (frame->type == GST_H264_B_SLICE) {
2890 GstVaH264EncFrame *f;
2892 for (i = 0; i < g_queue_get_length (&self->ref_list); i++) {
2893 f = g_queue_peek_nth (&self->ref_list, i);
2894 if (f->poc < frame->poc)
2897 list1[list1_num] = f;
2901 /* reorder to select the most nearest backward frames. */
2902 g_qsort_with_data (list1, list1_num, sizeof (gpointer),
2903 (GCompareDataFunc) _poc_asc_compare, NULL);
2905 if (list1_num > self->gop.ref_num_list1)
2906 list1_num = self->gop.ref_num_list1;
2909 g_assert (list0_num + list1_num <= self->gop.num_ref_frames);
2911 if (!_fill_picture_parameter (self, frame, &pic_param))
2913 if (!_add_picture_parameter (self, frame, &pic_param))
2915 _fill_pps (&pic_param, &self->sequence_hdr, &pps);
/* Packed PPS is only repeated on I frames. */
2917 if ((self->packed_headers & VA_ENC_PACKED_HEADER_PICTURE)
2918 && frame->type == GST_H264_I_SLICE
2919 && !_add_picture_header (self, frame, &pps))
/* Distribute the macroblocks evenly across num_slices slices. */
2922 slice_of_mbs = self->mb_width * self->mb_height / self->num_slices;
2923 slice_mod_mbs = self->mb_width * self->mb_height % self->num_slices;
2926 for (i = 0; i < self->num_slices; i++) {
2927 VAEncSliceParameterBufferH264 slice;
2929 slice_mbs = slice_of_mbs;
2930 /* divide the remainder to each equally */
2931 if (slice_mod_mbs) {
2936 if (!_add_one_slice (self, frame, slice_start_mb, slice_mbs, &slice,
2937 list0, list0_num, list1, list1_num))
2940 if ((self->packed_headers & VA_ENC_PACKED_HEADER_SLICE) &&
2941 (!_add_slice_header (self, frame, &pps, &slice, list0, list0_num, list1,
2945 slice_start_mb += slice_mbs;
2948 if (!gst_va_encoder_encode (self->encoder, frame->picture)) {
2949 GST_ERROR_OBJECT (self, "Encode frame error");
/* GstVideoEncoder::start vfunc: initializes the output delay and offsets all
 * timestamps by a huge base PTS so computed DTS values (which can precede
 * the first PTS by the reorder depth) never go negative. */
2957 gst_va_h264_enc_start (GstVideoEncoder * encoder)
2959 GstVaH264Enc *self = GST_VA_H264_ENC (encoder);
2961 /* TODO: how to poll and wait for the encoded buffer. */
2962 self->preferred_output_delay = 0;
2964 /* Set the minimum pts to some huge value (1000 hours). This keeps
2965 * the dts at the start of the stream from needing to be
2967 self->start_pts = GST_SECOND * 60 * 60 * 1000;
2968 gst_video_encoder_set_min_pts (encoder, self->start_pts);
/* GstVideoEncoder::open vfunc: ensures the GstVaDisplay for the configured
 * render device exists and atomically creates the GstVaEncoder once. */
2974 gst_va_h264_enc_open (GstVideoEncoder * venc)
2976 GstVaH264Enc *encoder = GST_VA_H264_ENC (venc);
2977 GstVaH264EncClass *klass = GST_VA_H264_ENC_GET_CLASS (venc);
2978 gboolean ret = FALSE;
2980 if (!gst_va_ensure_element_data (venc, klass->render_device_path,
/* Create the VA encoder only on first open. */
2984 if (!g_atomic_pointer_get (&encoder->encoder)) {
2985 GstVaEncoder *va_encoder;
2987 va_encoder = gst_va_encoder_new (encoder->display, klass->codec);
/* gst_object_replace takes its own ref; drop the local one. */
2991 gst_object_replace ((GstObject **) (&encoder->encoder),
2992 (GstObject *) va_encoder);
2993 gst_clear_object (&va_encoder);
/* GstVideoEncoder::close vfunc: resets encoder state and releases the VA
 * encoder and display objects. */
3002 gst_va_h264_enc_close (GstVideoEncoder * venc)
3004 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3006 gst_va_h264_enc_reset_state (self);
3008 gst_clear_object (&self->encoder);
3009 gst_clear_object (&self->display);
/* GstVideoEncoder::getcaps vfunc: returns the VA encoder's sinkpad caps
 * (intersected with @filter when given), falling back to the proxied
 * template caps when no encoder caps are available. */
3015 gst_va_h264_enc_get_caps (GstVideoEncoder * venc, GstCaps * filter)
3017 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3018 GstCaps *caps = NULL, *tmp;
3021 caps = gst_va_encoder_get_sinkpad_caps (self->encoder);
3025 tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
3026 gst_caps_unref (caps);
/* Fallback: proxy the pad template caps through the base class. */
3030 caps = gst_video_encoder_proxy_getcaps (venc, NULL, filter);
3033 GST_LOG_OBJECT (self, "Returning caps %" GST_PTR_FORMAT, caps);
/* Drops every queued frame: reorder (pending input), output (encoded but
 * not pushed) and reference lists, unreffing each entry. */
3038 _flush_all_frames (GstVideoEncoder * venc)
3040 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3042 g_queue_clear_full (&self->reorder_list,
3043 (GDestroyNotify) gst_mini_object_unref);
3044 g_queue_clear_full (&self->output_list,
3045 (GDestroyNotify) gst_mini_object_unref);
3046 g_queue_clear_full (&self->ref_list, (GDestroyNotify) gst_mini_object_unref);
/* GstVideoEncoder::flush vfunc: drops all queued frames and rewinds the GOP
 * counters so encoding restarts from an IDR. */
3050 gst_va_h264_enc_flush (GstVideoEncoder * venc)
3052 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3054 _flush_all_frames (venc);
3056 /* begin from an IDR after flush. */
3057 self->gop.cur_frame_index = 0;
3058 self->gop.cur_frame_num = 0;
/* GstVideoEncoder::stop vfunc: drops queued frames, closes the VA encoder
 * context, deactivates/releases the raw-buffer pool and clears the cached
 * input/output codec states and caps. */
3064 gst_va_h264_enc_stop (GstVideoEncoder * venc)
3066 GstVaH264Enc *const self = GST_VA_H264_ENC (venc);
3068 _flush_all_frames (venc);
3070 if (!gst_va_encoder_close (self->encoder)) {
3071 GST_ERROR_OBJECT (self, "Failed to close the VA encoder");
3076 gst_buffer_pool_set_active (self->raw_pool, FALSE);
3077 gst_clear_object (&self->raw_pool);
3079 if (self->input_state)
3080 gst_video_codec_state_unref (self->input_state);
3081 self->input_state = NULL;
3082 if (self->output_state)
3083 gst_video_codec_state_unref (self->output_state);
3084 self->output_state = NULL;
3086 gst_clear_caps (&self->in_caps);
/* Returns whether @inbuf can be used directly: currently only buffers that
 * already carry a VA surface.  DMABuf import is not implemented yet. */
3092 _try_import_buffer (GstVaH264Enc * self, GstBuffer * inbuf)
3094 VASurfaceID surface;
3096 /* The VA buffer. */
3097 surface = gst_va_buffer_get_surface (inbuf);
3098 if (surface != VA_INVALID_ID)
3101 /* TODO: DMA buffer. */
/* Lazily creates and caches (self->raw_pool) a VA buffer pool used to copy
 * non-VA input buffers into encoder-usable surfaces.  The pool is built
 * from the negotiated input caps with the VA memory feature, the encoder's
 * supported surface formats, and the ENCODER usage hint; sinkpad_info is
 * updated to the allocator's actual layout.
 *
 * FIX(review): "&params" had been corrupted to a pilcrow sequence
 * ("&para;" + "ms") by a text re-encoding pass; the original token is
 * restored in both call sites below. */
3106 static GstBufferPool *
3107 _get_sinkpad_pool (GstVaH264Enc * self)
3109 GstAllocator *allocator;
3110 GstAllocationParams params = { 0, };
3111 guint size, usage_hint = VA_SURFACE_ATTRIB_USAGE_HINT_ENCODER;
3112 GArray *surface_formats = NULL;
/* Return the cached pool when it already exists. */
3116 return self->raw_pool;
3118 g_assert (self->in_caps);
3119 caps = gst_caps_copy (self->in_caps);
3120 gst_caps_set_features_simple (caps,
3121 gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_VA));
3123 gst_allocation_params_init (&params);
3125 size = GST_VIDEO_INFO_SIZE (&self->in_info);
3127 surface_formats = gst_va_encoder_get_surface_formats (self->encoder);
3129 allocator = gst_va_allocator_new (self->display, surface_formats);
3131 self->raw_pool = gst_va_pool_new_with_config (caps, size, 1, 0,
3132 usage_hint, allocator, &params);
3133 if (!self->raw_pool) {
3134 gst_object_unref (allocator);
/* Record the allocator's negotiated video layout for later frame maps. */
3138 gst_va_allocator_get_format (allocator, &self->sinkpad_info, NULL);
3140 gst_object_unref (allocator);
3142 gst_buffer_pool_set_active (self->raw_pool, TRUE);
3144 return self->raw_pool;
/* Produces a VA-backed buffer for @inbuf in *@buf: refs @inbuf directly when
 * it already carries a VA surface, otherwise acquires a buffer from the
 * sinkpad pool and copies the frame (and flags/timestamps) into it. */
3147 static GstFlowReturn
3148 _import_input_buffer (GstVaH264Enc * self, GstBuffer * inbuf, GstBuffer ** buf)
3150 GstBuffer *buffer = NULL;
3151 GstBufferPool *pool;
3153 GstVideoFrame in_frame, out_frame;
3154 gboolean imported, copied;
3156 imported = _try_import_buffer (self, inbuf);
/* Zero-copy path: the input already is a VA buffer. */
3158 *buf = gst_buffer_ref (inbuf);
3162 /* input buffer doesn't come from a vapool, thus it is required to
3163 * have a pool, grab from it a new buffer and copy the input
3164 * buffer to the new one */
3165 if (!(pool = _get_sinkpad_pool (self)))
3166 return GST_FLOW_ERROR;
3168 ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL);
3169 if (ret != GST_FLOW_OK)
3172 GST_LOG_OBJECT (self, "copying input frame");
3174 if (!gst_video_frame_map (&in_frame, &self->in_info, inbuf, GST_MAP_READ))
3175 goto invalid_buffer;
3176 if (!gst_video_frame_map (&out_frame, &self->sinkpad_info, buffer,
/* Unmap the input before bailing so no mapping leaks. */
3178 gst_video_frame_unmap (&in_frame);
3179 goto invalid_buffer;
3182 copied = gst_video_frame_copy (&out_frame, &in_frame);
3184 gst_video_frame_unmap (&out_frame);
3185 gst_video_frame_unmap (&in_frame);
3188 goto invalid_buffer;
3190 /* strictly speaking this is not needed but let's play safe */
3191 if (!gst_buffer_copy_into (buffer, inbuf, GST_BUFFER_COPY_FLAGS |
3192 GST_BUFFER_COPY_TIMESTAMPS, 0, -1))
3193 return GST_FLOW_ERROR;
/* Error path: warn and release the acquired pool buffer. */
3201 GST_ELEMENT_WARNING (self, CORE, NOT_IMPLEMENTED, (NULL),
3202 ("invalid video buffer received"));
3204 gst_buffer_unref (buffer);
3205 return GST_FLOW_ERROR;
/* Waits for @frame_enc's surface to finish (vaSyncSurface), copies the coded
 * bitstream into a freshly allocated output buffer, computes PTS/DTS from
 * start_pts, frame_duration and the reorder depth, and finishes the codec
 * frame downstream.  Consumes @frame_enc in all paths. */
3209 static GstFlowReturn
3210 _push_buffer_to_downstream (GstVaH264Enc * self, GstVaH264EncFrame * frame_enc)
3212 GstVideoCodecFrame *frame;
3215 GstBuffer *buf = NULL;
3216 VASurfaceID surface;
3217 VADisplay dpy = gst_va_display_get_va_dpy (self->display);
3220 frame = frame_enc->frame;
/* Block until the driver finished encoding this surface. */
3222 surface = gst_va_encode_picture_get_raw_surface (frame_enc->picture);
3223 status = vaSyncSurface (dpy, surface);
3224 if (status != VA_STATUS_SUCCESS) {
3225 GST_WARNING ("vaSyncSurface: %s", vaErrorStr (status));
3229 coded_size = gst_va_encode_picture_get_coded_size (frame_enc->picture);
3230 if (coded_size <= 0) {
3231 GST_ERROR_OBJECT (self, "Failed to get the coded size,");
3235 buf = gst_video_encoder_allocate_output_buffer (GST_VIDEO_ENCODER_CAST (self),
3238 GST_ERROR_OBJECT (self, "Failed to allocate output buffer, size %d",
3243 if (!gst_va_encode_picture_copy_coded_data (frame_enc->picture, buf)) {
3244 GST_ERROR_OBJECT (self, "Failed to copy output buffer, size %d",
/* PTS follows input order; DTS lags by num_reorder_frames so DTS <= PTS. */
3250 self->start_pts + self->frame_duration * frame_enc->total_frame_count;
3251 /* The PTS should always be later than the DTS. */
3252 frame->dts = self->start_pts + self->frame_duration *
3253 ((gint64) self->output_frame_count -
3254 (gint64) self->gop.num_reorder_frames);
3255 self->output_frame_count++;
3256 frame->duration = self->frame_duration;
/* frame_enc is no longer needed; the codec frame carries the output. */
3258 gst_clear_mini_object ((GstMiniObject **) & frame_enc);
3259 gst_buffer_replace (&frame->output_buffer, buf);
3260 gst_clear_buffer (&buf);
3262 GST_LOG_OBJECT (self, "Push to downstream: frame system_frame_number: %d,"
3263 " pts: %" GST_TIME_FORMAT ", dts: %" GST_TIME_FORMAT
3264 " duration: %" GST_TIME_FORMAT ", buffer size: %" G_GSIZE_FORMAT,
3265 frame->system_frame_number, GST_TIME_ARGS (frame->pts),
3266 GST_TIME_ARGS (frame->dts), GST_TIME_ARGS (frame->duration),
3267 gst_buffer_get_size (frame->output_buffer));
3269 ret = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), frame);
/* Error path: drop everything but still finish the frame downstream. */
3273 gst_clear_mini_object ((GstMiniObject **) & frame_enc);
3274 gst_clear_buffer (&buf);
3275 gst_clear_buffer (&frame->output_buffer);
3276 gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), frame);
3277 return GST_FLOW_ERROR;
/* Pushes @in_frame (may be NULL when draining) into the subclass reorder
 * list and pops the next frame ready for encoding into *@out_frame (NULL if
 * none is ready yet).  @bump_all forces every queued frame out (drain). */
3281 _reorder_frame (GstVideoEncoder * venc, GstVaH264EncFrame * in_frame,
3282 gboolean bump_all, GstVaH264EncFrame ** out_frame)
3284 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3285 GstVaH264EncClass *klass = GST_VA_H264_ENC_GET_CLASS (self);
3286 GstVaH264EncFrame *frame_out = NULL;
3288 g_assert (klass->push_frame);
3289 if (!klass->push_frame (self, in_frame, bump_all)) {
3290 GST_ERROR_OBJECT (self, "Failed to push the input frame"
3291 " system_frame_number: %d into the reorder list",
3292 in_frame->frame->system_frame_number);
3298 g_assert (klass->pop_frame);
3299 if (!klass->pop_frame (self, &frame_out)) {
3300 GST_ERROR_OBJECT (self, "Failed to pop the frame from the reorder list");
3305 *out_frame = frame_out;
/* GCompareDataFunc for g_queue_sort: ascending frame_num order; duplicate
 * frame_nums in the reference queue would be a state bug. */
3310 _sort_by_frame_num (gconstpointer a, gconstpointer b, gpointer user_data)
3312 GstVaH264EncFrame *frame1 = (GstVaH264EncFrame *) a;
3313 GstVaH264EncFrame *frame2 = (GstVaH264EncFrame *) b;
3315 g_assert (frame1->frame_num != frame2->frame_num);
3317 return frame1->frame_num - frame2->frame_num;
/* Picks which reference frame @frame will evict from ref_list when the list
 * is full.  Without b_pyramid (or for I/P frames) plain sliding window is
 * used (oldest frame_num, i.e. the queue head).  With b_pyramid and a B
 * frame, the B reference with the lowest POC is chosen; if that is not the
 * sliding-window candidate, frame->unused_for_reference_pic_num is set so an
 * explicit MMCO marking is emitted in the slice header. */
3320 static GstVaH264EncFrame *
3321 _find_unused_reference_frame (GstVaH264Enc * self, GstVaH264EncFrame * frame)
3324 GstVaH264EncFrame *b_frame;
3326 /* We still have more space. */
3327 if (g_queue_get_length (&self->ref_list) < self->gop.num_ref_frames)
3330 /* Not b_pyramid, sliding window is enough. */
3331 if (!self->gop.b_pyramid)
3332 return g_queue_peek_head (&self->ref_list);
3334 /* I/P frame, just using sliding window. */
3335 if (frame->type != GST_H264_B_SLICE)
3336 return g_queue_peek_head (&self->ref_list);
3338 /* Choose the B frame with lowest POC. */
3340 for (i = 0; i < g_queue_get_length (&self->ref_list); i++) {
3341 GstVaH264EncFrame *f;
3343 f = g_queue_peek_nth (&self->ref_list, i);
3345 if (f->type != GST_H264_B_SLICE)
3353 g_assert (f->poc != b_frame->poc);
3354 if (f->poc < b_frame->poc)
3358 /* No B frame as ref. */
3360 return g_queue_peek_head (&self->ref_list);
/* Explicit marking needed: the victim is not the sliding-window head. */
3362 if (b_frame != g_queue_peek_head (&self->ref_list)) {
3363 frame->unused_for_reference_pic_num = b_frame->frame_num;
3364 GST_LOG_OBJECT (self, "The frame with POC: %d, pic_num %d will be"
3365 " replaced by the frame with POC: %d, pic_num %d explicitly by"
3366 " using memory_management_control_operation=1",
3367 b_frame->poc, b_frame->frame_num, frame->poc, frame->frame_num);
/* Encodes @frame: creates its VA encode picture, determines (for reference
 * frames) which old reference it evicts, calls the subclass encode_frame
 * vfunc, queues the frame for output, and maintains ref_list (evict victim,
 * append the new reference, keep sorted by frame_num). */
3373 static GstFlowReturn
3374 _encode_frame (GstVideoEncoder * venc, GstVaH264EncFrame * frame)
3376 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3377 GstVaH264EncClass *klass = GST_VA_H264_ENC_GET_CLASS (self);
3378 GstVaH264EncFrame *unused_ref = NULL;
3380 g_assert (frame->picture == NULL);
3381 frame->picture = gst_va_encode_picture_new (self->encoder,
3382 frame->frame->input_buffer);
3384 if (!frame->picture) {
3385 GST_ERROR_OBJECT (venc, "Failed to create the encode picture");
3386 return GST_FLOW_ERROR;
/* Must be decided before encoding: the slice header may carry MMCO. */
3390 unused_ref = _find_unused_reference_frame (self, frame);
3392 if (!klass->encode_frame (self, frame)) {
3393 GST_ERROR_OBJECT (venc, "Failed to encode the frame");
3394 return GST_FLOW_ERROR;
/* output_list takes the caller's reference to frame. */
3397 g_queue_push_tail (&self->output_list, frame);
3399 if (frame->is_ref) {
3401 if (!g_queue_remove (&self->ref_list, unused_ref))
3402 g_assert_not_reached ();
3404 gst_mini_object_unref ((GstMiniObject *) unused_ref);
3407 /* Add it into the reference list. */
3408 gst_mini_object_ref ((GstMiniObject *) frame);
3409 g_queue_push_tail (&self->ref_list, frame);
3410 g_queue_sort (&self->ref_list, _sort_by_frame_num, NULL);
3412 g_assert (g_queue_get_length (&self->ref_list) <= self->gop.num_ref_frames);
/* GstVideoEncoder::handle_frame vfunc: imports the input buffer into a VA
 * surface, wraps it in a GstVaH264EncFrame, feeds it through the reorder
 * list, encodes every frame that becomes ready, and pushes encoded frames
 * downstream once the output queue exceeds preferred_output_delay.  Error
 * labels finish the affected codec frame and return GST_FLOW_ERROR. */
3415 static GstFlowReturn
3419 gst_va_h264_enc_handle_frame (GstVideoEncoder * venc,
3420 GstVideoCodecFrame * frame)
3422 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3424 GstBuffer *in_buf = NULL;
3425 GstVaH264EncFrame *frame_in = NULL;
3426 GstVaH264EncFrame *frame_encode = NULL;
3427 GstVaH264EncFrame *frame_out = NULL;
3429 GST_LOG_OBJECT (venc,
3430 "handle frame id %d, dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT,
3431 frame->system_frame_number,
3432 GST_TIME_ARGS (GST_BUFFER_DTS (frame->input_buffer)),
3433 GST_TIME_ARGS (GST_BUFFER_PTS (frame->input_buffer)));
3435 ret = _import_input_buffer (self, frame->input_buffer, &in_buf);
3436 if (ret != GST_FLOW_OK)
3437 goto error_buffer_invalid;
/* Swap the codec frame's input buffer for the VA-backed one. */
3439 gst_buffer_replace (&frame->input_buffer, in_buf);
3440 gst_clear_buffer (&in_buf);
3442 frame_in = g_new (GstVaH264EncFrame, 1);
3443 gst_mini_object_init (GST_MINI_OBJECT_CAST (frame_in), 0,
3444 GST_TYPE_VA_H264_ENC_FRAME, NULL, NULL,
3445 (GstMiniObjectFreeFunction) gst_va_enc_frame_free);
3446 frame_in->last_frame = FALSE;
3447 frame_in->frame_num = 0;
/* -1 means no explicit MMCO eviction pending for this frame. */
3448 frame_in->unused_for_reference_pic_num = -1;
3449 frame_in->frame = gst_video_codec_frame_ref (frame);
3450 frame_in->picture = NULL;
3451 frame_in->total_frame_count = self->input_frame_count;
3452 self->input_frame_count++;
3454 if (!_reorder_frame (venc, frame_in, FALSE, &frame_encode))
3457 /* pass it to reorder list and we should not use it again. */
/* Encode every frame the reorder list releases. */
3461 while (frame_encode) {
3462 ret = _encode_frame (venc, frame_encode);
3463 if (ret != GST_FLOW_OK)
/* Push finished frames once the output queue exceeds the delay. */
3466 while (g_queue_get_length (&self->output_list) >
3467 self->preferred_output_delay) {
3468 frame_out = g_queue_pop_head (&self->output_list);
3469 ret = _push_buffer_to_downstream (self, frame_out);
3470 if (ret != GST_FLOW_OK)
3471 goto error_push_buffer;
3474 frame_encode = NULL;
3475 if (!_reorder_frame (venc, NULL, FALSE, &frame_encode))
3481 error_buffer_invalid:
3483 GST_ELEMENT_ERROR (venc, STREAM, ENCODE,
3484 ("Failed to import the input frame."), (NULL));
3485 gst_clear_buffer (&in_buf);
3486 gst_clear_buffer (&frame->output_buffer);
3487 gst_video_encoder_finish_frame (venc, frame);
3492 GST_ELEMENT_ERROR (venc, STREAM, ENCODE,
3493 ("Failed to reorder the input frame."), (NULL));
3495 gst_clear_buffer (&frame_in->frame->output_buffer);
3496 gst_video_encoder_finish_frame (venc, frame_in->frame);
3498 gst_clear_mini_object ((GstMiniObject **) & frame_in);
3499 return GST_FLOW_ERROR;
3503 GST_ELEMENT_ERROR (venc, STREAM, ENCODE,
3504 ("Failed to encode the frame."), (NULL));
3505 gst_clear_buffer (&frame_encode->frame->output_buffer);
3506 gst_video_encoder_finish_frame (venc, frame_encode->frame);
3507 gst_clear_mini_object ((GstMiniObject **) & frame_encode);
3511 GST_ERROR_OBJECT (self, "Failed to push the buffer");
/* gst_va_h264_enc_drain:
 * Flush the encoder: pop every frame still held in the reorder list,
 * encode it, and push all resulting buffers downstream in order.  On any
 * failure it jumps to error_and_purge_all, which finishes (drops) the
 * failing frame and every frame still pending in the output and reorder
 * lists, so no GstVideoCodecFrame leaks out of the base class.
 * Returns GST_FLOW_OK on success, an error flow return otherwise. */
3515 static GstFlowReturn
3516 gst_va_h264_enc_drain (GstVideoEncoder * venc)
3518 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3519 GstFlowReturn ret = GST_FLOW_OK;
3520 GstVaH264EncFrame *frame_enc = NULL;
3522 GST_DEBUG_OBJECT (self, "Encoder is draining");
3524 /* Kickout all cached frames */
3525 if (!_reorder_frame (venc, NULL, TRUE, &frame_enc)) {
3526 ret = GST_FLOW_ERROR;
3527 goto error_and_purge_all;
/* When the reorder list becomes empty, this is the very last frame of
 * the stream; flag it so the encode path can finalize accordingly. */
3531 if (g_queue_is_empty (&self->reorder_list))
3532 frame_enc->last_frame = TRUE;
3534 ret = _encode_frame (venc, frame_enc);
3535 if (ret != GST_FLOW_OK)
3536 goto error_and_purge_all;
/* While draining there is no output delay: push each encoded frame
 * downstream immediately. */
3538 frame_enc = g_queue_pop_head (&self->output_list);
3539 ret = _push_buffer_to_downstream (self, frame_enc);
3541 if (ret != GST_FLOW_OK)
3542 goto error_and_purge_all;
3545 if (!_reorder_frame (venc, NULL, TRUE, &frame_enc)) {
3546 ret = GST_FLOW_ERROR;
3547 goto error_and_purge_all;
3551 g_assert (g_queue_is_empty (&self->reorder_list));
3553 /* Output all frames. */
3554 while (!g_queue_is_empty (&self->output_list)) {
3555 frame_enc = g_queue_pop_head (&self->output_list);
3556 ret = _push_buffer_to_downstream (self, frame_enc);
3558 if (ret != GST_FLOW_OK)
3559 goto error_and_purge_all;
3562 /* Also clear the reference list. */
3563 g_queue_clear_full (&self->ref_list, (GDestroyNotify) gst_mini_object_unref);
3567 error_and_purge_all:
/* Finish (drop) the frame that failed, if any, then purge everything
 * still queued.  Warnings indicate frames unexpectedly left behind. */
3569 gst_clear_buffer (&frame_enc->frame->output_buffer);
3570 gst_video_encoder_finish_frame (venc, frame_enc->frame);
3571 gst_clear_mini_object ((GstMiniObject **) & frame_enc);
3574 if (!g_queue_is_empty (&self->output_list)) {
3575 GST_WARNING_OBJECT (self, "Still %d frame in the output list"
3576 " after drain", g_queue_get_length (&self->output_list));
3577 while (!g_queue_is_empty (&self->output_list)) {
3578 frame_enc = g_queue_pop_head (&self->output_list);
3579 gst_clear_buffer (&frame_enc->frame->output_buffer);
3580 gst_video_encoder_finish_frame (venc, frame_enc->frame);
3581 gst_clear_mini_object ((GstMiniObject **) & frame_enc);
3585 if (!g_queue_is_empty (&self->reorder_list)) {
3586 GST_WARNING_OBJECT (self, "Still %d frame in the reorder list"
3587 " after drain", g_queue_get_length (&self->reorder_list));
3588 while (!g_queue_is_empty (&self->reorder_list)) {
3589 frame_enc = g_queue_pop_head (&self->reorder_list);
3590 gst_clear_buffer (&frame_enc->frame->output_buffer);
3591 gst_video_encoder_finish_frame (venc, frame_enc->frame);
3592 gst_clear_mini_object ((GstMiniObject **) & frame_enc);
3596 /* Also clear the reference list. */
3597 g_queue_clear_full (&self->ref_list, (GDestroyNotify) gst_mini_object_unref);
/* gst_va_h264_enc_finish:
 * GstVideoEncoder::finish vfunc — on EOS simply drain all queued frames. */
3602 static GstFlowReturn
3603 gst_va_h264_enc_finish (GstVideoEncoder * venc)
3605 return gst_va_h264_enc_drain (venc);
/* _allocator_from_caps:
 * Create the allocator matching @caps: a DMABuf-backed VA allocator when
 * the caps use dmabuf memory, otherwise a plain VA allocator configured
 * with the surface formats the encoder supports.
 * Returns (transfer full): the new allocator; presumably NULL if the
 * underlying constructor fails — TODO confirm against gstvaallocator. */
3608 static GstAllocator *
3609 _allocator_from_caps (GstVaH264Enc * self, GstCaps * caps)
3611 GstAllocator *allocator = NULL;
3613 if (gst_caps_is_dmabuf (caps)) {
3614 allocator = gst_va_dmabuf_allocator_new (self->display);
3616 GArray *surface_formats =
3617 gst_va_encoder_get_surface_formats (self->encoder);
3618 allocator = gst_va_allocator_new (self->display, surface_formats);
/* gst_va_h264_enc_propose_allocation:
 * GstVideoEncoder::propose_allocation vfunc.  Proposes a VA buffer pool
 * and allocator to upstream, sized from the negotiated caps, configured
 * with the ENCODER usage hint and self->preferred_output_delay minimum
 * buffers, and advertises GstVideoMeta support.
 *
 * Fix: the '&params' argument had been mangled into the mojibake
 * sequence '¶ms' (HTML-entity mis-decoding) in three calls; restored. */
3625 gst_va_h264_enc_propose_allocation (GstVideoEncoder * venc, GstQuery * query)
3627 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3628 GstAllocator *allocator = NULL;
3629 GstAllocationParams params = { 0, };
3630 GstBufferPool *pool;
3633 gboolean need_pool = FALSE;
3634 guint size, usage_hint = VA_SURFACE_ATTRIB_USAGE_HINT_ENCODER;
3636 gst_query_parse_allocation (query, &caps, &need_pool);
3640 if (!gst_video_info_from_caps (&info, caps)) {
3641 GST_ERROR_OBJECT (self, "Cannot parse caps %" GST_PTR_FORMAT, caps);
3645 size = GST_VIDEO_INFO_SIZE (&info);
3647 gst_allocation_params_init (&params);
3649 if (!(allocator = _allocator_from_caps (self, caps)))
3652 pool = gst_va_pool_new_with_config (caps,
3653 size, self->preferred_output_delay, 0, usage_hint, allocator, &params);
3655 gst_object_unref (allocator);
3659 gst_query_add_allocation_param (query, allocator, &params);
3660 gst_query_add_allocation_pool (query, pool, size,
3661 self->preferred_output_delay, 0);
3663 GST_DEBUG_OBJECT (self,
3664 "proposing %" GST_PTR_FORMAT " with allocator %" GST_PTR_FORMAT,
3667 gst_object_unref (allocator);
3668 gst_object_unref (pool);
3670 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
3677 GST_ERROR_OBJECT (self, "failed to set config");
/* gst_va_h264_enc_set_context:
 * GstElement::set_context vfunc.  Delegates VA display handling to
 * gst_va_handle_set_context() and warns (RESOURCE, BUSY) if the context
 * would replace the display while the encoder already exists. */
3683 gst_va_h264_enc_set_context (GstElement * element, GstContext * context)
3685 GstVaDisplay *old_display, *new_display;
3686 GstVaH264Enc *self = GST_VA_H264_ENC (element);
3687 GstVaH264EncClass *klass = GST_VA_H264_ENC_GET_CLASS (self);
3690 old_display = self->display ? gst_object_ref (self->display) : NULL;
3692 ret = gst_va_handle_set_context (element, context, klass->render_device_path,
3695 new_display = self->display ? gst_object_ref (self->display) : NULL;
/* Replacing the VA display mid-operation is not supported. */
3697 if (!ret || (old_display && new_display && old_display != new_display
3698 && self->encoder)) {
3699 GST_ELEMENT_WARNING (element, RESOURCE, BUSY,
3700 ("Can't replace VA display while operating"), (NULL));
3703 gst_clear_object (&old_display);
3704 gst_clear_object (&new_display);
3706 GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
/* gst_va_h264_enc_set_format:
 * GstVideoEncoder::set_format vfunc.  Stores the new input state/caps,
 * drains any in-flight frames, closes and reconfigures the VA encoder
 * (via klass->reconfig), reopens it with the new parameters, pushes
 * encoder tags, and negotiates fixated H.264 byte-stream/au src caps. */
3710 gst_va_h264_enc_set_format (GstVideoEncoder * venc, GstVideoCodecState * state)
3712 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3713 GstVaH264EncClass *klass = GST_VA_H264_ENC_GET_CLASS (self);
3715 guint reconstruct_buffer_num;
3717 g_return_val_if_fail (state->caps != NULL, FALSE);
3719 if (self->input_state)
3720 gst_video_codec_state_unref (self->input_state);
3721 self->input_state = gst_video_codec_state_ref (state);
3723 gst_caps_replace (&self->in_caps, state->caps);
3725 if (!gst_video_info_from_caps (&self->in_info, self->in_caps))
/* A format change must not lose queued frames: drain first. */
3728 if (gst_va_h264_enc_drain (venc) != GST_FLOW_OK)
3731 if (!gst_va_encoder_close (self->encoder)) {
3732 GST_ERROR_OBJECT (self, "Failed to close the VA encoder");
3736 g_assert (klass->reconfig);
3737 if (!klass->reconfig (self)) {
3738 GST_ERROR_OBJECT (self, "Reconfig the encoder error");
/* Reconstructed-picture surfaces: references + pipelined output delay
 * plus a few scratch frames. */
3742 reconstruct_buffer_num = self->gop.num_ref_frames
3743 + self->preferred_output_delay + 3 /* scratch frames */ ;
3744 if (!gst_va_encoder_open (self->encoder, self->profile, self->entrypoint,
3745 GST_VIDEO_INFO_FORMAT (&self->in_info), self->rt_format,
3746 self->mb_width * 16, self->mb_height * 16, self->codedbuf_size,
3747 reconstruct_buffer_num, self->rc.rc_ctrl_mode,
3748 self->packed_headers)) {
3749 GST_ERROR_OBJECT (self, "Failed to open the VA encoder.");
/* Advertise nominal bitrate, encoder name and codec via stream tags. */
3755 GstTagList *tags = gst_tag_list_new_empty ();
3756 const gchar *encoder_name;
3759 g_object_get (venc, "bitrate", &bitrate, NULL);
3761 gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_NOMINAL_BITRATE,
3765 gst_element_class_get_metadata (GST_ELEMENT_GET_CLASS (venc),
3766 GST_ELEMENT_METADATA_LONGNAME)))
3767 gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
3768 encoder_name, NULL);
3770 gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_CODEC, "H264", NULL);
3772 gst_video_encoder_merge_tags (venc, tags, GST_TAG_MERGE_REPLACE);
3773 gst_tag_list_unref (tags);
/* Build the output caps from the selected profile, pinning level (when
 * known), dimensions, AU alignment and byte-stream format. */
3776 out_caps = gst_va_profile_caps (self->profile);
3777 g_assert (out_caps);
3778 out_caps = gst_caps_fixate (out_caps);
3780 if (self->level_str)
3781 gst_caps_set_simple (out_caps, "level", G_TYPE_STRING, self->level_str,
3784 gst_caps_set_simple (out_caps, "width", G_TYPE_INT, self->width,
3785 "height", G_TYPE_INT, self->height, "alignment", G_TYPE_STRING, "au",
3786 "stream-format", G_TYPE_STRING, "byte-stream", NULL);
3788 GST_DEBUG_OBJECT (self, "output caps is %" GST_PTR_FORMAT, out_caps);
3790 if (self->output_state)
3791 gst_video_codec_state_unref (self->output_state);
3792 self->output_state = gst_video_encoder_set_output_state (venc, out_caps,
3795 if (!gst_video_encoder_negotiate (venc)) {
3796 GST_ERROR_OBJECT (self, "Failed to negotiate with the downstream");
/* _query_context:
 * Answer a GST_QUERY_CONTEXT for the VA display; takes a temporary ref on
 * self->display so the query handler never races a display swap. */
3804 _query_context (GstVaH264Enc * self, GstQuery * query)
3806 GstVaDisplay *display = NULL;
3809 gst_object_replace ((GstObject **) & display, (GstObject *) self->display);
3810 ret = gst_va_handle_context_query (GST_ELEMENT_CAST (self), query, display);
3811 gst_clear_object (&display);
/* gst_va_h264_enc_src_query:
 * GstVideoEncoder::src_query vfunc.  Handles CONTEXT queries via
 * _query_context(); for CAPS queries on a non-fixed src pad it answers
 * with the VA encoder's srcpad caps intersected with the filter, and
 * falls back to the parent class otherwise. */
3817 gst_va_h264_enc_src_query (GstVideoEncoder * venc, GstQuery * query)
3819 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3820 gboolean ret = FALSE;
3822 switch (GST_QUERY_TYPE (query)) {
3823 case GST_QUERY_CONTEXT:{
3824 ret = _query_context (self, query);
3827 case GST_QUERY_CAPS:{
3828 GstCaps *caps = NULL, *tmp, *filter = NULL;
3829 GstVaEncoder *va_encoder = NULL;
3830 gboolean fixed_caps;
/* Keep a local ref: self->encoder may be cleared concurrently. */
3832 gst_object_replace ((GstObject **) & va_encoder,
3833 (GstObject *) self->encoder);
3835 gst_query_parse_caps (query, &filter);
3837 fixed_caps = GST_PAD_IS_FIXED_CAPS (GST_VIDEO_ENCODER_SRC_PAD (venc));
3839 if (!fixed_caps && va_encoder)
3840 caps = gst_va_encoder_get_srcpad_caps (va_encoder);
3842 gst_clear_object (&va_encoder);
3846 tmp = gst_caps_intersect_full (filter, caps,
3847 GST_CAPS_INTERSECT_FIRST);
3848 gst_caps_unref (caps);
3852 GST_LOG_OBJECT (self, "Returning caps %" GST_PTR_FORMAT, caps);
3853 gst_query_set_caps_result (query, caps);
3854 gst_caps_unref (caps);
3858 /* else jump to default */
3861 ret = GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (venc, query);
/* gst_va_h264_enc_sink_query:
 * GstVideoEncoder::sink_query vfunc — CONTEXT queries go to
 * _query_context(); everything else to the parent class. */
3869 gst_va_h264_enc_sink_query (GstVideoEncoder * venc, GstQuery * query)
3871 GstVaH264Enc *self = GST_VA_H264_ENC (venc);
3873 if (GST_QUERY_TYPE (query) == GST_QUERY_CONTEXT)
3874 return _query_context (self, query);
3876 return GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (venc, query);
/* Template sink caps: NV12 raw video, VAMemory-featured and (presumably,
 * the alternative branch is not visible here) plain system memory. */
3880 static const gchar *sink_caps_str =
3881 GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_VA,
3883 GST_VIDEO_CAPS_MAKE ("{ NV12 }");
/* Template src caps; "alignment" and "stream-format" fields are appended
 * at registration time by _complete_src_caps(). */
3886 static const gchar *src_caps_str = "video/x-h264";
/* One-shot GOnce callback: initializes the "vah264enc" debug category. */
3889 _register_debug_category (gpointer data)
3891 GST_DEBUG_CATEGORY_INIT (gst_va_h264enc_debug, "vah264enc", 0,
/* gst_va_h264_enc_init:
 * GType instance init: prepares the reorder/reference/output frame queues
 * and seeds self->prop with the property defaults (these mirror the
 * defaults declared in class_init's GParamSpecs). */
3898 gst_va_h264_enc_init (GTypeInstance * instance, gpointer g_class)
3900 GstVaH264Enc *self = GST_VA_H264_ENC (instance);
3902 g_queue_init (&self->reorder_list);
3903 g_queue_init (&self->ref_list);
3904 g_queue_init (&self->output_list);
3906 /* default values */
3907 self->prop.key_int_max = 0;
3908 self->prop.num_bframes = 0;
3909 self->prop.num_iframes = 0;
3910 self->prop.num_ref_frames = 3;
3911 self->prop.b_pyramid = FALSE;
3912 self->prop.num_slices = 1;
3913 self->prop.min_qp = 1;
3914 self->prop.max_qp = 51;
3915 self->prop.qp_i = 26;
3916 self->prop.qp_p = 26;
3917 self->prop.qp_b = 26;
3918 self->prop.use_dct8x8 = TRUE;
3919 self->prop.use_cabac = TRUE;
3920 self->prop.use_trellis = FALSE;
3921 self->prop.aud = FALSE;
3922 self->prop.mbbrc = 0;
3923 self->prop.bitrate = 0;
3924 self->prop.target_percentage = 66;
3925 self->prop.target_usage = 4;
3926 self->prop.rc_ctrl = VA_RC_CBR;
3927 self->prop.cpb_size = 0;
/* gst_va_h264_enc_set_property:
 * GObject::set_property.  Properties are only cached into self->prop
 * (applied on the next reconfig); once the VA encoder is open, further
 * changes are rejected with an error log. */
3931 gst_va_h264_enc_set_property (GObject * object, guint prop_id,
3932 const GValue * value, GParamSpec * pspec)
3934 GstVaH264Enc *const self = GST_VA_H264_ENC (object);
/* No dynamic reconfiguration: refuse property writes after start. */
3936 if (self->encoder && gst_va_encoder_is_open (self->encoder)) {
3937 GST_ERROR_OBJECT (object,
3938 "failed to set any property after encoding started");
3942 GST_OBJECT_LOCK (self);
3945 case PROP_KEY_INT_MAX:
3946 self->prop.key_int_max = g_value_get_uint (value);
3949 self->prop.num_bframes = g_value_get_uint (value);
3952 self->prop.num_iframes = g_value_get_uint (value);
3954 case PROP_NUM_REF_FRAMES:
3955 self->prop.num_ref_frames = g_value_get_uint (value);
3957 case PROP_B_PYRAMID:
3958 self->prop.b_pyramid = g_value_get_boolean (value);
3960 case PROP_NUM_SLICES:
3961 self->prop.num_slices = g_value_get_uint (value);
3964 self->prop.min_qp = g_value_get_uint (value);
3967 self->prop.max_qp = g_value_get_uint (value);
3970 self->prop.qp_i = g_value_get_uint (value);
3973 self->prop.qp_p = g_value_get_uint (value);
3976 self->prop.qp_b = g_value_get_uint (value);
3979 self->prop.use_dct8x8 = g_value_get_boolean (value);
3982 self->prop.use_cabac = g_value_get_boolean (value);
3985 self->prop.use_trellis = g_value_get_boolean (value);
3988 self->prop.aud = g_value_get_boolean (value);
3991 self->prop.mbbrc = g_value_get_enum (value);
3994 self->prop.bitrate = g_value_get_uint (value);
3996 case PROP_TARGET_PERCENTAGE:
3997 self->prop.target_percentage = g_value_get_uint (value);
3999 case PROP_TARGET_USAGE:
4000 self->prop.target_usage = g_value_get_uint (value);
4002 case PROP_RATE_CONTROL:
4003 self->prop.rc_ctrl = g_value_get_enum (value);
4006 self->prop.cpb_size = g_value_get_uint (value);
4009 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
4012 GST_OBJECT_UNLOCK (self);
/* gst_va_h264_enc_get_property:
 * GObject::get_property — reads the cached self->prop values under the
 * object lock (mirror of set_property). */
4016 gst_va_h264_enc_get_property (GObject * object, guint prop_id,
4017 GValue * value, GParamSpec * pspec)
4019 GstVaH264Enc *const self = GST_VA_H264_ENC (object);
4021 GST_OBJECT_LOCK (self);
4024 case PROP_KEY_INT_MAX:
4025 g_value_set_uint (value, self->prop.key_int_max);
4028 g_value_set_uint (value, self->prop.num_bframes);
4031 g_value_set_uint (value, self->prop.num_iframes);
4033 case PROP_NUM_REF_FRAMES:
4034 g_value_set_uint (value, self->prop.num_ref_frames);
4036 case PROP_B_PYRAMID:
4037 g_value_set_boolean (value, self->prop.b_pyramid);
4039 case PROP_NUM_SLICES:
4040 g_value_set_uint (value, self->prop.num_slices);
4043 g_value_set_uint (value, self->prop.min_qp);
4046 g_value_set_uint (value, self->prop.max_qp);
4049 g_value_set_uint (value, self->prop.qp_i);
4052 g_value_set_uint (value, self->prop.qp_p);
4055 g_value_set_uint (value, self->prop.qp_b);
4058 g_value_set_boolean (value, self->prop.use_dct8x8);
4061 g_value_set_boolean (value, self->prop.use_cabac);
4064 g_value_set_boolean (value, self->prop.use_trellis);
4067 g_value_set_boolean (value, self->prop.aud);
4070 g_value_set_enum (value, self->prop.mbbrc);
4073 g_value_set_uint (value, self->prop.bitrate);
4075 case PROP_TARGET_PERCENTAGE:
4076 g_value_set_uint (value, self->prop.target_percentage);
4078 case PROP_TARGET_USAGE:
4079 g_value_set_uint (value, self->prop.target_usage);
4081 case PROP_RATE_CONTROL:
4082 g_value_set_enum (value, self->prop.rc_ctrl);
4085 g_value_set_uint (value, self->prop.cpb_size);
4088 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
4091 GST_OBJECT_UNLOCK (self);
4096 gchar *render_device_path;
/* gst_va_h264_enc_class_init:
 * GObject class init.  Consumes the per-device class data (@class_data,
 * a struct CData built in gst_va_h264_enc_register): element metadata,
 * pad templates with documentation caps, GstElement/GstVideoEncoder
 * vfunc wiring, subclass hooks, and all encoder properties. */
4103 gst_va_h264_enc_class_init (gpointer g_klass, gpointer class_data)
4105 GstCaps *src_doc_caps, *sink_doc_caps;
4106 GObjectClass *const object_class = G_OBJECT_CLASS (g_klass);
4107 GstElementClass *const element_class = GST_ELEMENT_CLASS (g_klass);
4108 GstVideoEncoderClass *const venc_class = GST_VIDEO_ENCODER_CLASS (g_klass);
4109 GstVaH264EncClass *const klass = GST_VA_H264_ENC_CLASS (g_klass);
4110 GstPadTemplate *sink_pad_templ, *src_pad_templ;
4111 struct CData *cdata = class_data;
4114 parent_class = g_type_class_peek_parent (g_klass);
4116 klass->render_device_path = g_strdup (cdata->render_device_path);
4117 klass->codec = H264;
/* Per-device long name: non-first devices carry a description suffix. */
4119 if (cdata->description) {
4120 long_name = g_strdup_printf ("VA-API H.264 Encoder in %s",
4121 cdata->description);
4123 long_name = g_strdup ("VA-API H.264 Encoder");
4126 gst_element_class_set_metadata (element_class, long_name,
4127 "Codec/Encoder/Video/Hardware", "VA-API based H.264 video encoder",
4128 "He Junyan <junyan.he@intel.com>");
/* Pad templates; the static caps strings are only used as the
 * documentation caps, the real ones come from cdata. */
4130 sink_doc_caps = gst_caps_from_string (sink_caps_str);
4131 src_doc_caps = gst_caps_from_string (src_caps_str);
4133 sink_pad_templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
4135 gst_element_class_add_pad_template (element_class, sink_pad_templ);
4137 gst_pad_template_set_documentation_caps (sink_pad_templ, sink_doc_caps);
4138 gst_caps_unref (sink_doc_caps);
4140 src_pad_templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
4142 gst_element_class_add_pad_template (element_class, src_pad_templ);
4144 gst_pad_template_set_documentation_caps (src_pad_templ, src_doc_caps);
4145 gst_caps_unref (src_doc_caps);
/* Virtual method overrides. */
4147 object_class->set_property = gst_va_h264_enc_set_property;
4148 object_class->get_property = gst_va_h264_enc_get_property;
4150 element_class->set_context = GST_DEBUG_FUNCPTR (gst_va_h264_enc_set_context);
4151 venc_class->open = GST_DEBUG_FUNCPTR (gst_va_h264_enc_open);
4152 venc_class->start = GST_DEBUG_FUNCPTR (gst_va_h264_enc_start);
4153 venc_class->close = GST_DEBUG_FUNCPTR (gst_va_h264_enc_close);
4154 venc_class->stop = GST_DEBUG_FUNCPTR (gst_va_h264_enc_stop);
4155 venc_class->handle_frame = GST_DEBUG_FUNCPTR (gst_va_h264_enc_handle_frame);
4156 venc_class->finish = GST_DEBUG_FUNCPTR (gst_va_h264_enc_finish);
4157 venc_class->flush = GST_DEBUG_FUNCPTR (gst_va_h264_enc_flush);
4158 venc_class->set_format = GST_DEBUG_FUNCPTR (gst_va_h264_enc_set_format);
4159 venc_class->getcaps = GST_DEBUG_FUNCPTR (gst_va_h264_enc_get_caps);
4160 venc_class->propose_allocation =
4161 GST_DEBUG_FUNCPTR (gst_va_h264_enc_propose_allocation);
4162 venc_class->src_query = GST_DEBUG_FUNCPTR (gst_va_h264_enc_src_query);
4163 venc_class->sink_query = GST_DEBUG_FUNCPTR (gst_va_h264_enc_sink_query);
/* Subclass hooks. */
4165 klass->reconfig = GST_DEBUG_FUNCPTR (gst_va_h264_enc_reconfig);
4166 klass->push_frame = GST_DEBUG_FUNCPTR (gst_va_h264_enc_push_frame);
4167 klass->pop_frame = GST_DEBUG_FUNCPTR (gst_va_h264_enc_pop_frame);
4168 klass->encode_frame = GST_DEBUG_FUNCPTR (gst_va_h264_enc_encode_frame);
/* cdata strings were copied above; caps ownership transferred to the
 * pad templates, so the class data can be released now. */
4171 g_free (cdata->description);
4172 g_free (cdata->render_device_path);
4173 gst_caps_unref (cdata->src_caps);
4174 gst_caps_unref (cdata->sink_caps);
/**
4178 * GstVaH264Enc:key-int-max:
4180 * The maximal distance between two keyframes.
 */
4182 properties[PROP_KEY_INT_MAX] = g_param_spec_uint ("key-int-max",
4183 "Key frame maximal interval",
4184 "The maximal distance between two keyframes. It decides the size of GOP"
4185 " (0: auto-calculate)", 0, MAX_GOP_SIZE, 0,
4186 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4189 * GstVaH264Enc:b-frames:
4191 * Number of B-frames between two reference frames.
 */
4193 properties[PROP_BFRAMES] = g_param_spec_uint ("b-frames", "B Frames",
4194 "Number of B frames between I and P reference frames", 0, 31, 0,
4195 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4196 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4199 * GstVaH264Enc:i-frames:
4201 * Force the number of i-frames insertion within one GOP.
 */
4203 properties[PROP_IFRAMES] = g_param_spec_uint ("i-frames", "I Frames",
4204 "Force the number of I frames insertion within one GOP, not including the "
4205 "first IDR frame", 0, 1023, 0,
4206 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4209 * GstVaH264Enc:ref-frames:
4211 * The number of reference frames.
 */
4213 properties[PROP_NUM_REF_FRAMES] = g_param_spec_uint ("ref-frames",
4214 "Number of Reference Frames",
4215 "Number of reference frames, including both the forward and the backward",
4216 0, 16, 3, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4219 * GstVaH264Enc:b-pyramid:
4221 * Enable the b-pyramid reference structure in GOP.
 */
4223 properties[PROP_B_PYRAMID] = g_param_spec_boolean ("b-pyramid", "b pyramid",
4224 "Enable the b-pyramid reference structure in the GOP", FALSE,
4225 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4226 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4228 * GstVaH264Enc:num-slices:
4230 * The number of slices per frame.
 */
4232 properties[PROP_NUM_SLICES] = g_param_spec_uint ("num-slices",
4233 "Number of Slices", "Number of slices per frame", 1, 200, 1,
4234 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4235 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4238 * GstVaH264Enc:max-qp:
4240 * The maximum quantizer value.
 */
4242 properties[PROP_MAX_QP] = g_param_spec_uint ("max-qp", "Maximum QP",
4243 "Maximum quantizer value for each frame", 0, 51, 51,
4244 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4247 * GstVaH264Enc:min-qp:
4249 * The minimum quantizer value.
 */
4251 properties[PROP_MIN_QP] = g_param_spec_uint ("min-qp", "Minimum QP",
4252 "Minimum quantizer value for each frame", 0, 51, 1,
4253 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4258 * The quantizer value for I frame. In CQP mode, it specifies the QP of
4259 * I frame, in other mode, it specifies the init QP of all frames.
 */
4261 properties[PROP_QP_I] = g_param_spec_uint ("qpi", "I Frame QP",
4262 "The quantizer value for I frame. In CQP mode, it specifies the QP of I "
4263 "frame, in other mode, it specifies the init QP of all frames", 0, 51, 26,
4264 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4269 * The quantizer value for P frame. This is available only in CQP mode.
 */
4271 properties[PROP_QP_P] = g_param_spec_uint ("qpp",
4272 "The quantizer value for P frame",
4273 "The quantizer value for P frame. This is available only in CQP mode",
4275 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4280 * The quantizer value for B frame. This is available only in CQP mode.
 */
4282 properties[PROP_QP_B] = g_param_spec_uint ("qpb",
4283 "The quantizer value for B frame",
4284 "The quantizer value for B frame. This is available only in CQP mode",
4286 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4289 * GstVaH264Enc:dct8x8:
4291 * Enable adaptive use of 8x8 transforms in I-frames. This improves
4292 * the compression ratio but requires high profile at least.
 */
4294 properties[PROP_DCT8X8] = g_param_spec_boolean ("dct8x8",
4296 "Enable adaptive use of 8x8 transforms in I-frames", TRUE,
4297 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4298 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4301 * GstVaH264Enc:cabac:
4303 * It enables CABAC entropy coding mode to improve compression ratio,
4304 * but requires main profile at least.
 */
4306 properties[PROP_CABAC] = g_param_spec_boolean ("cabac", "Enable CABAC",
4307 "Enable CABAC entropy coding mode", TRUE,
4308 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4309 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4312 * GstVaH264Enc:trellis:
4314 * It enables the trellis quantization method.
4315 * Trellis is an improved quantization algorithm.
 */
4317 properties[PROP_TRELLIS] = g_param_spec_boolean ("trellis", "Enable trellis",
4318 "Enable the trellis quantization method", FALSE,
4319 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4320 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4325 * Insert the AU (Access Unit) delimiter for each frame.
 */
4327 properties[PROP_AUD] = g_param_spec_boolean ("aud", "Insert AUD",
4328 "Insert AU (Access Unit) delimeter for each frame", FALSE,
4329 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4332 * GstVaH264Enc:mbbrc:
4334 * Macroblock level bitrate control.
4335 * This is not compatible with Constant QP rate control.
 */
4337 properties[PROP_MBBRC] = g_param_spec_enum ("mbbrc",
4338 "Macroblock level Bitrate Control",
4339 "Macroblock level Bitrate Control. It is not compatible with CQP",
4340 gst_va_h264_enc_mbbrc_get_type (), 0,
4341 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4342 GST_PARAM_CONDITIONALLY_AVAILABLE);
/**
4345 * GstVaH264Enc:bitrate:
4347 * The desired target bitrate, expressed in kbps.
4348 * This is not available in CQP mode.
 *
4350 * CBR: This applies equally to the minimum, maximum and target bitrate.
4351 * VBR: This applies to the target bitrate. The driver will use the
4352 * "target-percentage" together to calculate the minimum and maximum bitrate.
4353 * VCM: This applies to the target bitrate. The minimum and maximum bitrate
 */
4356 properties[PROP_BITRATE] = g_param_spec_uint ("bitrate", "Bitrate (kbps)",
4357 "The desired bitrate expressed in kbps (0: auto-calculate)",
4359 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4362 * GstVaH264Enc:target-percentage:
4364 * The target percentage of the max bitrate, and expressed in uint,
4365 * equal to "target percentage"*100.
4366 * "target percentage" = "target bitrate" * 100 / "max bitrate"
4367 * This is available only when rate-control is VBR.
4368 * The driver uses it to calculate the minimum and maximum bitrate.
 */
4370 properties[PROP_TARGET_PERCENTAGE] = g_param_spec_uint ("target-percentage",
4371 "target bitrate percentage",
4372 "The percentage for 'target bitrate'/'maximum bitrate' (Only in VBR)",
4374 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4377 * GstVaH264Enc:target-usage:
4379 * The target usage of the encoder. It controls and balances the encoding
4380 * speed and the encoding quality. The lower value has better quality but
4381 * slower speed, the higher value has faster speed but lower quality.
 */
4383 properties[PROP_TARGET_USAGE] = g_param_spec_uint ("target-usage",
4385 "The target usage to control and balance the encoding speed/quality",
4386 1, 7, 4, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4389 * GstVaH264Enc:cpb-size:
4391 * The desired max CPB size in Kb (0: auto-calculate).
 */
4393 properties[PROP_CPB_SIZE] = g_param_spec_uint ("cpb-size",
4394 "max CPB size in Kb",
4395 "The desired max CPB size in Kb (0: auto-calculate)", 0, 2000 * 1024, 0,
4396 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT);
/**
4399 * GstVaH264Enc:rate-control:
4401 * The desired rate control mode for the encoder.
 */
4403 properties[PROP_RATE_CONTROL] = g_param_spec_enum ("rate-control",
4404 "rate control mode", "The desired rate control mode for the encoder",
4405 gst_va_h264_enc_rate_control_get_type (), VA_RC_CBR,
4406 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT |
4407 GST_PARAM_CONDITIONALLY_AVAILABLE);
4409 g_object_class_install_properties (object_class, N_PROPERTIES, properties);
/* Expose the enum types in the plugin documentation cache. */
4411 gst_type_mark_as_plugin_api (gst_va_h264_enc_rate_control_get_type (), 0);
4412 gst_type_mark_as_plugin_api (gst_va_h264_enc_mbbrc_get_type (), 0);
/* _complete_src_caps:
 * Returns (transfer full): a copy of @srccaps with "alignment" set to
 * "au" and "stream-format" set to "byte-stream". */
4416 _complete_src_caps (GstCaps * srccaps)
4418 GstCaps *caps = gst_caps_copy (srccaps);
4419 GValue val = G_VALUE_INIT;
4421 g_value_init (&val, G_TYPE_STRING);
4422 g_value_set_string (&val, "au");
4423 gst_caps_set_value (caps, "alignment", &val);
4424 g_value_unset (&val);
4426 g_value_init (&val, G_TYPE_STRING);
4427 g_value_set_string (&val, "byte-stream");
4428 gst_caps_set_value (caps, "stream-format", &val);
4429 g_value_unset (&val);
/* gst_va_h264_enc_register:
 * Registers a vah264enc element for @device with @plugin.  Builds the
 * per-device class data (render device path plus sink/src caps, with the
 * src caps completed to au/byte-stream) and registers a dynamic GType
 * derived from GST_TYPE_VIDEO_ENCODER. */
4435 gst_va_h264_enc_register (GstPlugin * plugin, GstVaDevice * device,
4436 GstCaps * sink_caps, GstCaps * src_caps, guint rank)
4438 static GOnce debug_once = G_ONCE_INIT;
4440 GTypeInfo type_info = {
4441 .class_size = sizeof (GstVaH264EncClass),
4442 .class_init = gst_va_h264_enc_class_init,
4443 .instance_size = sizeof (GstVaH264Enc),
4444 .instance_init = gst_va_h264_enc_init,
4446 struct CData *cdata;
4448 gchar *type_name, *feature_name;
4450 g_return_val_if_fail (GST_IS_PLUGIN (plugin), FALSE);
4451 g_return_val_if_fail (GST_IS_VA_DEVICE (device), FALSE);
4452 g_return_val_if_fail (GST_IS_CAPS (sink_caps), FALSE);
4453 g_return_val_if_fail (GST_IS_CAPS (src_caps), FALSE);
4455 cdata = g_new (struct CData, 1);
4456 cdata->description = NULL;
4457 cdata->render_device_path = g_strdup (device->render_device_path);
4458 cdata->sink_caps = gst_caps_ref (sink_caps);
4459 cdata->src_caps = _complete_src_caps (src_caps);
4461 /* class data will be leaked if the element never gets instantiated */
4462 GST_MINI_OBJECT_FLAG_SET (cdata->sink_caps,
4463 GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
4464 GST_MINI_OBJECT_FLAG_SET (cdata->src_caps,
4465 GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
4467 type_info.class_data = cdata;
4468 type_name = g_strdup ("GstVaH264Enc");
4469 feature_name = g_strdup ("vah264enc");
4471 /* The first encoder to be registered uses the constant name vah264enc;
 * for any additional encoder we create a unique name by inserting the
 * render device basename. */
4474 if (g_type_from_name (type_name)) {
4475 gchar *basename = g_path_get_basename (device->render_device_path);
4477 g_free (feature_name);
4478 type_name = g_strdup_printf ("GstVa%sH264Enc", basename);
4479 feature_name = g_strdup_printf ("va%sh264enc", basename);
4480 cdata->description = basename;
4481 /* lower rank for non-first device */
4486 g_once (&debug_once, _register_debug_category, NULL);
4487 type = g_type_register_static (GST_TYPE_VIDEO_ENCODER,
4488 type_name, &type_info, 0);
4489 ret = gst_element_register (plugin, feature_name, rank, type);
4492 g_free (feature_name);