{
guint i;
- GST_DEBUG("reference picture marking process (adaptive memory control)");
-
typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
GstVaapiDecoderH264 *decoder,
GstVaapiPictureH264 *picture,
exec_ref_pic_marking_adaptive_mmco_6,
};
+ GST_DEBUG("reference picture marking process (adaptive memory control)");
+
for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
GstH264RefPicMarking * const ref_pic_marking =
&dec_ref_pic_marking->ref_pic_marking[i];
if (priv->is_svh) {
guint temp_ref = priv->svh_hdr.temporal_reference;
+ guint delta_ref;
+
if (temp_ref < priv->prev_t_ref) {
temp_ref += 256;
}
- guint delta_ref = temp_ref - priv->prev_t_ref;
+ delta_ref = temp_ref - priv->prev_t_ref;
pts = priv->sync_time;
// see temporal_reference definition in spec, 30000/1001Hz
pic_param->num_macroblocks_in_gob = priv->svh_hdr.num_macroblocks_in_gob;
}
else {
+ int i;
+
// VOL parameters
pic_param->vol_fields.bits.short_video_header = 0;
pic_param->vol_fields.bits.chroma_format = priv->vol_hdr.chroma_format;
pic_param->vol_fields.bits.reversible_vlc = priv->vol_hdr.reversible_vlc;
pic_param->vol_fields.bits.resync_marker_disable = priv->vol_hdr.resync_marker_disable;
pic_param->no_of_sprite_warping_points = priv->vol_hdr.no_of_sprite_warping_points;
- int i =0;
+
for (i=0; i<3 && i<priv->vol_hdr.no_of_sprite_warping_points ; i++) {
pic_param->sprite_trajectory_du[i] = priv->sprite_trajectory.vop_ref_points[i];
pic_param->sprite_trajectory_dv[i] = priv->sprite_trajectory.sprite_ref_points[i];
status = decode_gop(decoder, packet.data + packet.offset, packet.size);
}
else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
+ GstMpeg4Packet video_packet;
+ const guint8 *_data;
+ gint _data_size;
+
status = decode_picture(decoder, packet.data + packet.offset, packet.size);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
* while MB doesn't start from byte boundary -- it is what 'macroblock_offset'
* in slice refer to
*/
- const guint8 *_data = packet.data + packet.offset + priv->vop_hdr.size/8;
- gint _data_size = packet.size - (priv->vop_hdr.size/8);
- GstMpeg4Packet video_packet;
+ _data = packet.data + packet.offset + priv->vop_hdr.size/8;
+ _data_size = packet.size - (priv->vop_hdr.size/8);
if (priv->vol_hdr.resync_marker_disable) {
status = decode_slice(decoder, _data, _data_size, FALSE);
return status;
}
else {
+ GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
+ gboolean first_slice = TRUE;
+
// next start_code is required to determine the end of last slice
_data_size += 4;
- GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
- gboolean first_slice = TRUE;
while (_data_size > 0) {
// we can skip user data here
ret = gst_mpeg4_parse(&video_packet, TRUE, &priv->vop_hdr, _data, 0, _data_size);
GstVaapiDecoderMpeg4 * const decoder =
GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
+ GstMpeg4Packet packet;
guchar *buf;
guint pos, buf_size;
buf[buf_size-1] = 0xb2;
pos = 0;
- GstMpeg4Packet packet;
- GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);
for (i = 1; i <= num_views_minus1; i++) {
guint32 num_anchor_refs_l0 = 0;
+ guint32 num_anchor_refs_l1 = 0;
+
WRITE_UE (bs, num_anchor_refs_l0);
for (j = 0; j < num_anchor_refs_l0; j++)
WRITE_UE (bs, 0);
- guint32 num_anchor_refs_l1 = 0;
WRITE_UE (bs, num_anchor_refs_l1);
for (j = 0; j < num_anchor_refs_l1; j++)
WRITE_UE (bs, 0);
for (i = 1; i <= num_views_minus1; i++) {
guint32 num_non_anchor_refs_l0 = 0;
+ guint32 num_non_anchor_refs_l1 = 0;
+
WRITE_UE (bs, num_non_anchor_refs_l0);
for (j = 0; j < num_non_anchor_refs_l0; j++)
WRITE_UE (bs, 0);
- guint32 num_non_anchor_refs_l1 = 0;
WRITE_UE (bs, num_non_anchor_refs_l1);
for (j = 0; j < num_non_anchor_refs_l1; j++)
WRITE_UE (bs, 0);
for (i = 0; i < encoder->num_views; i++) {
GstVaapiH264ViewRefPool *const ref_pool = &encoder->ref_pools[i];
+ GstVaapiH264ViewReorderPool *const reorder_pool =
+ &encoder->reorder_pools[i];
+
ref_pool->max_reflist0_count = 1;
ref_pool->max_reflist1_count = encoder->num_bframes > 0;
ref_pool->max_ref_frames = ref_pool->max_reflist0_count
+ ref_pool->max_reflist1_count;
- GstVaapiH264ViewReorderPool *const reorder_pool =
- &encoder->reorder_pools[i];
reorder_pool->frame_index = 0;
}
}
ensure_properties ();
for (i = 0; i < N_PROPERTIES; i++) {
+ GstVaapiFilterOpData *op_data;
GParamSpec *const pspec = g_properties[i];
if (!pspec)
continue;
- GstVaapiFilterOpData *const op_data = op_data_new (i, pspec);
+ op_data = op_data_new (i, pspec);
if (!op_data)
goto error;
g_ptr_array_add (ops, op_data);
{
GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
GstVideoCodecState *state, *ref_state;
+ GstVaapiCapsFeature feature;
+ GstCapsFeatures *features = NULL;
GstVideoInfo *vi;
GstVideoFormat format = GST_VIDEO_FORMAT_I420;
+ GstClockTime latency;
+ gint fps_d, fps_n;
if (!decode->input_state)
return FALSE;
ref_state = decode->input_state;
- GstCapsFeatures *features = NULL;
- GstVaapiCapsFeature feature;
-
feature =
gst_vaapi_find_preferred_caps_feature (GST_VIDEO_DECODER_SRC_PAD (vdec),
GST_VIDEO_INFO_FORMAT (&ref_state->info), &format);
gst_caps_replace (&decode->srcpad_caps, state->caps);
gst_video_codec_state_unref (state);
- gint fps_n = GST_VIDEO_INFO_FPS_N (vi);
- gint fps_d = GST_VIDEO_INFO_FPS_D (vi);
+ fps_n = GST_VIDEO_INFO_FPS_N (vi);
+ fps_d = GST_VIDEO_INFO_FPS_D (vi);
if (fps_n <= 0 || fps_d <= 0) {
GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
fps_n = 25;
* latency in general, with perfectly known unit boundaries (NALU,
* AU), and up to 2 frames when we need to wait for the second frame
* start to determine the first frame is complete */
- GstClockTime latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
+ latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
gst_video_decoder_set_latency (vdec, latency, latency);
return TRUE;
{
GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
GstVaapiDecoderStatus status;
+ GstVaapiPluginBase *plugin;
GstFlowReturn ret;
if (!decode->input_state)
if (!gst_video_decoder_negotiate (vdec))
goto not_negotiated;
- GstVaapiPluginBase *const plugin = GST_VAAPI_PLUGIN_BASE (vdec);
+ plugin = GST_VAAPI_PLUGIN_BASE (vdec);
if (!gst_vaapi_plugin_base_set_caps (plugin, NULL, decode->srcpad_caps))
goto not_negotiated;
/* Class-init for GstVaapiPluginBase: installs the default vmethod
 * implementations, caches the parent class, and hooks the element's
 * set_context vfunc so the plugin can receive a GstContext (display)
 * from the pipeline. Called once by the GType system.
 *
 * @klass: the GstVaapiPluginBaseClass being initialized
 */
void
gst_vaapi_plugin_base_class_init (GstVaapiPluginBaseClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  klass->has_interface = default_has_interface;
  klass->display_changed = default_display_changed;
  /* cached so subclasses can chain up to the parent implementation */
  plugin_parent_class = g_type_class_peek_parent (klass);
  element_class->set_context = GST_DEBUG_FUNCPTR (plugin_set_context);
}
gst_vaapi_video_format_new_template_caps_with_features (GstVideoFormat format,
const gchar * features_string)
{
+ GstCapsFeatures *features;
GstCaps *caps;
caps = gst_vaapi_video_format_new_template_caps (format);
if (!caps)
return NULL;
- GstCapsFeatures *const features =
- gst_caps_features_new (features_string, NULL);
+ features = gst_caps_features_new (features_string, NULL);
if (!features) {
gst_caps_unref (caps);
return NULL;
GstVaapiDeinterlaceMethod deint_method;
guint flags, deint_flags;
gboolean tff, deint, deint_refs, deint_changed;
+ const GstVideoCropMeta *crop_meta;
GstVaapiRectangle *crop_rect = NULL;
GstVaapiRectangle tmp_rect;
goto error_invalid_buffer;
inbuf_surface = gst_vaapi_video_meta_get_surface (inbuf_meta);
- GstVideoCropMeta *const crop_meta = gst_buffer_get_video_crop_meta (inbuf);
+ crop_meta = gst_buffer_get_video_crop_meta (inbuf);
if (crop_meta) {
crop_rect = &tmp_rect;
crop_rect->x = crop_meta->x;
num_structures = gst_caps_get_size (caps);
for (i = 0; i < num_structures; i++) {
GstCapsFeatures *const features = gst_caps_get_features (caps, i);
+ GstStructure *structure;
+
if (gst_caps_features_contains (features,
GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META))
continue;
- GstStructure *const structure = gst_caps_get_structure (caps, i);
+ structure = gst_caps_get_structure (caps, i);
if (!structure)
continue;
gst_structure_set_value (structure, "format", &value);
buffer_thread = g_thread_new ("get buffer thread", get_buffer_thread, app);
while (1) {
+ GstVaapiSurfaceProxy *proxy;
+ GstVaapiSurface *surface;
+
if (!load_frame (app, image))
break;
if (!gst_vaapi_image_unmap (image))
break;
- GstVaapiSurfaceProxy *proxy =
+ proxy =
gst_vaapi_surface_proxy_new_from_pool (GST_VAAPI_SURFACE_POOL (pool));
if (!proxy) {
g_warning ("Could not get surface proxy from pool.");
break;
}
- GstVaapiSurface *surface = gst_vaapi_surface_proxy_get_surface (proxy);
+ surface = gst_vaapi_surface_proxy_get_surface (proxy);
if (!surface) {
g_warning ("Could not get surface from proxy.");
break;
g_return_val_if_fail(out_value_ptr != NULL, FALSE);
if (str) {
+ const GEnumValue *enum_value;
GEnumClass * const enum_class = g_type_class_ref(type);
+
if (!enum_class)
return FALSE;
- const GEnumValue * const enum_value =
- g_enum_get_value_by_nick(enum_class, str);
+ enum_value = g_enum_get_value_by_nick(enum_class, str);
if (enum_value)
out_value = enum_value->value;
g_type_class_unref(enum_class);