libs: humongous code style fix
authorVíctor Manuel Jáquez Leal <victorx.jaquez@intel.com>
Wed, 3 Feb 2016 10:50:13 +0000 (11:50 +0100)
committerVíctor Manuel Jáquez Leal <victorx.jaquez@intel.com>
Wed, 3 Feb 2016 19:03:47 +0000 (20:03 +0100)
As part of the upstreaming process of gstreamer-vaapi into the GStreamer
umbrella, we need to comply with the project's code style. This meant
changing a lot of code.

It was decided to use a single massive patch to update the code style.

I would like to apologize to the original developers of this code for
the history breakage.

Signed-off-by: Víctor Manuel Jáquez Leal <victorx.jaquez@intel.com>
13 files changed:
gst-libs/gst/vaapi/gstvaapidecoder_dpb.c
gst-libs/gst/vaapi/gstvaapidecoder_h264.c
gst-libs/gst/vaapi/gstvaapidecoder_jpeg.c
gst-libs/gst/vaapi/gstvaapidecoder_mpeg2.c
gst-libs/gst/vaapi/gstvaapidecoder_mpeg4.c
gst-libs/gst/vaapi/gstvaapidecoder_unit.c
gst-libs/gst/vaapi/gstvaapidecoder_vc1.c
gst-libs/gst/vaapi/gstvaapiimage.c
gst-libs/gst/vaapi/gstvaapiparser_frame.c
gst-libs/gst/vaapi/gstvaapipixmap.c
gst-libs/gst/vaapi/gstvaapipixmap_x11.c
gst-libs/gst/vaapi/gstvaapiprofile.c
gst-libs/gst/vaapi/gstvaapisubpicture.c

index d96186f..b9531fa 100644 (file)
  *
  * A decoded picture buffer (DPB) object.
  */
-struct _GstVaapiDpb {
-    /*< private >*/
-    GstVaapiMiniObject   parent_instance;
-
-    /*< protected >*/
-    GstVaapiPicture   **pictures;
-    guint               num_pictures;
-    guint               max_pictures;
+struct _GstVaapiDpb
+{
+  /*< private > */
+  GstVaapiMiniObject parent_instance;
+
+  /*< protected > */
+  GstVaapiPicture **pictures;
+  guint num_pictures;
+  guint max_pictures;
 };
 
 /**
@@ -52,124 +53,123 @@ struct _GstVaapiDpb {
  *
  * The #GstVaapiDpb base class.
  */
-struct _GstVaapiDpbClass {
-    /*< private >*/
-    GstVaapiMiniObjectClass parent_class;
-
-    /*< protected >*/
-    void      (*flush)          (GstVaapiDpb *dpb);
-    gboolean  (*add)            (GstVaapiDpb *dpb, GstVaapiPicture *picture);
-    void      (*get_neighbours) (GstVaapiDpb *dpb, GstVaapiPicture *picture,
-        GstVaapiPicture **prev_picture_ptr, GstVaapiPicture **next_picture_ptr);
+struct _GstVaapiDpbClass
+{
+  /*< private > */
+  GstVaapiMiniObjectClass parent_class;
+
+  /*< protected > */
+  void (*flush) (GstVaapiDpb * dpb);
+    gboolean (*add) (GstVaapiDpb * dpb, GstVaapiPicture * picture);
+  void (*get_neighbours) (GstVaapiDpb * dpb, GstVaapiPicture * picture,
+      GstVaapiPicture ** prev_picture_ptr, GstVaapiPicture ** next_picture_ptr);
 };
 
-static const GstVaapiMiniObjectClass *
-gst_vaapi_dpb_class(void);
+static const GstVaapiMiniObjectClass *gst_vaapi_dpb_class (void);
 
-static const GstVaapiMiniObjectClass *
-gst_vaapi_dpb2_class(void);
+static const GstVaapiMiniObjectClass *gst_vaapi_dpb2_class (void);
 
 /* ------------------------------------------------------------------------- */
 /* --- Common utilities                                                  --- */
 /* ------------------------------------------------------------------------- */
 
 static inline GstVaapiDpb *
-dpb_new(guint max_pictures)
+dpb_new (guint max_pictures)
 {
-    GstVaapiDpb *dpb;
+  GstVaapiDpb *dpb;
 
-    g_return_val_if_fail(max_pictures > 0, NULL);
+  g_return_val_if_fail (max_pictures > 0, NULL);
 
-    dpb = (GstVaapiDpb *)gst_vaapi_mini_object_new(
-        max_pictures == 2 ? gst_vaapi_dpb2_class() : gst_vaapi_dpb_class());
-    if (!dpb)
-        return NULL;
+  dpb =
+      (GstVaapiDpb *) gst_vaapi_mini_object_new (max_pictures ==
+      2 ? gst_vaapi_dpb2_class () : gst_vaapi_dpb_class ());
+  if (!dpb)
+    return NULL;
 
-    dpb->num_pictures = 0;
-    dpb->max_pictures = max_pictures;
+  dpb->num_pictures = 0;
+  dpb->max_pictures = max_pictures;
 
-    dpb->pictures = g_new0(GstVaapiPicture *, max_pictures);
-    if (!dpb->pictures)
-        goto error;
-    return dpb;
+  dpb->pictures = g_new0 (GstVaapiPicture *, max_pictures);
+  if (!dpb->pictures)
+    goto error;
+  return dpb;
 
 error:
-    gst_vaapi_dpb_unref(dpb);
-    return NULL;
+  gst_vaapi_dpb_unref (dpb);
+  return NULL;
 }
 
 static gint
-dpb_get_oldest(GstVaapiDpb *dpb, gboolean output)
+dpb_get_oldest (GstVaapiDpb * dpb, gboolean output)
 {
-    gint i, lowest_pts_index;
-
-    for (i = 0; i < dpb->num_pictures; i++) {
-        if ((GST_VAAPI_PICTURE_IS_OUTPUT(dpb->pictures[i]) ^ output) == 0)
-            break;
-    }
-    if (i == dpb->num_pictures)
-        return -1;
-
-    lowest_pts_index = i++;
-    for (; i < dpb->num_pictures; i++) {
-        GstVaapiPicture * const picture = dpb->pictures[i];
-        if ((GST_VAAPI_PICTURE_IS_OUTPUT(picture) ^ output) != 0)
-            continue;
-        if (picture->poc < dpb->pictures[lowest_pts_index]->poc)
-            lowest_pts_index = i;
-    }
-    return lowest_pts_index;
+  gint i, lowest_pts_index;
+
+  for (i = 0; i < dpb->num_pictures; i++) {
+    if ((GST_VAAPI_PICTURE_IS_OUTPUT (dpb->pictures[i]) ^ output) == 0)
+      break;
+  }
+  if (i == dpb->num_pictures)
+    return -1;
+
+  lowest_pts_index = i++;
+  for (; i < dpb->num_pictures; i++) {
+    GstVaapiPicture *const picture = dpb->pictures[i];
+    if ((GST_VAAPI_PICTURE_IS_OUTPUT (picture) ^ output) != 0)
+      continue;
+    if (picture->poc < dpb->pictures[lowest_pts_index]->poc)
+      lowest_pts_index = i;
+  }
+  return lowest_pts_index;
 }
 
 static void
-dpb_remove_index(GstVaapiDpb *dpb, guint index)
+dpb_remove_index (GstVaapiDpb * dpb, guint index)
 {
-    GstVaapiPicture ** const pictures = dpb->pictures;
-    guint num_pictures = --dpb->num_pictures;
+  GstVaapiPicture **const pictures = dpb->pictures;
+  guint num_pictures = --dpb->num_pictures;
 
-    if (index != num_pictures)
-        gst_vaapi_picture_replace(&pictures[index], pictures[num_pictures]);
-    gst_vaapi_picture_replace(&pictures[num_pictures], NULL);
+  if (index != num_pictures)
+    gst_vaapi_picture_replace (&pictures[index], pictures[num_pictures]);
+  gst_vaapi_picture_replace (&pictures[num_pictures], NULL);
 }
 
 static inline gboolean
-dpb_output(GstVaapiDpb *dpb, GstVaapiPicture *picture)
+dpb_output (GstVaapiDpb * dpb, GstVaapiPicture * picture)
 {
-    return gst_vaapi_picture_output(picture);
+  return gst_vaapi_picture_output (picture);
 }
 
 static gboolean
-dpb_bump(GstVaapiDpb *dpb)
+dpb_bump (GstVaapiDpb * dpb)
 {
-    gint index;
-    gboolean success;
+  gint index;
+  gboolean success;
 
-    index = dpb_get_oldest(dpb, FALSE);
-    if (index < 0)
-        return FALSE;
+  index = dpb_get_oldest (dpb, FALSE);
+  if (index < 0)
+    return FALSE;
 
-    success = dpb_output(dpb, dpb->pictures[index]);
-    if (!GST_VAAPI_PICTURE_IS_REFERENCE(dpb->pictures[index]))
-        dpb_remove_index(dpb, index);
-    return success;
+  success = dpb_output (dpb, dpb->pictures[index]);
+  if (!GST_VAAPI_PICTURE_IS_REFERENCE (dpb->pictures[index]))
+    dpb_remove_index (dpb, index);
+  return success;
 }
 
 static void
-dpb_clear(GstVaapiDpb *dpb)
+dpb_clear (GstVaapiDpb * dpb)
 {
-    guint i;
+  guint i;
 
-    for (i = 0; i < dpb->num_pictures; i++)
-        gst_vaapi_picture_replace(&dpb->pictures[i], NULL);
-    dpb->num_pictures = 0;
+  for (i = 0; i < dpb->num_pictures; i++)
+    gst_vaapi_picture_replace (&dpb->pictures[i], NULL);
+  dpb->num_pictures = 0;
 }
 
 static void
-dpb_flush(GstVaapiDpb *dpb)
+dpb_flush (GstVaapiDpb * dpb)
 {
-    while (dpb_bump(dpb))
-        ;
-    dpb_clear(dpb);
+  while (dpb_bump (dpb));
+  dpb_clear (dpb);
 }
 
 /* ------------------------------------------------------------------------- */
@@ -177,82 +177,80 @@ dpb_flush(GstVaapiDpb *dpb)
 /* ------------------------------------------------------------------------- */
 
 static gboolean
-dpb_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
+dpb_add (GstVaapiDpb * dpb, GstVaapiPicture * picture)
 {
-    guint i;
-
-    // Remove all unused pictures
-    i = 0;
-    while (i < dpb->num_pictures) {
-        GstVaapiPicture * const picture = dpb->pictures[i];
-        if (GST_VAAPI_PICTURE_IS_OUTPUT(picture) &&
-            !GST_VAAPI_PICTURE_IS_REFERENCE(picture))
-            dpb_remove_index(dpb, i);
-        else
-            i++;
-    }
-
-    // Store reference decoded picture into the DPB
-    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
-        while (dpb->num_pictures == dpb->max_pictures) {
-            if (!dpb_bump(dpb))
-                return FALSE;
-        }
+  guint i;
+
+  // Remove all unused pictures
+  i = 0;
+  while (i < dpb->num_pictures) {
+    GstVaapiPicture *const picture = dpb->pictures[i];
+    if (GST_VAAPI_PICTURE_IS_OUTPUT (picture) &&
+        !GST_VAAPI_PICTURE_IS_REFERENCE (picture))
+      dpb_remove_index (dpb, i);
+    else
+      i++;
+  }
+
+  // Store reference decoded picture into the DPB
+  if (GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
+    while (dpb->num_pictures == dpb->max_pictures) {
+      if (!dpb_bump (dpb))
+        return FALSE;
     }
-
-    // Store non-reference decoded picture into the DPB
-    else {
-        if (GST_VAAPI_PICTURE_IS_SKIPPED(picture))
-            return TRUE;
-        while (dpb->num_pictures == dpb->max_pictures) {
-            for (i = 0; i < dpb->num_pictures; i++) {
-                if (!GST_VAAPI_PICTURE_IS_OUTPUT(picture) &&
-                    dpb->pictures[i]->poc < picture->poc)
-                    break;
-            }
-            if (i == dpb->num_pictures)
-                return dpb_output(dpb, picture);
-            if (!dpb_bump(dpb))
-                return FALSE;
-        }
+  }
+  // Store non-reference decoded picture into the DPB
+  else {
+    if (GST_VAAPI_PICTURE_IS_SKIPPED (picture))
+      return TRUE;
+    while (dpb->num_pictures == dpb->max_pictures) {
+      for (i = 0; i < dpb->num_pictures; i++) {
+        if (!GST_VAAPI_PICTURE_IS_OUTPUT (picture) &&
+            dpb->pictures[i]->poc < picture->poc)
+          break;
+      }
+      if (i == dpb->num_pictures)
+        return dpb_output (dpb, picture);
+      if (!dpb_bump (dpb))
+        return FALSE;
     }
-    gst_vaapi_picture_replace(&dpb->pictures[dpb->num_pictures++], picture);
-    return TRUE;
+  }
+  gst_vaapi_picture_replace (&dpb->pictures[dpb->num_pictures++], picture);
+  return TRUE;
 }
 
 static void
-dpb_get_neighbours(GstVaapiDpb *dpb, GstVaapiPicture *picture,
-    GstVaapiPicture **prev_picture_ptr, GstVaapiPicture **next_picture_ptr)
+dpb_get_neighbours (GstVaapiDpb * dpb, GstVaapiPicture * picture,
+    GstVaapiPicture ** prev_picture_ptr, GstVaapiPicture ** next_picture_ptr)
 {
-    GstVaapiPicture *prev_picture = NULL;
-    GstVaapiPicture *next_picture = NULL;
-    guint i;
-
-    /* Find the first picture with POC > specified picture POC */
-    for (i = 0; i < dpb->num_pictures; i++) {
-        GstVaapiPicture * const ref_picture = dpb->pictures[i];
-        if (ref_picture->poc == picture->poc) {
-            if (i > 0)
-                prev_picture = dpb->pictures[i - 1];
-            if (i + 1 < dpb->num_pictures)
-                next_picture = dpb->pictures[i + 1];
-            break;
-        }
-        else if (ref_picture->poc > picture->poc) {
-            next_picture = ref_picture;
-            if (i > 0)
-                prev_picture = dpb->pictures[i - 1];
-            break;
-        }
+  GstVaapiPicture *prev_picture = NULL;
+  GstVaapiPicture *next_picture = NULL;
+  guint i;
+
+  /* Find the first picture with POC > specified picture POC */
+  for (i = 0; i < dpb->num_pictures; i++) {
+    GstVaapiPicture *const ref_picture = dpb->pictures[i];
+    if (ref_picture->poc == picture->poc) {
+      if (i > 0)
+        prev_picture = dpb->pictures[i - 1];
+      if (i + 1 < dpb->num_pictures)
+        next_picture = dpb->pictures[i + 1];
+      break;
+    } else if (ref_picture->poc > picture->poc) {
+      next_picture = ref_picture;
+      if (i > 0)
+        prev_picture = dpb->pictures[i - 1];
+      break;
     }
+  }
 
-    g_assert(next_picture ? next_picture->poc > picture->poc : TRUE);
-    g_assert(prev_picture ? prev_picture->poc < picture->poc : TRUE);
+  g_assert (next_picture ? next_picture->poc > picture->poc : TRUE);
+  g_assert (prev_picture ? prev_picture->poc < picture->poc : TRUE);
 
-    if (prev_picture_ptr)
-        *prev_picture_ptr = prev_picture;
-    if (next_picture_ptr)
-        *next_picture_ptr = next_picture;
+  if (prev_picture_ptr)
+    *prev_picture_ptr = prev_picture;
+  if (next_picture_ptr)
+    *next_picture_ptr = next_picture;
 }
 
 /* ------------------------------------------------------------------------- */
@@ -260,66 +258,66 @@ dpb_get_neighbours(GstVaapiDpb *dpb, GstVaapiPicture *picture,
 /* ------------------------------------------------------------------------- */
 
 static gboolean
-dpb2_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
+dpb2_add (GstVaapiDpb * dpb, GstVaapiPicture * picture)
 {
-    GstVaapiPicture *ref_picture;
-    gint index = -1;
-
-    g_return_val_if_fail(GST_VAAPI_IS_DPB(dpb), FALSE);
-    g_return_val_if_fail(dpb->max_pictures == 2, FALSE);
-
-    /*
-     * Purpose: only store reference decoded pictures into the DPB
-     *
-     * This means:
-     * - non-reference decoded pictures are output immediately
-     * - ... thus causing older reference pictures to be output, if not already
-     * - the oldest reference picture is replaced with the new reference picture
-     */
-    if (G_LIKELY(dpb->num_pictures == 2)) {
-        index = (dpb->pictures[0]->poc > dpb->pictures[1]->poc);
-        ref_picture = dpb->pictures[index];
-        if (!GST_VAAPI_PICTURE_IS_OUTPUT(ref_picture)) {
-            if (!dpb_output(dpb, ref_picture))
-                return FALSE;
-        }
+  GstVaapiPicture *ref_picture;
+  gint index = -1;
+
+  g_return_val_if_fail (GST_VAAPI_IS_DPB (dpb), FALSE);
+  g_return_val_if_fail (dpb->max_pictures == 2, FALSE);
+
+  /*
+   * Purpose: only store reference decoded pictures into the DPB
+   *
+   * This means:
+   * - non-reference decoded pictures are output immediately
+   * - ... thus causing older reference pictures to be output, if not already
+   * - the oldest reference picture is replaced with the new reference picture
+   */
+  if (G_LIKELY (dpb->num_pictures == 2)) {
+    index = (dpb->pictures[0]->poc > dpb->pictures[1]->poc);
+    ref_picture = dpb->pictures[index];
+    if (!GST_VAAPI_PICTURE_IS_OUTPUT (ref_picture)) {
+      if (!dpb_output (dpb, ref_picture))
+        return FALSE;
     }
+  }
 
-    if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
-        return dpb_output(dpb, picture);
+  if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture))
+    return dpb_output (dpb, picture);
 
-    if (index < 0)
-        index = dpb->num_pictures++;
-    gst_vaapi_picture_replace(&dpb->pictures[index], picture);
-    return TRUE;
+  if (index < 0)
+    index = dpb->num_pictures++;
+  gst_vaapi_picture_replace (&dpb->pictures[index], picture);
+  return TRUE;
 }
 
 static void
-dpb2_get_neighbours(GstVaapiDpb *dpb, GstVaapiPicture *picture,
-    GstVaapiPicture **prev_picture_ptr, GstVaapiPicture **next_picture_ptr)
+dpb2_get_neighbours (GstVaapiDpb * dpb, GstVaapiPicture * picture,
+    GstVaapiPicture ** prev_picture_ptr, GstVaapiPicture ** next_picture_ptr)
 {
-    GstVaapiPicture *ref_picture, *ref_pictures[2];
-    GstVaapiPicture **picture_ptr;
-    guint i, index;
-
-    g_return_if_fail(GST_VAAPI_IS_DPB(dpb));
-    g_return_if_fail(dpb->max_pictures == 2);
-    g_return_if_fail(GST_VAAPI_IS_PICTURE(picture));
-
-    ref_pictures[0] = NULL;
-    ref_pictures[1] = NULL;
-    for (i = 0; i < dpb->num_pictures; i++) {
-        ref_picture = dpb->pictures[i];
-        index       = ref_picture->poc > picture->poc;
-        picture_ptr = &ref_pictures[index];
-        if (!*picture_ptr || ((*picture_ptr)->poc > ref_picture->poc) == index)
-            *picture_ptr = ref_picture;
-    }
-
-    if (prev_picture_ptr)
-        *prev_picture_ptr = ref_pictures[0];
-    if (next_picture_ptr)
-        *next_picture_ptr = ref_pictures[1];
+  GstVaapiPicture *ref_picture, *ref_pictures[2];
+  GstVaapiPicture **picture_ptr;
+  guint i, index;
+
+  g_return_if_fail (GST_VAAPI_IS_DPB (dpb));
+  g_return_if_fail (dpb->max_pictures == 2);
+  g_return_if_fail (GST_VAAPI_IS_PICTURE (picture));
+
+  ref_pictures[0] = NULL;
+  ref_pictures[1] = NULL;
+  for (i = 0; i < dpb->num_pictures; i++) {
+    ref_picture = dpb->pictures[i];
+    index = ref_picture->poc > picture->poc;
+    picture_ptr = &ref_pictures[index];
+    if (!*picture_ptr || ((*picture_ptr)->poc > ref_picture->poc) == index)
+      *picture_ptr = ref_picture;
+  }
+
+  if (prev_picture_ptr)
+    *prev_picture_ptr = ref_pictures[0];
+  if (next_picture_ptr)
+    *next_picture_ptr = ref_pictures[1];
 }
 
 /* ------------------------------------------------------------------------- */
@@ -327,92 +325,94 @@ dpb2_get_neighbours(GstVaapiDpb *dpb, GstVaapiPicture *picture,
 /* ------------------------------------------------------------------------- */
 
 static void
-gst_vaapi_dpb_finalize(GstVaapiDpb *dpb)
+gst_vaapi_dpb_finalize (GstVaapiDpb * dpb)
 {
-    dpb_clear(dpb);
-    g_free(dpb->pictures);
+  dpb_clear (dpb);
+  g_free (dpb->pictures);
 }
 
 static const GstVaapiMiniObjectClass *
-gst_vaapi_dpb_class(void)
+gst_vaapi_dpb_class (void)
 {
-    static const GstVaapiDpbClass GstVaapiDpbClass = {
-        { sizeof(GstVaapiDpb),
-          (GDestroyNotify)gst_vaapi_dpb_finalize },
-
-        dpb_flush,
-        dpb_add,
-        dpb_get_neighbours
-    };
-    return &GstVaapiDpbClass.parent_class;
+  static const GstVaapiDpbClass GstVaapiDpbClass = {
+    {sizeof (GstVaapiDpb),
+        (GDestroyNotify) gst_vaapi_dpb_finalize}
+    ,
+
+    dpb_flush,
+    dpb_add,
+    dpb_get_neighbours
+  };
+  return &GstVaapiDpbClass.parent_class;
 }
 
 static const GstVaapiMiniObjectClass *
-gst_vaapi_dpb2_class(void)
+gst_vaapi_dpb2_class (void)
 {
-    static const GstVaapiDpbClass GstVaapiDpb2Class = {
-        { sizeof(GstVaapiDpb),
-          (GDestroyNotify)gst_vaapi_dpb_finalize },
-
-        dpb_flush,
-        dpb2_add,
-        dpb2_get_neighbours
-    };
-    return &GstVaapiDpb2Class.parent_class;
+  static const GstVaapiDpbClass GstVaapiDpb2Class = {
+    {sizeof (GstVaapiDpb),
+        (GDestroyNotify) gst_vaapi_dpb_finalize}
+    ,
+
+    dpb_flush,
+    dpb2_add,
+    dpb2_get_neighbours
+  };
+  return &GstVaapiDpb2Class.parent_class;
 }
 
 GstVaapiDpb *
-gst_vaapi_dpb_new(guint max_pictures)
+gst_vaapi_dpb_new (guint max_pictures)
 {
-    return dpb_new(max_pictures);
+  return dpb_new (max_pictures);
 }
 
 void
-gst_vaapi_dpb_flush(GstVaapiDpb *dpb)
+gst_vaapi_dpb_flush (GstVaapiDpb * dpb)
 {
-    const GstVaapiDpbClass *klass;
+  const GstVaapiDpbClass *klass;
 
-    g_return_if_fail(GST_VAAPI_IS_DPB(dpb));
+  g_return_if_fail (GST_VAAPI_IS_DPB (dpb));
 
-    klass = GST_VAAPI_DPB_GET_CLASS(dpb);
-    if (G_UNLIKELY(!klass || !klass->add))
-        return;
-    klass->flush(dpb);
+  klass = GST_VAAPI_DPB_GET_CLASS (dpb);
+  if (G_UNLIKELY (!klass || !klass->add))
+    return;
+  klass->flush (dpb);
 }
 
 gboolean
-gst_vaapi_dpb_add(GstVaapiDpb *dpb, GstVaapiPicture *picture)
+gst_vaapi_dpb_add (GstVaapiDpb * dpb, GstVaapiPicture * picture)
 {
-    const GstVaapiDpbClass *klass;
+  const GstVaapiDpbClass *klass;
 
-    g_return_val_if_fail(GST_VAAPI_IS_DPB(dpb), FALSE);
-    g_return_val_if_fail(GST_VAAPI_IS_PICTURE(picture), FALSE);
+  g_return_val_if_fail (GST_VAAPI_IS_DPB (dpb), FALSE);
+  g_return_val_if_fail (GST_VAAPI_IS_PICTURE (picture), FALSE);
 
-    klass = GST_VAAPI_DPB_GET_CLASS(dpb);
-    if (G_UNLIKELY(!klass || !klass->add))
-        return FALSE;
-    return klass->add(dpb, picture);
+  klass = GST_VAAPI_DPB_GET_CLASS (dpb);
+  if (G_UNLIKELY (!klass || !klass->add))
+    return FALSE;
+  return klass->add (dpb, picture);
 }
 
 guint
-gst_vaapi_dpb_size(GstVaapiDpb *dpb)
+gst_vaapi_dpb_size (GstVaapiDpb * dpb)
 {
-    g_return_val_if_fail(GST_VAAPI_IS_DPB(dpb), 0);
+  g_return_val_if_fail (GST_VAAPI_IS_DPB (dpb), 0);
 
-    return dpb->num_pictures;
+  return dpb->num_pictures;
 }
 
 void
-gst_vaapi_dpb_get_neighbours(GstVaapiDpb *dpb, GstVaapiPicture *picture,
-    GstVaapiPicture **prev_picture_ptr, GstVaapiPicture **next_picture_ptr)
+gst_vaapi_dpb_get_neighbours (GstVaapiDpb * dpb, GstVaapiPicture * picture,
+    GstVaapiPicture ** prev_picture_ptr, GstVaapiPicture ** next_picture_ptr)
 {
-    const GstVaapiDpbClass *klass;
+  const GstVaapiDpbClass *klass;
 
-    g_return_if_fail(GST_VAAPI_IS_DPB(dpb));
-    g_return_if_fail(GST_VAAPI_IS_PICTURE(picture));
+  g_return_if_fail (GST_VAAPI_IS_DPB (dpb));
+  g_return_if_fail (GST_VAAPI_IS_PICTURE (picture));
 
-    klass = GST_VAAPI_DPB_GET_CLASS(dpb);
-    if (G_UNLIKELY(!klass || !klass->get_neighbours))
-        return;
-    klass->get_neighbours(dpb, picture, prev_picture_ptr, next_picture_ptr);
+  klass = GST_VAAPI_DPB_GET_CLASS (dpb);
+  if (G_UNLIKELY (!klass || !klass->get_neighbours))
+    return;
+  klass->get_neighbours (dpb, picture, prev_picture_ptr, next_picture_ptr);
 }
index ffe9e5f..5f08e07 100644 (file)
 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
 #define USE_STRICT_DPB_ORDERING 0
 
-typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
-typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
-typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
-typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
-typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
-typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
+typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
+typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
+typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
+typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
+typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
+typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
 
 // Used for field_poc[]
 #define TOP_FIELD       0
@@ -63,74 +63,75 @@ typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
  * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
  * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
  */
-enum {
-    /* This flag does not strictly follow the definitions (7.4.1.2.3)
-       for detecting the start of an access unit as we are only
-       interested in knowing if the current slice is the first one or
-       the last one in the current access unit */
-    GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
-        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
-    GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
-        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
-
-    GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
-        GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
-        GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
+enum
+{
+  /* This flag does not strictly follow the definitions (7.4.1.2.3)
+     for detecting the start of an access unit as we are only
+     interested in knowing if the current slice is the first one or
+     the last one in the current access unit */
+  GST_VAAPI_DECODER_UNIT_FLAG_AU_START =
+      (GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
+  GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
+
+  GST_VAAPI_DECODER_UNIT_FLAGS_AU = (GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+      GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
 };
 
 #define GST_VAAPI_PARSER_INFO_H264(obj) \
     ((GstVaapiParserInfoH264 *)(obj))
 
-struct _GstVaapiParserInfoH264 {
-    GstVaapiMiniObject  parent_instance;
-    GstH264NalUnit      nalu;
-    union {
-        GstH264SPS      sps;
-        GstH264PPS      pps;
-        GArray         *sei;
-        GstH264SliceHdr slice_hdr;
-    }                   data;
-    guint               state;
-    guint               flags;      // Same as decoder unit flags (persistent)
-    guint               view_id;    // View ID of slice
-    guint               voc;        // View order index (VOIdx) of slice
+struct _GstVaapiParserInfoH264
+{
+  GstVaapiMiniObject parent_instance;
+  GstH264NalUnit nalu;
+  union
+  {
+    GstH264SPS sps;
+    GstH264PPS pps;
+    GArray *sei;
+    GstH264SliceHdr slice_hdr;
+  } data;
+  guint state;
+  guint flags;                  // Same as decoder unit flags (persistent)
+  guint view_id;                // View ID of slice
+  guint voc;                    // View order index (VOIdx) of slice
 };
 
 static void
-gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
+gst_vaapi_parser_info_h264_finalize (GstVaapiParserInfoH264 * pi)
 {
-    switch (pi->nalu.type) {
+  switch (pi->nalu.type) {
     case GST_H264_NAL_SPS:
     case GST_H264_NAL_SUBSET_SPS:
-        gst_h264_sps_clear(&pi->data.sps);
-        break;
+      gst_h264_sps_clear (&pi->data.sps);
+      break;
     case GST_H264_NAL_PPS:
-        gst_h264_pps_clear(&pi->data.pps);
-        break;
+      gst_h264_pps_clear (&pi->data.pps);
+      break;
     case GST_H264_NAL_SEI:
-        if (pi->data.sei) {
-            g_array_unref(pi->data.sei);
-            pi->data.sei = NULL;
-        }
-        break;
-    }
+      if (pi->data.sei) {
+        g_array_unref (pi->data.sei);
+        pi->data.sei = NULL;
+      }
+      break;
+  }
 }
 
 static inline const GstVaapiMiniObjectClass *
-gst_vaapi_parser_info_h264_class(void)
+gst_vaapi_parser_info_h264_class (void)
 {
-    static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
-        .size = sizeof(GstVaapiParserInfoH264),
-        .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
-    };
-    return &GstVaapiParserInfoH264Class;
+  static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
+    .size = sizeof (GstVaapiParserInfoH264),
+    .finalize = (GDestroyNotify) gst_vaapi_parser_info_h264_finalize
+  };
+  return &GstVaapiParserInfoH264Class;
 }
 
 static inline GstVaapiParserInfoH264 *
-gst_vaapi_parser_info_h264_new(void)
+gst_vaapi_parser_info_h264_new (void)
 {
-    return (GstVaapiParserInfoH264 *)
-        gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
+  return (GstVaapiParserInfoH264 *)
+      gst_vaapi_mini_object_new (gst_vaapi_parser_info_h264_class ());
 }
 
 #define gst_vaapi_parser_info_h264_ref(pi) \
@@ -170,22 +171,23 @@ gst_vaapi_parser_info_h264_new(void)
  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
  *     reference picture (short-term reference or long-term reference)
  */
-enum {
-    GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
-    GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
-    GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
-    GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
-    GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
-    GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
-    GST_VAAPI_PICTURE_FLAG_GHOST        = (GST_VAAPI_PICTURE_FLAG_LAST << 6),
-
-    GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
-        GST_VAAPI_PICTURE_FLAG_REFERENCE),
-    GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
-        GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
-    GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
-        GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
-        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
+enum
+{
+  GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
+  GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
+  GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
+  GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
+  GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
+  GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
+  GST_VAAPI_PICTURE_FLAG_GHOST = (GST_VAAPI_PICTURE_FLAG_LAST << 6),
+
+  GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE =
+      (GST_VAAPI_PICTURE_FLAG_REFERENCE),
+  GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE =
+      (GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
+  GST_VAAPI_PICTURE_FLAGS_REFERENCE =
+      (GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
+      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
 };
 
 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
@@ -210,237 +212,233 @@ enum {
 #define GST_VAAPI_PICTURE_H264(picture) \
     ((GstVaapiPictureH264 *)(picture))
 
-struct _GstVaapiPictureH264 {
-    GstVaapiPicture             base;
-    GstH264SliceHdr            *last_slice_hdr;
-    guint                       structure;
-    gint32                      field_poc[2];
-    gint32                      frame_num;              // Original frame_num from slice_header()
-    gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
-    gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
-    gint32                      pic_num;                // Temporary for ref pic marking: PicNum
-    gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
-    GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
-    guint                       output_flag             : 1;
-    guint                       output_needed           : 1;
+struct _GstVaapiPictureH264
+{
+  GstVaapiPicture base;
+  GstH264SliceHdr *last_slice_hdr;
+  guint structure;
+  gint32 field_poc[2];
+  gint32 frame_num;             // Original frame_num from slice_header()
+  gint32 frame_num_wrap;        // Temporary for ref pic marking: FrameNumWrap
+  gint32 long_term_frame_idx;   // Temporary for ref pic marking: LongTermFrameIdx
+  gint32 pic_num;               // Temporary for ref pic marking: PicNum
+  gint32 long_term_pic_num;     // Temporary for ref pic marking: LongTermPicNum
+  GstVaapiPictureH264 *other_field;     // Temporary for ref pic marking: other field in the same frame store
+  guint output_flag:1;
+  guint output_needed:1;
 };
 
-GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
+GST_VAAPI_CODEC_DEFINE_TYPE (GstVaapiPictureH264, gst_vaapi_picture_h264);
 
 void
-gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
+gst_vaapi_picture_h264_destroy (GstVaapiPictureH264 * picture)
 {
-    gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
+  gst_vaapi_picture_destroy (GST_VAAPI_PICTURE (picture));
 }
 
 gboolean
-gst_vaapi_picture_h264_create(
-    GstVaapiPictureH264                      *picture,
-    const GstVaapiCodecObjectConstructorArgs *args
-)
+gst_vaapi_picture_h264_create (GstVaapiPictureH264 * picture,
+    const GstVaapiCodecObjectConstructorArgs * args)
 {
-    if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
-        return FALSE;
+  if (!gst_vaapi_picture_create (GST_VAAPI_PICTURE (picture), args))
+    return FALSE;
 
-    picture->structure          = picture->base.structure;
-    picture->field_poc[0]       = G_MAXINT32;
-    picture->field_poc[1]       = G_MAXINT32;
-    picture->output_needed      = FALSE;
-    return TRUE;
+  picture->structure = picture->base.structure;
+  picture->field_poc[0] = G_MAXINT32;
+  picture->field_poc[1] = G_MAXINT32;
+  picture->output_needed = FALSE;
+  return TRUE;
 }
 
 static inline GstVaapiPictureH264 *
-gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
+gst_vaapi_picture_h264_new (GstVaapiDecoderH264 * decoder)
 {
-    return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
-        &GstVaapiPictureH264Class,
-        GST_VAAPI_CODEC_BASE(decoder),
-        NULL, sizeof(VAPictureParameterBufferH264),
-        NULL, 0,
-        0);
+  return (GstVaapiPictureH264 *)
+      gst_vaapi_codec_object_new (&GstVaapiPictureH264Class,
+      GST_VAAPI_CODEC_BASE (decoder), NULL,
+      sizeof (VAPictureParameterBufferH264), NULL, 0, 0);
 }
 
 static inline void
-gst_vaapi_picture_h264_set_reference(
-    GstVaapiPictureH264 *picture,
-    guint                reference_flags,
-    gboolean             other_field
-)
+gst_vaapi_picture_h264_set_reference (GstVaapiPictureH264 * picture,
+    guint reference_flags, gboolean other_field)
 {
-    if (!picture)
-        return;
-    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
-    GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
+  if (!picture)
+    return;
+  GST_VAAPI_PICTURE_FLAG_UNSET (picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
+  GST_VAAPI_PICTURE_FLAG_SET (picture, reference_flags);
 
-    if (!other_field || !(picture = picture->other_field))
-        return;
-    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
-    GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
+  if (!other_field || !(picture = picture->other_field))
+    return;
+  GST_VAAPI_PICTURE_FLAG_UNSET (picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
+  GST_VAAPI_PICTURE_FLAG_SET (picture, reference_flags);
 }
 
 static inline GstVaapiPictureH264 *
-gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
+gst_vaapi_picture_h264_new_field (GstVaapiPictureH264 * picture)
 {
-    g_return_val_if_fail(picture, NULL);
+  g_return_val_if_fail (picture, NULL);
 
-    return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
+  return (GstVaapiPictureH264 *) gst_vaapi_picture_new_field (&picture->base);
 }
 
 static inline GstVaapiPictureH264 *
-gst_vaapi_picture_h264_new_clone(GstVaapiPictureH264 *picture)
+gst_vaapi_picture_h264_new_clone (GstVaapiPictureH264 * picture)
 {
-    g_return_val_if_fail(picture, NULL);
+  g_return_val_if_fail (picture, NULL);
 
-    return (GstVaapiPictureH264 *)gst_vaapi_picture_new_clone(&picture->base);
+  return (GstVaapiPictureH264 *) gst_vaapi_picture_new_clone (&picture->base);
 }
 
 /* ------------------------------------------------------------------------- */
 /* --- Frame Buffers (DPB)                                               --- */
 /* ------------------------------------------------------------------------- */
 
-struct _GstVaapiFrameStore {
-    /*< private >*/
-    GstVaapiMiniObject          parent_instance;
+struct _GstVaapiFrameStore
+{
+  /*< private >*/
+  GstVaapiMiniObject parent_instance;
 
-    guint                       view_id;
-    guint                       structure;
-    GstVaapiPictureH264        *buffers[2];
-    guint                       num_buffers;
-    guint                       output_needed;
-    guint                       output_called;
+  guint view_id;
+  guint structure;
+  GstVaapiPictureH264 *buffers[2];
+  guint num_buffers;
+  guint output_needed;
+  guint output_called;
 };
 
 static void
-gst_vaapi_frame_store_finalize(gpointer object)
+gst_vaapi_frame_store_finalize (gpointer object)
 {
-    GstVaapiFrameStore * const fs = object;
-    guint i;
+  GstVaapiFrameStore *const fs = object;
+  guint i;
 
-    for (i = 0; i < fs->num_buffers; i++)
-        gst_vaapi_picture_replace(&fs->buffers[i], NULL);
+  for (i = 0; i < fs->num_buffers; i++)
+    gst_vaapi_picture_replace (&fs->buffers[i], NULL);
 }
 
 static GstVaapiFrameStore *
-gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
+gst_vaapi_frame_store_new (GstVaapiPictureH264 * picture)
 {
-    GstVaapiFrameStore *fs;
+  GstVaapiFrameStore *fs;
 
-    static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
-        sizeof(GstVaapiFrameStore),
-        gst_vaapi_frame_store_finalize
-    };
+  static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
+    sizeof (GstVaapiFrameStore),
+    gst_vaapi_frame_store_finalize
+  };
 
-    fs = (GstVaapiFrameStore *)
-        gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
-    if (!fs)
-        return NULL;
-
-    fs->view_id         = picture->base.view_id;
-    fs->structure       = picture->structure;
-    fs->buffers[0]      = gst_vaapi_picture_ref(picture);
-    fs->buffers[1]      = NULL;
-    fs->num_buffers     = 1;
-    fs->output_needed   = 0;
-    fs->output_called   = 0;
-
-    if (picture->output_flag) {
-        picture->output_needed = TRUE;
-        fs->output_needed++;
-    }
-    return fs;
+  fs = (GstVaapiFrameStore *)
+      gst_vaapi_mini_object_new (&GstVaapiFrameStoreClass);
+  if (!fs)
+    return NULL;
+
+  fs->view_id = picture->base.view_id;
+  fs->structure = picture->structure;
+  fs->buffers[0] = gst_vaapi_picture_ref (picture);
+  fs->buffers[1] = NULL;
+  fs->num_buffers = 1;
+  fs->output_needed = 0;
+  fs->output_called = 0;
+
+  if (picture->output_flag) {
+    picture->output_needed = TRUE;
+    fs->output_needed++;
+  }
+  return fs;
 }
 
 static gboolean
-gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
+gst_vaapi_frame_store_add (GstVaapiFrameStore * fs,
+    GstVaapiPictureH264 * picture)
 {
-    guint field;
+  guint field;
 
-    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
-    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
-    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
+  g_return_val_if_fail (fs->num_buffers == 1, FALSE);
+  g_return_val_if_fail (!GST_VAAPI_PICTURE_IS_FRAME (picture), FALSE);
+  g_return_val_if_fail (!GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture), FALSE);
 
-    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
-    if (picture->output_flag) {
-        picture->output_needed = TRUE;
-        fs->output_needed++;
-    }
+  gst_vaapi_picture_replace (&fs->buffers[fs->num_buffers++], picture);
+  if (picture->output_flag) {
+    picture->output_needed = TRUE;
+    fs->output_needed++;
+  }
 
-    fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+  fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
 
-    field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
-        TOP_FIELD : BOTTOM_FIELD;
-    g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
-    fs->buffers[0]->field_poc[field] = picture->field_poc[field];
-    g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
-    picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
-    return TRUE;
+  field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
+      TOP_FIELD : BOTTOM_FIELD;
+  g_return_val_if_fail (fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
+  fs->buffers[0]->field_poc[field] = picture->field_poc[field];
+  g_return_val_if_fail (picture->field_poc[!field] == G_MAXINT32, FALSE);
+  picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
+  return TRUE;
 }
 
 static gboolean
-gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs, gboolean tff)
+gst_vaapi_frame_store_split_fields (GstVaapiFrameStore * fs, gboolean tff)
 {
-    GstVaapiPictureH264 * const first_field = fs->buffers[0];
-    GstVaapiPictureH264 *second_field;
+  GstVaapiPictureH264 *const first_field = fs->buffers[0];
+  GstVaapiPictureH264 *second_field;
 
-    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
+  g_return_val_if_fail (fs->num_buffers == 1, FALSE);
 
-    first_field->base.structure = tff ?
-        GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD :
-        GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
-    GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
+  first_field->base.structure = tff ?
+      GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD :
+      GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
+  GST_VAAPI_PICTURE_FLAG_SET (first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
 
-    second_field = gst_vaapi_picture_h264_new_field(first_field);
-    if (!second_field)
-        return FALSE;
-    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
-    gst_vaapi_picture_unref(second_field);
-
-    second_field->frame_num    = first_field->frame_num;
-    second_field->field_poc[0] = first_field->field_poc[0];
-    second_field->field_poc[1] = first_field->field_poc[1];
-    second_field->output_flag  = first_field->output_flag;
-    if (second_field->output_flag) {
-        second_field->output_needed = TRUE;
-        fs->output_needed++;
-    }
-    return TRUE;
+  second_field = gst_vaapi_picture_h264_new_field (first_field);
+  if (!second_field)
+    return FALSE;
+  gst_vaapi_picture_replace (&fs->buffers[fs->num_buffers++], second_field);
+  gst_vaapi_picture_unref (second_field);
+
+  second_field->frame_num = first_field->frame_num;
+  second_field->field_poc[0] = first_field->field_poc[0];
+  second_field->field_poc[1] = first_field->field_poc[1];
+  second_field->output_flag = first_field->output_flag;
+  if (second_field->output_flag) {
+    second_field->output_needed = TRUE;
+    fs->output_needed++;
+  }
+  return TRUE;
 }
 
 static inline gboolean
-gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
+gst_vaapi_frame_store_has_frame (GstVaapiFrameStore * fs)
 {
-    return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+  return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
 }
 
 static inline gboolean
-gst_vaapi_frame_store_is_complete(GstVaapiFrameStore *fs)
+gst_vaapi_frame_store_is_complete (GstVaapiFrameStore * fs)
 {
-    return gst_vaapi_frame_store_has_frame(fs) ||
-        GST_VAAPI_PICTURE_IS_ONEFIELD(fs->buffers[0]);
+  return gst_vaapi_frame_store_has_frame (fs) ||
+      GST_VAAPI_PICTURE_IS_ONEFIELD (fs->buffers[0]);
 }
 
 static inline gboolean
-gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
+gst_vaapi_frame_store_has_reference (GstVaapiFrameStore * fs)
 {
-    guint i;
+  guint i;
 
-    for (i = 0; i < fs->num_buffers; i++) {
-        if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
-            return TRUE;
-    }
-    return FALSE;
+  for (i = 0; i < fs->num_buffers; i++) {
+    if (GST_VAAPI_PICTURE_IS_REFERENCE (fs->buffers[i]))
+      return TRUE;
+  }
+  return FALSE;
 }
 
 static gboolean
-gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
+gst_vaapi_frame_store_has_inter_view (GstVaapiFrameStore * fs)
 {
-    guint i;
+  guint i;
 
-    for (i = 0; i < fs->num_buffers; i++) {
-        if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
-            return TRUE;
-    }
-    return FALSE;
+  for (i = 0; i < fs->num_buffers; i++) {
+    if (GST_VAAPI_PICTURE_IS_INTER_VIEW (fs->buffers[i]))
+      return TRUE;
+  }
+  return FALSE;
 }
 
 #define gst_vaapi_frame_store_ref(fs) \
@@ -460,75 +458,76 @@ gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
     ((GstVaapiDecoderH264 *)(decoder))
 
-typedef enum {
-    GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
-    GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
-    GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
-    GST_H264_VIDEO_STATE_GOT_I_FRAME    = 1 << 3, // persistent across SPS
-    GST_H264_VIDEO_STATE_GOT_P_SLICE    = 1 << 4, // predictive (all non-intra)
-
-    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
-        GST_H264_VIDEO_STATE_GOT_SPS |
-        GST_H264_VIDEO_STATE_GOT_PPS),
-    GST_H264_VIDEO_STATE_VALID_PICTURE = (
-        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
-        GST_H264_VIDEO_STATE_GOT_SLICE)
+typedef enum
+{
+  GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
+  GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
+  GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
+  GST_H264_VIDEO_STATE_GOT_I_FRAME = 1 << 3,    // persistent across SPS
+  GST_H264_VIDEO_STATE_GOT_P_SLICE = 1 << 4,    // predictive (all non-intra)
+
+  GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (GST_H264_VIDEO_STATE_GOT_SPS |
+      GST_H264_VIDEO_STATE_GOT_PPS),
+  GST_H264_VIDEO_STATE_VALID_PICTURE =
+      (GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
+      GST_H264_VIDEO_STATE_GOT_SLICE)
 } GstH264VideoState;
 
-struct _GstVaapiDecoderH264Private {
-    GstH264NalParser           *parser;
-    guint                       parser_state;
-    guint                       decoder_state;
-    GstVaapiStreamAlignH264     stream_alignment;
-    GstVaapiPictureH264        *current_picture;
-    GstVaapiPictureH264        *missing_picture;
-    GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
-    GstVaapiParserInfoH264     *active_sps;
-    GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
-    GstVaapiParserInfoH264     *active_pps;
-    GstVaapiParserInfoH264     *prev_pi;
-    GstVaapiParserInfoH264     *prev_slice_pi;
-    GstVaapiFrameStore        **prev_ref_frames;
-    GstVaapiFrameStore        **prev_frames;
-    guint                       prev_frames_alloc;
-    GstVaapiFrameStore        **dpb;
-    guint                       dpb_count;
-    guint                       dpb_size;
-    guint                       dpb_size_max;
-    guint                       max_views;
-    GstVaapiProfile             profile;
-    GstVaapiEntrypoint          entrypoint;
-    GstVaapiChromaType          chroma_type;
-    GPtrArray                  *inter_views;
-    GstVaapiPictureH264        *short_ref[32];
-    guint                       short_ref_count;
-    GstVaapiPictureH264        *long_ref[32];
-    guint                       long_ref_count;
-    GstVaapiPictureH264        *RefPicList0[32];
-    guint                       RefPicList0_count;
-    GstVaapiPictureH264        *RefPicList1[32];
-    guint                       RefPicList1_count;
-    guint                       nal_length_size;
-    guint                       mb_width;
-    guint                       mb_height;
-    guint                       pic_structure;          // pic_struct (from SEI pic_timing() or inferred)
-    gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
-    gint32                      poc_msb;                // PicOrderCntMsb
-    gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
-    gint32                      prev_poc_msb;           // prevPicOrderCntMsb
-    gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
-    gint32                      frame_num_offset;       // FrameNumOffset
-    gint32                      frame_num;              // frame_num (from slice_header())
-    gint32                      prev_frame_num;         // prevFrameNum
-    gint32                      prev_ref_frame_num;     // prevRefFrameNum
-    gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
-    gboolean                    prev_pic_reference;     // previous picture is a reference
-    guint                       prev_pic_structure;     // previous picture structure
-    guint                       is_opened               : 1;
-    guint                       is_avcC                 : 1;
-    guint                       has_context             : 1;
-    guint                       progressive_sequence    : 1;
-    guint                       top_field_first         : 1;
+struct _GstVaapiDecoderH264Private
+{
+  GstH264NalParser *parser;
+  guint parser_state;
+  guint decoder_state;
+  GstVaapiStreamAlignH264 stream_alignment;
+  GstVaapiPictureH264 *current_picture;
+  GstVaapiPictureH264 *missing_picture;
+  GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
+  GstVaapiParserInfoH264 *active_sps;
+  GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
+  GstVaapiParserInfoH264 *active_pps;
+  GstVaapiParserInfoH264 *prev_pi;
+  GstVaapiParserInfoH264 *prev_slice_pi;
+  GstVaapiFrameStore **prev_ref_frames;
+  GstVaapiFrameStore **prev_frames;
+  guint prev_frames_alloc;
+  GstVaapiFrameStore **dpb;
+  guint dpb_count;
+  guint dpb_size;
+  guint dpb_size_max;
+  guint max_views;
+  GstVaapiProfile profile;
+  GstVaapiEntrypoint entrypoint;
+  GstVaapiChromaType chroma_type;
+  GPtrArray *inter_views;
+  GstVaapiPictureH264 *short_ref[32];
+  guint short_ref_count;
+  GstVaapiPictureH264 *long_ref[32];
+  guint long_ref_count;
+  GstVaapiPictureH264 *RefPicList0[32];
+  guint RefPicList0_count;
+  GstVaapiPictureH264 *RefPicList1[32];
+  guint RefPicList1_count;
+  guint nal_length_size;
+  guint mb_width;
+  guint mb_height;
+  guint pic_structure;          // pic_struct (from SEI pic_timing() or inferred)
+  gint32 field_poc[2];          // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
+  gint32 poc_msb;               // PicOrderCntMsb
+  gint32 poc_lsb;               // pic_order_cnt_lsb (from slice_header())
+  gint32 prev_poc_msb;          // prevPicOrderCntMsb
+  gint32 prev_poc_lsb;          // prevPicOrderCntLsb
+  gint32 frame_num_offset;      // FrameNumOffset
+  gint32 frame_num;             // frame_num (from slice_header())
+  gint32 prev_frame_num;        // prevFrameNum
+  gint32 prev_ref_frame_num;    // prevRefFrameNum
+  gboolean prev_pic_has_mmco5;  // prevMmco5Pic
+  gboolean prev_pic_reference;  // previous picture is a reference
+  guint prev_pic_structure;     // previous picture structure
+  guint is_opened:1;
+  guint is_avcC:1;
+  guint has_context:1;
+  guint progressive_sequence:1;
+  guint top_field_first:1;
 };
 
 /**
@@ -536,10 +535,11 @@ struct _GstVaapiDecoderH264Private {
  *
  * A decoder based on H264.
  */
-struct _GstVaapiDecoderH264 {
-    /*< private >*/
-    GstVaapiDecoder             parent_instance;
-    GstVaapiDecoderH264Private  priv;
+struct _GstVaapiDecoderH264
+{
+  /*< private >*/
+  GstVaapiDecoder parent_instance;
+  GstVaapiDecoderH264Private priv;
 };
 
 /**
@@ -547,161 +547,162 @@ struct _GstVaapiDecoderH264 {
  *
  * A decoder class based on H264.
  */
-struct _GstVaapiDecoderH264Class {
-    /*< private >*/
-    GstVaapiDecoderClass parent_class;
+struct _GstVaapiDecoderH264Class
+{
+  /*< private >*/
+  GstVaapiDecoderClass parent_class;
 };
 
 static gboolean
-exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
+exec_ref_pic_marking (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture);
 
 static gboolean
-exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder);
+exec_ref_pic_marking_sliding_window (GstVaapiDecoderH264 * decoder);
 
 static gboolean
-is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture);
+is_inter_view_reference_for_next_pictures (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture);
 
 static inline gboolean
-is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
-    GstVaapiFrameStore *fs)
+is_inter_view_reference_for_next_frames (GstVaapiDecoderH264 * decoder,
+    GstVaapiFrameStore * fs)
 {
-    return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
+  return is_inter_view_reference_for_next_pictures (decoder, fs->buffers[0]);
 }
 
 /* Determines if the supplied profile is one of the MVC set */
 static gboolean
-is_mvc_profile(GstH264Profile profile)
+is_mvc_profile (GstH264Profile profile)
 {
-    return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
-        profile == GST_H264_PROFILE_STEREO_HIGH;
+  return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
+      profile == GST_H264_PROFILE_STEREO_HIGH;
 }
 
 /* Determines the view_id from the supplied NAL unit */
 static inline guint
-get_view_id(GstH264NalUnit *nalu)
+get_view_id (GstH264NalUnit * nalu)
 {
-    return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
+  return GST_H264_IS_MVC_NALU (nalu) ? nalu->extension.mvc.view_id : 0;
 }
 
 /* Determines the view order index (VOIdx) from the supplied view_id */
 static gint
-get_view_order_index(GstH264SPS *sps, guint16 view_id)
+get_view_order_index (GstH264SPS * sps, guint16 view_id)
 {
-    GstH264SPSExtMVC *mvc;
-    gint i;
+  GstH264SPSExtMVC *mvc;
+  gint i;
 
-    if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
-        return 0;
+  if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+    return 0;
 
-    mvc = &sps->extension.mvc;
-    for (i = 0; i <= mvc->num_views_minus1; i++) {
-        if (mvc->view[i].view_id == view_id)
-            return i;
-    }
-    GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
-    return -1;
+  mvc = &sps->extension.mvc;
+  for (i = 0; i <= mvc->num_views_minus1; i++) {
+    if (mvc->view[i].view_id == view_id)
+      return i;
+  }
+  GST_ERROR ("failed to find VOIdx from view_id (%d)", view_id);
+  return -1;
 }
 
 /* Determines NumViews */
 static guint
-get_num_views(GstH264SPS *sps)
+get_num_views (GstH264SPS * sps)
 {
-    return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
-        sps->extension.mvc.num_views_minus1 : 0);
+  return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
+      sps->extension.mvc.num_views_minus1 : 0);
 }
 
 /* Get number of reference frames to use */
 static guint
-get_max_dec_frame_buffering(GstH264SPS *sps)
-{
-    guint num_views, max_dpb_frames;
-    guint max_dec_frame_buffering, PicSizeMbs;
-    GstVaapiLevelH264 level;
-    const GstVaapiH264LevelLimits *level_limits;
-
-    /* Table A-1 - Level limits */
-    if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
-        level = GST_VAAPI_LEVEL_H264_L1b;
-    else
-        level = gst_vaapi_utils_h264_get_level(sps->level_idc);
-    level_limits = gst_vaapi_utils_h264_get_level_limits(level);
-    if (G_UNLIKELY(!level_limits)) {
-        GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
-        max_dec_frame_buffering = 16;
-    }
+get_max_dec_frame_buffering (GstH264SPS * sps)
+{
+  guint num_views, max_dpb_frames;
+  guint max_dec_frame_buffering, PicSizeMbs;
+  GstVaapiLevelH264 level;
+  const GstVaapiH264LevelLimits *level_limits;
+
+  /* Table A-1 - Level limits */
+  if (G_UNLIKELY (sps->level_idc == 11 && sps->constraint_set3_flag))
+    level = GST_VAAPI_LEVEL_H264_L1b;
+  else
+    level = gst_vaapi_utils_h264_get_level (sps->level_idc);
+  level_limits = gst_vaapi_utils_h264_get_level_limits (level);
+  if (G_UNLIKELY (!level_limits)) {
+    GST_FIXME ("unsupported level_idc value (%d)", sps->level_idc);
+    max_dec_frame_buffering = 16;
+  } else {
+    PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
+        (sps->pic_height_in_map_units_minus1 + 1) *
+        (sps->frame_mbs_only_flag ? 1 : 2));
+    max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
+  }
+  if (is_mvc_profile (sps->profile_idc))
+    max_dec_frame_buffering <<= 1;
+
+  /* VUI parameters */
+  if (sps->vui_parameters_present_flag) {
+    GstH264VUIParams *const vui_params = &sps->vui_parameters;
+    if (vui_params->bitstream_restriction_flag)
+      max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
     else {
-        PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
-                      (sps->pic_height_in_map_units_minus1 + 1) *
-                      (sps->frame_mbs_only_flag ? 1 : 2));
-        max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
-    }
-    if (is_mvc_profile(sps->profile_idc))
-        max_dec_frame_buffering <<= 1;
-
-    /* VUI parameters */
-    if (sps->vui_parameters_present_flag) {
-        GstH264VUIParams * const vui_params = &sps->vui_parameters;
-        if (vui_params->bitstream_restriction_flag)
-            max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
-        else {
-            switch (sps->profile_idc) {
-            case 44:  // CAVLC 4:4:4 Intra profile
-            case GST_H264_PROFILE_SCALABLE_HIGH:
-            case GST_H264_PROFILE_HIGH:
-            case GST_H264_PROFILE_HIGH10:
-            case GST_H264_PROFILE_HIGH_422:
-            case GST_H264_PROFILE_HIGH_444:
-                if (sps->constraint_set3_flag)
-                    max_dec_frame_buffering = 0;
-                break;
-            }
-        }
-    }
-
-    num_views = get_num_views(sps);
-    max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
-    if (max_dec_frame_buffering > max_dpb_frames)
-        max_dec_frame_buffering = max_dpb_frames;
-    else if (max_dec_frame_buffering < sps->num_ref_frames)
-        max_dec_frame_buffering = sps->num_ref_frames;
-    return MAX(1, max_dec_frame_buffering);
+      switch (sps->profile_idc) {
+        case 44:               // CAVLC 4:4:4 Intra profile
+        case GST_H264_PROFILE_SCALABLE_HIGH:
+        case GST_H264_PROFILE_HIGH:
+        case GST_H264_PROFILE_HIGH10:
+        case GST_H264_PROFILE_HIGH_422:
+        case GST_H264_PROFILE_HIGH_444:
+          if (sps->constraint_set3_flag)
+            max_dec_frame_buffering = 0;
+          break;
+      }
+    }
+  }
+
+  num_views = get_num_views (sps);
+  max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage (num_views - 1) : 1);
+  if (max_dec_frame_buffering > max_dpb_frames)
+    max_dec_frame_buffering = max_dpb_frames;
+  else if (max_dec_frame_buffering < sps->num_ref_frames)
+    max_dec_frame_buffering = sps->num_ref_frames;
+  return MAX (1, max_dec_frame_buffering);
 }
 
 static void
-array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
+array_remove_index_fast (void *array, guint * array_length_ptr, guint index)
 {
-    gpointer * const entries = array;
-    guint num_entries = *array_length_ptr;
+  gpointer *const entries = array;
+  guint num_entries = *array_length_ptr;
 
-    g_return_if_fail(index < num_entries);
+  g_return_if_fail (index < num_entries);
 
-    if (index != --num_entries)
-        entries[index] = entries[num_entries];
-    entries[num_entries] = NULL;
-    *array_length_ptr = num_entries;
+  if (index != --num_entries)
+    entries[index] = entries[num_entries];
+  entries[num_entries] = NULL;
+  *array_length_ptr = num_entries;
 }
 
 #if 1
 static inline void
-array_remove_index(void *array, guint *array_length_ptr, guint index)
+array_remove_index (void *array, guint * array_length_ptr, guint index)
 {
-    array_remove_index_fast(array, array_length_ptr, index);
+  array_remove_index_fast (array, array_length_ptr, index);
 }
 #else
 static void
-array_remove_index(void *array, guint *array_length_ptr, guint index)
+array_remove_index (void *array, guint * array_length_ptr, guint index)
 {
-    gpointer * const entries = array;
-    const guint num_entries = *array_length_ptr - 1;
-    guint i;
+  gpointer *const entries = array;
+  const guint num_entries = *array_length_ptr - 1;
+  guint i;
 
-    g_return_if_fail(index <= num_entries);
+  g_return_if_fail (index <= num_entries);
 
-    for (i = index; i < num_entries; i++)
-        entries[i] = entries[i + 1];
-    entries[num_entries] = NULL;
-    *array_length_ptr = num_entries;
+  for (i = index; i < num_entries; i++)
+    entries[i] = entries[i + 1];
+  entries[num_entries] = NULL;
+  *array_length_ptr = num_entries;
 }
 #endif
 
@@ -709,1690 +710,1654 @@ array_remove_index(void *array, guint *array_length_ptr, guint index)
     array_remove_index(array, &array##_count, index)
 
 static void
-dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
+dpb_remove_index (GstVaapiDecoderH264 * decoder, guint index)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i, num_frames = --priv->dpb_count;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i, num_frames = --priv->dpb_count;
 
-    if (USE_STRICT_DPB_ORDERING) {
-        for (i = index; i < num_frames; i++)
-            gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
-    }
-    else if (index != num_frames)
-        gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
-    gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
+  if (USE_STRICT_DPB_ORDERING) {
+    for (i = index; i < num_frames; i++)
+      gst_vaapi_frame_store_replace (&priv->dpb[i], priv->dpb[i + 1]);
+  } else if (index != num_frames)
+    gst_vaapi_frame_store_replace (&priv->dpb[index], priv->dpb[num_frames]);
+  gst_vaapi_frame_store_replace (&priv->dpb[num_frames], NULL);
 }
 
 static gboolean
-dpb_output(GstVaapiDecoderH264 *decoder, GstVaapiFrameStore *fs)
+dpb_output (GstVaapiDecoderH264 * decoder, GstVaapiFrameStore * fs)
 {
-    GstVaapiPictureH264 *picture = NULL;
-    guint i;
+  GstVaapiPictureH264 *picture = NULL;
+  guint i;
 
-    g_return_val_if_fail(fs != NULL, FALSE);
+  g_return_val_if_fail (fs != NULL, FALSE);
 
-    fs->output_called++;
-    if (!gst_vaapi_frame_store_is_complete(fs))
-        return TRUE;
-
-    for (i = 0; i < fs->num_buffers; i++) {
-        GstVaapiPictureH264 * const pic = fs->buffers[i];
-        g_return_val_if_fail(pic != NULL, FALSE);
-        pic->output_needed = FALSE;
-        if (!GST_VAAPI_PICTURE_FLAG_IS_SET(pic, GST_VAAPI_PICTURE_FLAG_GHOST))
-            picture = pic;
-    }
+  fs->output_called++;
+  if (!gst_vaapi_frame_store_is_complete (fs))
+    return TRUE;
 
-    fs->output_needed = 0;
-    fs->output_called = 0;
-    if (!picture)
-        return TRUE;
-    return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
+  for (i = 0; i < fs->num_buffers; i++) {
+    GstVaapiPictureH264 *const pic = fs->buffers[i];
+    g_return_val_if_fail (pic != NULL, FALSE);
+    pic->output_needed = FALSE;
+    if (!GST_VAAPI_PICTURE_FLAG_IS_SET (pic, GST_VAAPI_PICTURE_FLAG_GHOST))
+      picture = pic;
+  }
+
+  fs->output_needed = 0;
+  fs->output_called = 0;
+  if (!picture)
+    return TRUE;
+  return gst_vaapi_picture_output (GST_VAAPI_PICTURE_CAST (picture));
 }
 
 static inline void
-dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
+dpb_evict (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture,
+    guint i)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiFrameStore * const fs = priv->dpb[i];
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiFrameStore *const fs = priv->dpb[i];
 
-    if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
-        dpb_remove_index(decoder, i);
+  if (!fs->output_needed && !gst_vaapi_frame_store_has_reference (fs))
+    dpb_remove_index (decoder, i);
 }
 
 /* Finds the picture with the nearest previous POC and same structure */
 static gint
-dpb_find_nearest_prev_poc(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture, guint picture_structure,
-    GstVaapiPictureH264 **found_picture_ptr)
+dpb_find_nearest_prev_poc (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, guint picture_structure,
+    GstVaapiPictureH264 ** found_picture_ptr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *found_picture = NULL;
-    guint i, j, found_index;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *found_picture = NULL;
+  guint i, j, found_index;
 
-    g_return_val_if_fail(picture != NULL, -1);
+  g_return_val_if_fail (picture != NULL, -1);
 
-    if (!picture_structure)
-        picture_structure = picture->base.structure;
+  if (!picture_structure)
+    picture_structure = picture->base.structure;
 
-    for (i = 0; i < priv->dpb_count; i++) {
-        GstVaapiFrameStore * const fs = priv->dpb[i];
-        if (picture->base.view_id != fs->view_id)
-            continue;
-        for (j = 0; j < fs->num_buffers; j++) {
-            GstVaapiPictureH264 * const pic = fs->buffers[j];
-            if (pic->base.structure != picture_structure)
-                continue;
-            if (pic->base.poc >= picture->base.poc)
-                continue;
-            if (!found_picture || found_picture->base.poc < pic->base.poc)
-                found_picture = pic, found_index = i;
-        }
+  for (i = 0; i < priv->dpb_count; i++) {
+    GstVaapiFrameStore *const fs = priv->dpb[i];
+    if (picture->base.view_id != fs->view_id)
+      continue;
+    for (j = 0; j < fs->num_buffers; j++) {
+      GstVaapiPictureH264 *const pic = fs->buffers[j];
+      if (pic->base.structure != picture_structure)
+        continue;
+      if (pic->base.poc >= picture->base.poc)
+        continue;
+      if (!found_picture || found_picture->base.poc < pic->base.poc)
+        found_picture = pic, found_index = i;
     }
+  }
 
-    if (found_picture_ptr)
-        *found_picture_ptr = found_picture;
-    return found_picture ? found_index : -1;
+  if (found_picture_ptr)
+    *found_picture_ptr = found_picture;
+  return found_picture ? found_index : -1;
 }
 
 /* Finds the picture with the lowest POC that needs to be output */
 static gint
-dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
-    GstVaapiPictureH264 **found_picture_ptr)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *found_picture = NULL;
-    guint i, j, found_index;
-
-    for (i = 0; i < priv->dpb_count; i++) {
-        GstVaapiFrameStore * const fs = priv->dpb[i];
-        if (!fs->output_needed)
-            continue;
-        if (picture && picture->base.view_id != fs->view_id)
-            continue;
-        for (j = 0; j < fs->num_buffers; j++) {
-            GstVaapiPictureH264 * const pic = fs->buffers[j];
-            if (!pic->output_needed)
-                continue;
-            if (!found_picture || found_picture->base.poc > pic->base.poc ||
-                (found_picture->base.poc == pic->base.poc &&
-                 found_picture->base.voc > pic->base.voc))
-                found_picture = pic, found_index = i;
-        }
-    }
-
-    if (found_picture_ptr)
-        *found_picture_ptr = found_picture;
-    return found_picture ? found_index : -1;
+dpb_find_lowest_poc (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstVaapiPictureH264 ** found_picture_ptr)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *found_picture = NULL;
+  guint i, j, found_index;
+
+  for (i = 0; i < priv->dpb_count; i++) {
+    GstVaapiFrameStore *const fs = priv->dpb[i];
+    if (!fs->output_needed)
+      continue;
+    if (picture && picture->base.view_id != fs->view_id)
+      continue;
+    for (j = 0; j < fs->num_buffers; j++) {
+      GstVaapiPictureH264 *const pic = fs->buffers[j];
+      if (!pic->output_needed)
+        continue;
+      if (!found_picture || found_picture->base.poc > pic->base.poc ||
+          (found_picture->base.poc == pic->base.poc &&
+              found_picture->base.voc > pic->base.voc))
+        found_picture = pic, found_index = i;
+    }
+  }
+
+  if (found_picture_ptr)
+    *found_picture_ptr = found_picture;
+  return found_picture ? found_index : -1;
 }
 
 /* Finds the picture with the lowest VOC that needs to be output */
 static gint
-dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
-    GstVaapiPictureH264 **found_picture_ptr)
+dpb_find_lowest_voc (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstVaapiPictureH264 ** found_picture_ptr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *found_picture = NULL;
-    guint i, j, found_index;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *found_picture = NULL;
+  guint i, j, found_index;
 
-    for (i = 0; i < priv->dpb_count; i++) {
-        GstVaapiFrameStore * const fs = priv->dpb[i];
-        if (!fs->output_needed || fs->view_id == picture->base.view_id)
-            continue;
-        for (j = 0; j < fs->num_buffers; j++) {
-            GstVaapiPictureH264 * const pic = fs->buffers[j];
-            if (!pic->output_needed || pic->base.poc != picture->base.poc)
-                continue;
-            if (!found_picture || found_picture->base.voc > pic->base.voc)
-                found_picture = pic, found_index = i;
-        }
+  for (i = 0; i < priv->dpb_count; i++) {
+    GstVaapiFrameStore *const fs = priv->dpb[i];
+    if (!fs->output_needed || fs->view_id == picture->base.view_id)
+      continue;
+    for (j = 0; j < fs->num_buffers; j++) {
+      GstVaapiPictureH264 *const pic = fs->buffers[j];
+      if (!pic->output_needed || pic->base.poc != picture->base.poc)
+        continue;
+      if (!found_picture || found_picture->base.voc > pic->base.voc)
+        found_picture = pic, found_index = i;
     }
+  }
 
-    if (found_picture_ptr)
-        *found_picture_ptr = found_picture;
-    return found_picture ? found_index : -1;
+  if (found_picture_ptr)
+    *found_picture_ptr = found_picture;
+  return found_picture ? found_index : -1;
 }
 
 static gboolean
-dpb_output_other_views(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture, guint voc)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *found_picture;
-    gint found_index;
-    gboolean success;
-
-    if (priv->max_views == 1)
-        return TRUE;
-
-    /* Emit all other view components that were in the same access
-       unit than the picture we have just found */
-    found_picture = picture;
-    for (;;) {
-        found_index = dpb_find_lowest_voc(decoder, found_picture,
-            &found_picture);
-        if (found_index < 0 || found_picture->base.voc >= voc)
-            break;
-        success = dpb_output(decoder, priv->dpb[found_index]);
-        dpb_evict(decoder, found_picture, found_index);
-        if (!success)
-            return FALSE;
-    }
+dpb_output_other_views (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, guint voc)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *found_picture;
+  gint found_index;
+  gboolean success;
+
+  if (priv->max_views == 1)
     return TRUE;
+
+  /* Emit all other view components that were in the same access
+     unit as the picture we have just found */
+  found_picture = picture;
+  for (;;) {
+    found_index = dpb_find_lowest_voc (decoder, found_picture, &found_picture);
+    if (found_index < 0 || found_picture->base.voc >= voc)
+      break;
+    success = dpb_output (decoder, priv->dpb[found_index]);
+    dpb_evict (decoder, found_picture, found_index);
+    if (!success)
+      return FALSE;
+  }
+  return TRUE;
 }
 
 static gboolean
-dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+dpb_bump (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *found_picture;
-    gint found_index;
-    gboolean success;
-
-    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
-    if (found_index < 0)
-        return FALSE;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *found_picture;
+  gint found_index;
+  gboolean success;
 
-    if (picture && picture->base.poc != found_picture->base.poc)
-        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
+  found_index = dpb_find_lowest_poc (decoder, picture, &found_picture);
+  if (found_index < 0)
+    return FALSE;
 
-    success = dpb_output(decoder, priv->dpb[found_index]);
-    dpb_evict(decoder, found_picture, found_index);
-    if (priv->max_views == 1)
-        return success;
+  if (picture && picture->base.poc != found_picture->base.poc)
+    dpb_output_other_views (decoder, found_picture, found_picture->base.voc);
 
-    if (picture && picture->base.poc != found_picture->base.poc)
-        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
+  success = dpb_output (decoder, priv->dpb[found_index]);
+  dpb_evict (decoder, found_picture, found_index);
+  if (priv->max_views == 1)
     return success;
+
+  if (picture && picture->base.poc != found_picture->base.poc)
+    dpb_output_other_views (decoder, found_picture, G_MAXUINT32);
+  return success;
 }
 
 static void
-dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i, n;
-
-    for (i = 0; i < priv->dpb_count; i++) {
-        if (picture && picture->base.view_id != priv->dpb[i]->view_id)
-            continue;
-        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
-    }
-
-    /* Compact the resulting DPB, i.e. remove holes */
-    for (i = 0, n = 0; i < priv->dpb_count; i++) {
-        if (priv->dpb[i]) {
-            if (i != n) {
-                priv->dpb[n] = priv->dpb[i];
-                priv->dpb[i] = NULL;
-            }
-            n++;
-        }
-    }
-    priv->dpb_count = n;
-
-    /* Clear previous frame buffers only if this is a "flush-all" operation,
-       or if the picture is the first one in the access unit */
-    if (priv->prev_frames && (!picture ||
-            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_AU_START))) {
-        for (i = 0; i < priv->max_views; i++)
-            gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
-    }
-
-    /* Clear previous reference frame buffers only if this is a "flush-all"
-       operation, or if the picture is part of an IDR NAL */
-    if (priv->prev_ref_frames && (!picture ||
-            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_IDR))) {
-        for (i = 0; i < priv->max_views; i++)
-            gst_vaapi_frame_store_replace(&priv->prev_ref_frames[i], NULL);
-    }
+dpb_clear (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i, n;
+
+  for (i = 0; i < priv->dpb_count; i++) {
+    if (picture && picture->base.view_id != priv->dpb[i]->view_id)
+      continue;
+    gst_vaapi_frame_store_replace (&priv->dpb[i], NULL);
+  }
+
+  /* Compact the resulting DPB, i.e. remove holes */
+  for (i = 0, n = 0; i < priv->dpb_count; i++) {
+    if (priv->dpb[i]) {
+      if (i != n) {
+        priv->dpb[n] = priv->dpb[i];
+        priv->dpb[i] = NULL;
+      }
+      n++;
+    }
+  }
+  priv->dpb_count = n;
+
+  /* Clear previous frame buffers only if this is a "flush-all" operation,
+     or if the picture is the first one in the access unit */
+  if (priv->prev_frames && (!picture ||
+          GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
+              GST_VAAPI_PICTURE_FLAG_AU_START))) {
+    for (i = 0; i < priv->max_views; i++)
+      gst_vaapi_frame_store_replace (&priv->prev_frames[i], NULL);
+  }
+
+  /* Clear previous reference frame buffers only if this is a "flush-all"
+     operation, or if the picture is part of an IDR NAL */
+  if (priv->prev_ref_frames && (!picture ||
+          GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
+              GST_VAAPI_PICTURE_FLAG_IDR))) {
+    for (i = 0; i < priv->max_views; i++)
+      gst_vaapi_frame_store_replace (&priv->prev_ref_frames[i], NULL);
+  }
 }
 
 static void
-dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+dpb_flush (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i;
 
-    /* Detect broken frames and mark them as having a single field if
-       needed */
-    for (i = 0; i < priv->dpb_count; i++) {
-        GstVaapiFrameStore * const fs = priv->dpb[i];
-        if (!fs->output_needed || gst_vaapi_frame_store_is_complete(fs))
-            continue;
-        GST_VAAPI_PICTURE_FLAG_SET(fs->buffers[0],
-            GST_VAAPI_PICTURE_FLAG_ONEFIELD);
-    }
+  /* Detect broken frames and mark them as having a single field if
+     needed */
+  for (i = 0; i < priv->dpb_count; i++) {
+    GstVaapiFrameStore *const fs = priv->dpb[i];
+    if (!fs->output_needed || gst_vaapi_frame_store_is_complete (fs))
+      continue;
+    GST_VAAPI_PICTURE_FLAG_SET (fs->buffers[0],
+        GST_VAAPI_PICTURE_FLAG_ONEFIELD);
+  }
 
-    /* Output any frame remaining in DPB */
-    while (dpb_bump(decoder, picture))
-        ;
-    dpb_clear(decoder, picture);
+  /* Output any frame remaining in DPB */
+  while (dpb_bump (decoder, picture));
+  dpb_clear (decoder, picture);
 }
 
 static void
-dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    const gboolean is_last_picture = /* in the access unit */
-        GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
-    guint i;
-
-    // Remove all unused inter-view only reference components of the current AU
-    i = 0;
-    while (i < priv->dpb_count) {
-        GstVaapiFrameStore * const fs = priv->dpb[i];
-        if (fs->view_id != picture->base.view_id &&
-            !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
-            (is_last_picture ||
-             !is_inter_view_reference_for_next_frames(decoder, fs)))
-            dpb_remove_index(decoder, i);
-        else
-            i++;
-    }
+dpb_prune_mvc (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  const gboolean is_last_picture =      /* in the access unit */
+      GST_VAAPI_PICTURE_FLAG_IS_SET (picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+  guint i;
+
+  // Remove all unused inter-view only reference components of the current AU
+  i = 0;
+  while (i < priv->dpb_count) {
+    GstVaapiFrameStore *const fs = priv->dpb[i];
+    if (fs->view_id != picture->base.view_id &&
+        !fs->output_needed && !gst_vaapi_frame_store_has_reference (fs) &&
+        (is_last_picture ||
+            !is_inter_view_reference_for_next_frames (decoder, fs)))
+      dpb_remove_index (decoder, i);
+    else
+      i++;
+  }
 }
 
 static gboolean
-dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiFrameStore *fs;
-    guint i;
-
-    if (priv->max_views > 1)
-        dpb_prune_mvc(decoder, picture);
-
-    // Remove all unused pictures
-    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
-        i = 0;
-        while (i < priv->dpb_count) {
-            GstVaapiFrameStore * const fs = priv->dpb[i];
-            if (fs->view_id == picture->base.view_id &&
-                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
-                dpb_remove_index(decoder, i);
-            else
-                i++;
-        }
-    }
+dpb_add (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiFrameStore *fs;
+  guint i;
 
-    // Check if picture is the second field and the first field is still in DPB
-    if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
-        !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
-        fs = priv->prev_frames[picture->base.voc];
-        if (!fs || &fs->buffers[0]->base != picture->base.parent_picture)
-            return FALSE;
-        if (!gst_vaapi_frame_store_add(fs, picture))
-            return FALSE;
-
-        if (fs->output_called)
-            return dpb_output(decoder, fs);
-        return TRUE;
-    }
+  if (priv->max_views > 1)
+    dpb_prune_mvc (decoder, picture);
 
-    // Try to output the previous frame again if it was not submitted yet
-    // e.g. delayed while waiting for the next field, or a field gap was closed
+  // Remove all unused pictures
+  if (!GST_VAAPI_PICTURE_IS_IDR (picture)) {
+    i = 0;
+    while (i < priv->dpb_count) {
+      GstVaapiFrameStore *const fs = priv->dpb[i];
+      if (fs->view_id == picture->base.view_id &&
+          !fs->output_needed && !gst_vaapi_frame_store_has_reference (fs))
+        dpb_remove_index (decoder, i);
+      else
+        i++;
+    }
+  }
+  // Check if picture is the second field and the first field is still in DPB
+  if (GST_VAAPI_PICTURE_IS_INTERLACED (picture) &&
+      !GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture)) {
     fs = priv->prev_frames[picture->base.voc];
-    if (fs && fs->output_called)
-        dpb_output(decoder, fs);
+    if (!fs || &fs->buffers[0]->base != picture->base.parent_picture)
+      return FALSE;
+    if (!gst_vaapi_frame_store_add (fs, picture))
+      return FALSE;
 
-    // Create new frame store, and split fields if necessary
-    fs = gst_vaapi_frame_store_new(picture);
-    if (!fs)
+    if (fs->output_called)
+      return dpb_output (decoder, fs);
+    return TRUE;
+  }
+  // Try to output the previous frame again if it was not submitted yet
+  // e.g. delayed while waiting for the next field, or a field gap was closed
+  fs = priv->prev_frames[picture->base.voc];
+  if (fs && fs->output_called)
+    dpb_output (decoder, fs);
+
+  // Create new frame store, and split fields if necessary
+  fs = gst_vaapi_frame_store_new (picture);
+  if (!fs)
+    return FALSE;
+  gst_vaapi_frame_store_replace (&priv->prev_frames[picture->base.voc], fs);
+  gst_vaapi_frame_store_unref (fs);
+
+  if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame (fs)) {
+    if (!gst_vaapi_frame_store_split_fields (fs, priv->top_field_first))
+      return FALSE;
+  }
+  // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
+  if (GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
+    while (priv->dpb_count == priv->dpb_size) {
+      if (!dpb_bump (decoder, picture))
         return FALSE;
-    gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
-    gst_vaapi_frame_store_unref(fs);
-
-    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
-        if (!gst_vaapi_frame_store_split_fields(fs, priv->top_field_first))
-            return FALSE;
-    }
-
-    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
-    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
-        while (priv->dpb_count == priv->dpb_size) {
-            if (!dpb_bump(decoder, picture))
-                return FALSE;
-        }
-        gst_vaapi_frame_store_replace(&priv->prev_ref_frames[picture->base.voc],
-            fs);
     }
-
-    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
-    else {
-        const gboolean StoreInterViewOnlyRefFlag =
-            !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_AU_END) &&
-            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
-        if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
-            return TRUE;
-        while (priv->dpb_count == priv->dpb_size) {
-            GstVaapiPictureH264 *found_picture;
-            if (!StoreInterViewOnlyRefFlag) {
-                if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
-                    found_picture->base.poc > picture->base.poc)
-                    return dpb_output(decoder, fs);
-            }
-            if (!dpb_bump(decoder, picture))
-                return FALSE;
-        }
+    gst_vaapi_frame_store_replace (&priv->prev_ref_frames[picture->base.voc],
+        fs);
+  }
+  // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
+  else {
+    const gboolean StoreInterViewOnlyRefFlag =
+        !GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
+        GST_VAAPI_PICTURE_FLAG_AU_END) &&
+        GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
+        GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+    if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
+      return TRUE;
+    while (priv->dpb_count == priv->dpb_size) {
+      GstVaapiPictureH264 *found_picture;
+      if (!StoreInterViewOnlyRefFlag) {
+        if (dpb_find_lowest_poc (decoder, picture, &found_picture) < 0 ||
+            found_picture->base.poc > picture->base.poc)
+          return dpb_output (decoder, fs);
+      }
+      if (!dpb_bump (decoder, picture))
+        return FALSE;
     }
-    gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
-    return TRUE;
+  }
+  gst_vaapi_frame_store_replace (&priv->dpb[priv->dpb_count++], fs);
+  return TRUE;
 }
 
 static gboolean
-dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
+dpb_reset (GstVaapiDecoderH264 * decoder, guint dpb_size)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
 
-    if (dpb_size > priv->dpb_size_max) {
-        priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
-        if (!priv->dpb)
-            return FALSE;
-        memset(&priv->dpb[priv->dpb_size_max], 0,
-            (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
-        priv->dpb_size_max = dpb_size;
-    }
-    priv->dpb_size = dpb_size;
+  if (dpb_size > priv->dpb_size_max) {
+    priv->dpb = g_try_realloc_n (priv->dpb, dpb_size, sizeof (*priv->dpb));
+    if (!priv->dpb)
+      return FALSE;
+    memset (&priv->dpb[priv->dpb_size_max], 0,
+        (dpb_size - priv->dpb_size_max) * sizeof (*priv->dpb));
+    priv->dpb_size_max = dpb_size;
+  }
+  priv->dpb_size = dpb_size;
 
-    GST_DEBUG("DPB size %u", priv->dpb_size);
-    return TRUE;
+  GST_DEBUG ("DPB size %u", priv->dpb_size);
+  return TRUE;
 }
 
 static void
-unref_inter_view(GstVaapiPictureH264 *picture)
+unref_inter_view (GstVaapiPictureH264 * picture)
 {
-    if (!picture)
-        return;
-    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
-    gst_vaapi_picture_unref(picture);
+  if (!picture)
+    return;
+  GST_VAAPI_PICTURE_FLAG_UNSET (picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+  gst_vaapi_picture_unref (picture);
 }
 
 /* Resets MVC resources */
 static gboolean
-mvc_reset(GstVaapiDecoderH264 *decoder)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i;
-
-    // Resize array of inter-view references
-    if (!priv->inter_views) {
-        priv->inter_views = g_ptr_array_new_full(priv->max_views,
-            (GDestroyNotify)unref_inter_view);
-        if (!priv->inter_views)
-            return FALSE;
-    }
-
-    // Resize array of previous frame buffers
-    for (i = priv->max_views; i < priv->prev_frames_alloc; i++) {
-        gst_vaapi_frame_store_replace(&priv->prev_ref_frames[i], NULL);
-        gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
-    }
-
-    priv->prev_ref_frames = g_try_realloc_n(priv->prev_ref_frames,
-        priv->max_views, sizeof(*priv->prev_ref_frames));
-    if (!priv->prev_ref_frames)
-        goto error_allocate;
-
-    priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
-        sizeof(*priv->prev_frames));
-    if (!priv->prev_frames)
-        goto error_allocate;
-
-    for (i = priv->prev_frames_alloc; i < priv->max_views; i++) {
-        priv->prev_ref_frames[i] = NULL;
-        priv->prev_frames[i] = NULL;
-    }
-    priv->prev_frames_alloc = priv->max_views;
-    return TRUE;
-
-    /* ERRORS */
+mvc_reset (GstVaapiDecoderH264 * decoder)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i;
+
+  // Resize array of inter-view references
+  if (!priv->inter_views) {
+    priv->inter_views = g_ptr_array_new_full (priv->max_views,
+        (GDestroyNotify) unref_inter_view);
+    if (!priv->inter_views)
+      return FALSE;
+  }
+  // Resize array of previous frame buffers
+  for (i = priv->max_views; i < priv->prev_frames_alloc; i++) {
+    gst_vaapi_frame_store_replace (&priv->prev_ref_frames[i], NULL);
+    gst_vaapi_frame_store_replace (&priv->prev_frames[i], NULL);
+  }
+
+  priv->prev_ref_frames = g_try_realloc_n (priv->prev_ref_frames,
+      priv->max_views, sizeof (*priv->prev_ref_frames));
+  if (!priv->prev_ref_frames)
+    goto error_allocate;
+
+  priv->prev_frames = g_try_realloc_n (priv->prev_frames, priv->max_views,
+      sizeof (*priv->prev_frames));
+  if (!priv->prev_frames)
+    goto error_allocate;
+
+  for (i = priv->prev_frames_alloc; i < priv->max_views; i++) {
+    priv->prev_ref_frames[i] = NULL;
+    priv->prev_frames[i] = NULL;
+  }
+  priv->prev_frames_alloc = priv->max_views;
+  return TRUE;
+
+  /* ERRORS */
 error_allocate:
-    g_free(priv->prev_ref_frames);
-    priv->prev_ref_frames = NULL;
-    g_free(priv->prev_frames);
-    priv->prev_frames = NULL;
-    priv->prev_frames_alloc = 0;
-    return FALSE;
+  g_free (priv->prev_ref_frames);
+  priv->prev_ref_frames = NULL;
+  g_free (priv->prev_frames);
+  priv->prev_frames = NULL;
+  priv->prev_frames_alloc = 0;
+  return FALSE;
 }
 
 static GstVaapiDecoderStatus
-get_status(GstH264ParserResult result)
+get_status (GstH264ParserResult result)
 {
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderStatus status;
 
-    switch (result) {
+  switch (result) {
     case GST_H264_PARSER_OK:
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-        break;
+      status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+      break;
     case GST_H264_PARSER_NO_NAL_END:
-        status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        break;
+      status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+      break;
     case GST_H264_PARSER_ERROR:
-        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-        break;
+      status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      break;
     default:
-        status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-        break;
-    }
-    return status;
+      status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+      break;
+  }
+  return status;
 }
 
 static void
-gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
+gst_vaapi_decoder_h264_close (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
 
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    gst_vaapi_picture_replace(&priv->missing_picture, NULL);
-    gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
-    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  gst_vaapi_picture_replace (&priv->missing_picture, NULL);
+  gst_vaapi_parser_info_h264_replace (&priv->prev_slice_pi, NULL);
+  gst_vaapi_parser_info_h264_replace (&priv->prev_pi, NULL);
 
-    dpb_clear(decoder, NULL);
+  dpb_clear (decoder, NULL);
 
-    if (priv->inter_views) {
-        g_ptr_array_unref(priv->inter_views);
-        priv->inter_views = NULL;
-    }
+  if (priv->inter_views) {
+    g_ptr_array_unref (priv->inter_views);
+    priv->inter_views = NULL;
+  }
 
-    if (priv->parser) {
-        gst_h264_nal_parser_free(priv->parser);
-        priv->parser = NULL;
-    }
+  if (priv->parser) {
+    gst_h264_nal_parser_free (priv->parser);
+    priv->parser = NULL;
+  }
 }
 
 static gboolean
-gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
+gst_vaapi_decoder_h264_open (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
 
-    gst_vaapi_decoder_h264_close(decoder);
+  gst_vaapi_decoder_h264_close (decoder);
 
-    priv->parser = gst_h264_nal_parser_new();
-    if (!priv->parser)
-        return FALSE;
-    return TRUE;
+  priv->parser = gst_h264_nal_parser_new ();
+  if (!priv->parser)
+    return FALSE;
+  return TRUE;
 }
 
 static void
-gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_h264_destroy (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i;
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i;
 
-    gst_vaapi_decoder_h264_close(decoder);
+  gst_vaapi_decoder_h264_close (decoder);
 
-    g_free(priv->dpb);
-    priv->dpb = NULL;
-    priv->dpb_size = 0;
+  g_free (priv->dpb);
+  priv->dpb = NULL;
+  priv->dpb_size = 0;
 
-    g_free(priv->prev_ref_frames);
-    priv->prev_ref_frames = NULL;
-    g_free(priv->prev_frames);
-    priv->prev_frames = NULL;
-    priv->prev_frames_alloc = 0;
+  g_free (priv->prev_ref_frames);
+  priv->prev_ref_frames = NULL;
+  g_free (priv->prev_frames);
+  priv->prev_frames = NULL;
+  priv->prev_frames_alloc = 0;
 
-    for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
-        gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
-    gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
+  for (i = 0; i < G_N_ELEMENTS (priv->pps); i++)
+    gst_vaapi_parser_info_h264_replace (&priv->pps[i], NULL);
+  gst_vaapi_parser_info_h264_replace (&priv->active_pps, NULL);
 
-    for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
-        gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
-    gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
+  for (i = 0; i < G_N_ELEMENTS (priv->sps); i++)
+    gst_vaapi_parser_info_h264_replace (&priv->sps[i], NULL);
+  gst_vaapi_parser_info_h264_replace (&priv->active_sps, NULL);
 }
 
 static gboolean
-gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
-{
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-
-    priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
-    priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
-    priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
-    priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
-    priv->progressive_sequence  = TRUE;
-    priv->top_field_first       = FALSE;
-    return TRUE;
+gst_vaapi_decoder_h264_create (GstVaapiDecoder * base_decoder)
+{
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+
+  priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
+  priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+  priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+  priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+  priv->progressive_sequence = TRUE;
+  priv->top_field_first = FALSE;
+  return TRUE;
 }
 
 /* Activates the supplied PPS */
 static GstH264PPS *
-ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
+ensure_pps (GstVaapiDecoderH264 * decoder, GstH264PPS * pps)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = priv->pps[pps->id];
 
-    gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
-    return pi ? &pi->data.pps : NULL;
+  gst_vaapi_parser_info_h264_replace (&priv->active_pps, pi);
+  return pi ? &pi->data.pps : NULL;
 }
 
 /* Returns the active PPS */
 static inline GstH264PPS *
-get_pps(GstVaapiDecoderH264 *decoder)
+get_pps (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
+  GstVaapiParserInfoH264 *const pi = decoder->priv.active_pps;
 
-    return pi ? &pi->data.pps : NULL;
+  return pi ? &pi->data.pps : NULL;
 }
 
 /* Activate the supplied SPS */
 static GstH264SPS *
-ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+ensure_sps (GstVaapiDecoderH264 * decoder, GstH264SPS * sps)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = priv->sps[sps->id];
 
-    /* Propagate "got I-frame" state to the next SPS unit if the
-       current sequence was not ended */
-    if (pi && priv->active_sps)
-        pi->state |= (priv->active_sps->state &
-            GST_H264_VIDEO_STATE_GOT_I_FRAME);
+  /* Propagate "got I-frame" state to the next SPS unit if the
+     current sequence was not ended */
+  if (pi && priv->active_sps)
+    pi->state |= (priv->active_sps->state & GST_H264_VIDEO_STATE_GOT_I_FRAME);
 
-    gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
-    return pi ? &pi->data.sps : NULL;
+  gst_vaapi_parser_info_h264_replace (&priv->active_sps, pi);
+  return pi ? &pi->data.sps : NULL;
 }
 
 /* Returns the active SPS */
 static inline GstH264SPS *
-get_sps(GstVaapiDecoderH264 *decoder)
+get_sps (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
+  GstVaapiParserInfoH264 *const pi = decoder->priv.active_sps;
 
-    return pi ? &pi->data.sps : NULL;
+  return pi ? &pi->data.sps : NULL;
 }
 
 static void
-fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
+fill_profiles (GstVaapiProfile profiles[16], guint * n_profiles_ptr,
     GstVaapiProfile profile)
 {
-    guint n_profiles = *n_profiles_ptr;
+  guint n_profiles = *n_profiles_ptr;
 
-    profiles[n_profiles++] = profile;
-    switch (profile) {
+  profiles[n_profiles++] = profile;
+  switch (profile) {
     case GST_VAAPI_PROFILE_H264_MAIN:
-        profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
-        break;
+      profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
+      break;
     default:
-        break;
-    }
-    *n_profiles_ptr = n_profiles;
+      break;
+  }
+  *n_profiles_ptr = n_profiles;
 }
 
 /* Fills in compatible profiles for MVC decoding */
 static void
-fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
-    guint *n_profiles_ptr, guint dpb_size)
-{
-    const gchar * const vendor_string =
-        gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
-
-    gboolean add_high_profile = FALSE;
-    struct map {
-        const gchar *str;
-        guint str_len;
+fill_profiles_mvc (GstVaapiDecoderH264 * decoder, GstVaapiProfile profiles[16],
+    guint * n_profiles_ptr, guint dpb_size)
+{
+  const gchar *const vendor_string =
+      gst_vaapi_display_get_vendor_string (GST_VAAPI_DECODER_DISPLAY (decoder));
+
+  gboolean add_high_profile = FALSE;
+  struct map
+  {
+    const gchar *str;
+    guint str_len;
+  };
+  const struct map *m;
+
+  // Drivers that support slice level decoding
+  if (vendor_string && dpb_size <= 16) {
+    static const struct map drv_names[] = {
+      {"Intel i965 driver", 17},
+      {NULL, 0}
     };
-    const struct map *m;
-
-    // Drivers that support slice level decoding
-    if (vendor_string && dpb_size <= 16) {
-        static const struct map drv_names[] = {
-            { "Intel i965 driver", 17 },
-            { NULL, 0 }
-        };
-        for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
-            if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
-                add_high_profile = TRUE;
-        }
+    for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
+      if (g_ascii_strncasecmp (vendor_string, m->str, m->str_len) == 0)
+        add_high_profile = TRUE;
     }
+  }
 
-    if (add_high_profile)
-        fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
+  if (add_high_profile)
+    fill_profiles (profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
 }
 
 static GstVaapiProfile
-get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
+get_profile (GstVaapiDecoderH264 * decoder, GstH264SPS * sps, guint dpb_size)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
-    GstVaapiProfile profile, profiles[4];
-    guint i, n_profiles = 0;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiDisplay *const display = GST_VAAPI_DECODER_DISPLAY (decoder);
+  GstVaapiProfile profile, profiles[4];
+  guint i, n_profiles = 0;
 
-    profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
-    if (!profile)
-        return GST_VAAPI_PROFILE_UNKNOWN;
+  profile = gst_vaapi_utils_h264_get_profile (sps->profile_idc);
+  if (!profile)
+    return GST_VAAPI_PROFILE_UNKNOWN;
 
-    fill_profiles(profiles, &n_profiles, profile);
-    switch (profile) {
+  fill_profiles (profiles, &n_profiles, profile);
+  switch (profile) {
     case GST_VAAPI_PROFILE_H264_BASELINE:
-        if (sps->constraint_set1_flag) { // A.2.2 (main profile)
-            fill_profiles(profiles, &n_profiles,
-                GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
-            fill_profiles(profiles, &n_profiles,
-                GST_VAAPI_PROFILE_H264_MAIN);
-        }
-        break;
+      if (sps->constraint_set1_flag) {  // A.2.2 (main profile)
+        fill_profiles (profiles, &n_profiles,
+            GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
+        fill_profiles (profiles, &n_profiles, GST_VAAPI_PROFILE_H264_MAIN);
+      }
+      break;
     case GST_VAAPI_PROFILE_H264_EXTENDED:
-        if (sps->constraint_set1_flag) { // A.2.2 (main profile)
-            fill_profiles(profiles, &n_profiles,
-                GST_VAAPI_PROFILE_H264_MAIN);
-        }
-        break;
+      if (sps->constraint_set1_flag) {  // A.2.2 (main profile)
+        fill_profiles (profiles, &n_profiles, GST_VAAPI_PROFILE_H264_MAIN);
+      }
+      break;
     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
-        if (priv->max_views == 2) {
-            fill_profiles(profiles, &n_profiles,
-                GST_VAAPI_PROFILE_H264_STEREO_HIGH);
-        }
-        fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
-        break;
+      if (priv->max_views == 2) {
+        fill_profiles (profiles, &n_profiles,
+            GST_VAAPI_PROFILE_H264_STEREO_HIGH);
+      }
+      fill_profiles_mvc (decoder, profiles, &n_profiles, dpb_size);
+      break;
     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
-        if (sps->frame_mbs_only_flag) {
-            fill_profiles(profiles, &n_profiles,
-                GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
-        }
-        fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
-        break;
+      if (sps->frame_mbs_only_flag) {
+        fill_profiles (profiles, &n_profiles,
+            GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
+      }
+      fill_profiles_mvc (decoder, profiles, &n_profiles, dpb_size);
+      break;
     default:
-        break;
-    }
+      break;
+  }
 
-    /* If the preferred profile (profiles[0]) matches one that we already
-       found, then just return it now instead of searching for it again */
-    if (profiles[0] == priv->profile)
-        return priv->profile;
+  /* If the preferred profile (profiles[0]) matches one that we already
+     found, then just return it now instead of searching for it again */
+  if (profiles[0] == priv->profile)
+    return priv->profile;
 
-    for (i = 0; i < n_profiles; i++) {
-        if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
-            return profiles[i];
-    }
-    return GST_VAAPI_PROFILE_UNKNOWN;
+  for (i = 0; i < n_profiles; i++) {
+    if (gst_vaapi_display_has_decoder (display, profiles[i], priv->entrypoint))
+      return profiles[i];
+  }
+  return GST_VAAPI_PROFILE_UNKNOWN;
 }
 
 static GstVaapiDecoderStatus
-ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
-{
-    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiContextInfo info;
-    GstVaapiProfile profile;
-    GstVaapiChromaType chroma_type;
-    gboolean reset_context = FALSE;
-    guint mb_width, mb_height, dpb_size, num_views;
-
-    num_views = get_num_views(sps);
-    if (priv->max_views < num_views) {
-        priv->max_views = num_views;
-        GST_DEBUG("maximum number of views changed to %u", num_views);
-    }
-
-    dpb_size = get_max_dec_frame_buffering(sps);
-    if (priv->dpb_size < dpb_size) {
-        GST_DEBUG("DPB size increased");
-        reset_context = TRUE;
-    }
-
-    profile = get_profile(decoder, sps, dpb_size);
-    if (!profile) {
-        GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    }
-
-    if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
-        GST_DEBUG("profile changed to %x", profile);
-        reset_context = TRUE;
-        priv->profile = profile;
-    }
-
-    if (reset_context) {
-        switch (num_views) {
-          case 1:
-            /* Frame-packed mode details should be copied from the parser
-             * if we set NONE */
-            gst_vaapi_decoder_set_multiview_mode (base_decoder,
-                num_views, GST_VIDEO_MULTIVIEW_MODE_NONE,
-                GST_VIDEO_MULTIVIEW_FLAGS_NONE);
-            break;
-          case 2: /* Assume stereo */
-            if (profile == GST_VAAPI_PROFILE_H264_STEREO_HIGH) {
-              GST_DEBUG ("Stereo profile - frame-by-frame output, %d views", num_views);
-              gst_vaapi_decoder_set_multiview_mode (base_decoder,
-                  num_views, GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME,
-                  GST_VIDEO_MULTIVIEW_FLAGS_NONE);
-              break;
-            }
-            /* non-stereo 2 views. Fall through */
-          default:
-            GST_DEBUG ("Multiview profile - frame-by-frame output, %d views", num_views);
-            gst_vaapi_decoder_set_multiview_mode (base_decoder,
-                num_views, GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME,
-                GST_VIDEO_MULTIVIEW_FLAGS_NONE);
-            break;
+ensure_context (GstVaapiDecoderH264 * decoder, GstH264SPS * sps)
+{
+  GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER_CAST (decoder);
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiContextInfo info;
+  GstVaapiProfile profile;
+  GstVaapiChromaType chroma_type;
+  gboolean reset_context = FALSE;
+  guint mb_width, mb_height, dpb_size, num_views;
+
+  num_views = get_num_views (sps);
+  if (priv->max_views < num_views) {
+    priv->max_views = num_views;
+    GST_DEBUG ("maximum number of views changed to %u", num_views);
+  }
+
+  dpb_size = get_max_dec_frame_buffering (sps);
+  if (priv->dpb_size < dpb_size) {
+    GST_DEBUG ("DPB size increased");
+    reset_context = TRUE;
+  }
+
+  profile = get_profile (decoder, sps, dpb_size);
+  if (!profile) {
+    GST_ERROR ("unsupported profile_idc %u", sps->profile_idc);
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  }
+
+  if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
+    GST_DEBUG ("profile changed to %x", profile);
+    reset_context = TRUE;
+    priv->profile = profile;
+  }
+
+  if (reset_context) {
+    switch (num_views) {
+      case 1:
+        /* Frame-packed mode details should be copied from the parser
+         * if we set NONE */
+        gst_vaapi_decoder_set_multiview_mode (base_decoder,
+            num_views, GST_VIDEO_MULTIVIEW_MODE_NONE,
+            GST_VIDEO_MULTIVIEW_FLAGS_NONE);
+        break;
+      case 2:                  /* Assume stereo */
+        if (profile == GST_VAAPI_PROFILE_H264_STEREO_HIGH) {
+          GST_DEBUG ("Stereo profile - frame-by-frame output, %d views",
+              num_views);
+          gst_vaapi_decoder_set_multiview_mode (base_decoder, num_views,
+              GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME,
+              GST_VIDEO_MULTIVIEW_FLAGS_NONE);
+          break;
         }
+        /* non-stereo 2 views. Fall through */
+      default:
+        GST_DEBUG ("Multiview profile - frame-by-frame output, %d views",
+            num_views);
+        gst_vaapi_decoder_set_multiview_mode (base_decoder, num_views,
+            GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME,
+            GST_VIDEO_MULTIVIEW_FLAGS_NONE);
+        break;
     }
+  }
+
+  chroma_type = gst_vaapi_utils_h264_get_chroma_type (sps->chroma_format_idc);
+  if (!chroma_type) {
+    GST_ERROR ("unsupported chroma_format_idc %u", sps->chroma_format_idc);
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
+  }
+
+  if (priv->chroma_type != chroma_type) {
+    GST_DEBUG ("chroma format changed");
+    reset_context = TRUE;
+    priv->chroma_type = chroma_type;
+  }
+
+  mb_width = sps->pic_width_in_mbs_minus1 + 1;
+  mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
+      !sps->frame_mbs_only_flag;
+  if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
+    GST_DEBUG ("size changed");
+    reset_context = TRUE;
+    priv->mb_width = mb_width;
+    priv->mb_height = mb_height;
+  }
+
+  if (priv->progressive_sequence != sps->frame_mbs_only_flag) {
+    GST_DEBUG ("interlacing-mode changed");
+    priv->progressive_sequence = sps->frame_mbs_only_flag;
+    gst_vaapi_decoder_set_interlaced (base_decoder,
+        !priv->progressive_sequence);
+    priv->top_field_first = FALSE;
+  }
+
+  gst_vaapi_decoder_set_pixel_aspect_ratio (base_decoder,
+      sps->vui_parameters.par_n, sps->vui_parameters.par_d);
+
+  if (!reset_context && priv->has_context)
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
-    if (!chroma_type) {
-        GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
-    }
-
-    if (priv->chroma_type != chroma_type) {
-        GST_DEBUG("chroma format changed");
-        reset_context     = TRUE;
-        priv->chroma_type = chroma_type;
-    }
+  /* XXX: fix surface size when cropping is implemented */
+  info.profile = priv->profile;
+  info.entrypoint = priv->entrypoint;
+  info.chroma_type = priv->chroma_type;
+  info.width = sps->width;
+  info.height = sps->height;
+  info.ref_frames = dpb_size;
 
-    mb_width  = sps->pic_width_in_mbs_minus1 + 1;
-    mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
-        !sps->frame_mbs_only_flag;
-    if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
-        GST_DEBUG("size changed");
-        reset_context   = TRUE;
-        priv->mb_width  = mb_width;
-        priv->mb_height = mb_height;
-    }
+  if (!gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  priv->has_context = TRUE;
 
-    if (priv->progressive_sequence != sps->frame_mbs_only_flag) {
-        GST_DEBUG("interlacing-mode changed");
-        priv->progressive_sequence = sps->frame_mbs_only_flag;
-        gst_vaapi_decoder_set_interlaced(base_decoder,
-            !priv->progressive_sequence);
-        priv->top_field_first = FALSE;
-    }
+  /* Reset DPB */
+  if (!dpb_reset (decoder, dpb_size))
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
 
-    gst_vaapi_decoder_set_pixel_aspect_ratio(
-        base_decoder,
-        sps->vui_parameters.par_n,
-        sps->vui_parameters.par_d
-    );
-
-    if (!reset_context && priv->has_context)
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    /* XXX: fix surface size when cropping is implemented */
-    info.profile    = priv->profile;
-    info.entrypoint = priv->entrypoint;
-    info.chroma_type = priv->chroma_type;
-    info.width      = sps->width;
-    info.height     = sps->height;
-    info.ref_frames = dpb_size;
-
-    if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    priv->has_context = TRUE;
-
-    /* Reset DPB */
-    if (!dpb_reset(decoder, dpb_size))
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-
-    /* Reset MVC data */
-    if (!mvc_reset(decoder))
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  /* Reset MVC data */
+  if (!mvc_reset (decoder))
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static void
-fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
-    const GstH264SPS *sps)
+fill_iq_matrix_4x4 (VAIQMatrixBufferH264 * iq_matrix, const GstH264PPS * pps,
+    const GstH264SPS * sps)
 {
-    guint i;
+  guint i;
 
-    /* There are always 6 4x4 scaling lists */
-    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
-    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
+  /* There are always 6 4x4 scaling lists */
+  g_assert (G_N_ELEMENTS (iq_matrix->ScalingList4x4) == 6);
+  g_assert (G_N_ELEMENTS (iq_matrix->ScalingList4x4[0]) == 16);
 
-    for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
-        gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
-            iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
+  for (i = 0; i < G_N_ELEMENTS (iq_matrix->ScalingList4x4); i++)
+    gst_h264_quant_matrix_4x4_get_raster_from_zigzag (iq_matrix->ScalingList4x4
+        [i], pps->scaling_lists_4x4[i]);
 }
 
 static void
-fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
-    const GstH264SPS *sps)
+fill_iq_matrix_8x8 (VAIQMatrixBufferH264 * iq_matrix, const GstH264PPS * pps,
+    const GstH264SPS * sps)
 {
-    guint i, n;
+  guint i, n;
 
-    /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
-    if (!pps->transform_8x8_mode_flag)
-        return;
+  /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
+  if (!pps->transform_8x8_mode_flag)
+    return;
 
-    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
-    g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
+  g_assert (G_N_ELEMENTS (iq_matrix->ScalingList8x8) >= 2);
+  g_assert (G_N_ELEMENTS (iq_matrix->ScalingList8x8[0]) == 64);
 
-    n = (sps->chroma_format_idc != 3) ? 2 : 6;
-    for (i = 0; i < n; i++) {
-        gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
-            iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
-    }
+  n = (sps->chroma_format_idc != 3) ? 2 : 6;
+  for (i = 0; i < n; i++) {
+    gst_h264_quant_matrix_8x8_get_raster_from_zigzag (iq_matrix->ScalingList8x8
+        [i], pps->scaling_lists_8x8[i]);
+  }
 }
 
 static GstVaapiDecoderStatus
-ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+ensure_quant_matrix (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture)
 {
-    GstVaapiPicture * const base_picture = &picture->base;
-    GstH264PPS * const pps = get_pps(decoder);
-    GstH264SPS * const sps = get_sps(decoder);
-    VAIQMatrixBufferH264 *iq_matrix;
+  GstVaapiPicture *const base_picture = &picture->base;
+  GstH264PPS *const pps = get_pps (decoder);
+  GstH264SPS *const sps = get_sps (decoder);
+  VAIQMatrixBufferH264 *iq_matrix;
 
-    base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
-    if (!base_picture->iq_matrix) {
-        GST_ERROR("failed to allocate IQ matrix");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    iq_matrix = base_picture->iq_matrix->param;
+  base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (H264, decoder);
+  if (!base_picture->iq_matrix) {
+    GST_ERROR ("failed to allocate IQ matrix");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  iq_matrix = base_picture->iq_matrix->param;
 
-    /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
-       is not large enough to hold lists for 4:4:4 */
-    if (sps->chroma_format_idc == 3)
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
+  /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
+     is not large enough to hold lists for 4:4:4 */
+  if (sps->chroma_format_idc == 3)
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
 
-    fill_iq_matrix_4x4(iq_matrix, pps, sps);
-    fill_iq_matrix_8x8(iq_matrix, pps, sps);
+  fill_iq_matrix_4x4 (iq_matrix, pps, sps);
+  fill_iq_matrix_8x8 (iq_matrix, pps, sps);
 
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline gboolean
-is_valid_state(guint state, guint ref_state)
+is_valid_state (guint state, guint ref_state)
 {
-    return (state & ref_state) == ref_state;
+  return (state & ref_state) == ref_state;
 }
 
 static GstVaapiDecoderStatus
-decode_current_picture(GstVaapiDecoderH264 *decoder)
+decode_current_picture (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const sps_pi = decoder->priv.active_sps;
-    GstVaapiPictureH264 * const picture = priv->current_picture;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const sps_pi = decoder->priv.active_sps;
+  GstVaapiPictureH264 *const picture = priv->current_picture;
 
-    if (!is_valid_state(priv->decoder_state,
-            GST_H264_VIDEO_STATE_VALID_PICTURE))
-        goto drop_frame;
-
-    priv->decoder_state |= sps_pi->state;
-    if (!(priv->decoder_state & GST_H264_VIDEO_STATE_GOT_I_FRAME)) {
-        if (priv->decoder_state & GST_H264_VIDEO_STATE_GOT_P_SLICE)
-            goto drop_frame;
-        sps_pi->state |= GST_H264_VIDEO_STATE_GOT_I_FRAME;
-    }
+  if (!is_valid_state (priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
+    goto drop_frame;
 
-    priv->decoder_state = 0;
-    priv->pic_structure = GST_H264_SEI_PIC_STRUCT_FRAME;
+  priv->decoder_state |= sps_pi->state;
+  if (!(priv->decoder_state & GST_H264_VIDEO_STATE_GOT_I_FRAME)) {
+    if (priv->decoder_state & GST_H264_VIDEO_STATE_GOT_P_SLICE)
+      goto drop_frame;
+    sps_pi->state |= GST_H264_VIDEO_STATE_GOT_I_FRAME;
+  }
 
-    if (!picture)
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->decoder_state = 0;
+  priv->pic_structure = GST_H264_SEI_PIC_STRUCT_FRAME;
 
-    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
-        goto error;
-    if (!exec_ref_pic_marking(decoder, picture))
-        goto error;
-    if (!dpb_add(decoder, picture))
-        goto error;
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
+  if (!picture)
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
+  if (!gst_vaapi_picture_decode (GST_VAAPI_PICTURE_CAST (picture)))
+    goto error;
+  if (!exec_ref_pic_marking (decoder, picture))
+    goto error;
+  if (!dpb_add (decoder, picture))
+    goto error;
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
 error:
-    /* XXX: fix for cases where first field failed to be decoded */
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  /* XXX: fix for cases where first field failed to be decoded */
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
 drop_frame:
-    priv->decoder_state = 0;
-    priv->pic_structure = GST_H264_SEI_PIC_STRUCT_FRAME;
-    return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+  priv->decoder_state = 0;
+  priv->pic_structure = GST_H264_SEI_PIC_STRUCT_FRAME;
+  return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
 }
 
 static GstVaapiDecoderStatus
-parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+parse_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264SPS * const sps = &pi->data.sps;
-    GstH264ParserResult result;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264SPS *const sps = &pi->data.sps;
+  GstH264ParserResult result;
 
-    GST_DEBUG("parse SPS");
+  GST_DEBUG ("parse SPS");
 
-    priv->parser_state = 0;
+  priv->parser_state = 0;
 
-    /* Variables that don't have inferred values per the H.264
-       standard but that should get a default value anyway */
-    sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+  /* Variables that don't have inferred values per the H.264
+     standard but that should get a default value anyway */
+  sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
 
-    result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
-    if (result != GST_H264_PARSER_OK)
-        return get_status(result);
+  result = gst_h264_parser_parse_sps (priv->parser, &pi->nalu, sps, TRUE);
+  if (result != GST_H264_PARSER_OK)
+    return get_status (result);
 
-    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+parse_subset_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264SPS * const sps = &pi->data.sps;
-    GstH264ParserResult result;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264SPS *const sps = &pi->data.sps;
+  GstH264ParserResult result;
 
-    GST_DEBUG("parse subset SPS");
+  GST_DEBUG ("parse subset SPS");
 
-    /* Variables that don't have inferred values per the H.264
-       standard but that should get a default value anyway */
-    sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+  /* Variables that don't have inferred values per the H.264
+     standard but that should get a default value anyway */
+  sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
 
-    result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
-        TRUE);
-    if (result != GST_H264_PARSER_OK)
-        return get_status(result);
+  result = gst_h264_parser_parse_subset_sps (priv->parser, &pi->nalu, sps,
+      TRUE);
+  if (result != GST_H264_PARSER_OK)
+    return get_status (result);
 
-    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+parse_pps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264PPS * const pps = &pi->data.pps;
-    GstH264ParserResult result;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264PPS *const pps = &pi->data.pps;
+  GstH264ParserResult result;
 
-    GST_DEBUG("parse PPS");
+  GST_DEBUG ("parse PPS");
 
-    priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
+  priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
 
-    /* Variables that don't have inferred values per the H.264
-       standard but that should get a default value anyway */
-    pps->slice_group_map_type = 0;
-    pps->slice_group_change_rate_minus1 = 0;
-    pps->slice_group_id = NULL;
+  /* Variables that don't have inferred values per the H.264
+     standard but that should get a default value anyway */
+  pps->slice_group_map_type = 0;
+  pps->slice_group_change_rate_minus1 = 0;
+  pps->slice_group_id = NULL;
 
-    result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
-    if (result != GST_H264_PARSER_OK)
-        return get_status(result);
+  result = gst_h264_parser_parse_pps (priv->parser, &pi->nalu, pps);
+  if (result != GST_H264_PARSER_OK)
+    return get_status (result);
 
-    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+parse_sei (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GArray ** const sei_ptr = &pi->data.sei;
-    GstH264ParserResult result;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GArray **const sei_ptr = &pi->data.sei;
+  GstH264ParserResult result;
 
-    GST_DEBUG("parse SEI");
+  GST_DEBUG ("parse SEI");
 
-    result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
-    if (result != GST_H264_PARSER_OK) {
-        GST_WARNING("failed to parse SEI messages");
-        return get_status(result);
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  result = gst_h264_parser_parse_sei (priv->parser, &pi->nalu, sei_ptr);
+  if (result != GST_H264_PARSER_OK) {
+    GST_WARNING ("failed to parse SEI messages");
+    return get_status (result);
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+parse_slice (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-    GstH264NalUnit * const nalu = &pi->nalu;
-    GstH264SPS *sps;
-    GstH264ParserResult result;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+  GstH264NalUnit *const nalu = &pi->nalu;
+  GstH264SPS *sps;
+  GstH264ParserResult result;
 
-    GST_DEBUG("parse slice");
+  GST_DEBUG ("parse slice");
 
-    priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
-                           GST_H264_VIDEO_STATE_GOT_PPS);
+  priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS |
+      GST_H264_VIDEO_STATE_GOT_PPS);
 
-    /* Propagate Prefix NAL unit info, if necessary */
-    switch (nalu->type) {
+  /* Propagate Prefix NAL unit info, if necessary */
+  switch (nalu->type) {
     case GST_H264_NAL_SLICE:
-    case GST_H264_NAL_SLICE_IDR: {
-        GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
-        if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
-            /* MVC sequences shall have a Prefix NAL unit immediately
-               preceding this NAL unit */
-            pi->nalu.extension_type = prev_pi->nalu.extension_type;
-            pi->nalu.extension = prev_pi->nalu.extension;
-        }
-        else {
-            /* In the very unlikely case there is no Prefix NAL unit
-               immediately preceding this NAL unit, try to infer some
-               defaults (H.7.4.1.1) */
-            GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
-            mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
-            nalu->idr_pic_flag = !mvc->non_idr_flag;
-            mvc->priority_id = 0;
-            mvc->view_id = 0;
-            mvc->temporal_id = 0;
-            mvc->anchor_pic_flag = 0;
-            mvc->inter_view_flag = 1;
-        }
-        break;
-    }
-    }
-
-    /* Variables that don't have inferred values per the H.264
-       standard but that should get a default value anyway */
-    slice_hdr->cabac_init_idc = 0;
-    slice_hdr->direct_spatial_mv_pred_flag = 0;
-
-    result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
-        slice_hdr, TRUE, TRUE);
-    if (result != GST_H264_PARSER_OK)
-        return get_status(result);
-
-    sps = slice_hdr->pps->sequence;
-
-    /* Update MVC data */
-    pi->view_id = get_view_id(&pi->nalu);
-    pi->voc = get_view_order_index(sps, pi->view_id);
-
-    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
-    if (!GST_H264_IS_I_SLICE(slice_hdr))
-        priv->parser_state |= GST_H264_VIDEO_STATE_GOT_P_SLICE;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    case GST_H264_NAL_SLICE_IDR:{
+      GstVaapiParserInfoH264 *const prev_pi = priv->prev_pi;
+      if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
+        /* MVC sequences shall have a Prefix NAL unit immediately
+           preceding this NAL unit */
+        pi->nalu.extension_type = prev_pi->nalu.extension_type;
+        pi->nalu.extension = prev_pi->nalu.extension;
+      } else {
+        /* In the very unlikely case there is no Prefix NAL unit
+           immediately preceding this NAL unit, try to infer some
+           defaults (H.7.4.1.1) */
+        GstH264NalUnitExtensionMVC *const mvc = &pi->nalu.extension.mvc;
+        mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
+        nalu->idr_pic_flag = !mvc->non_idr_flag;
+        mvc->priority_id = 0;
+        mvc->view_id = 0;
+        mvc->temporal_id = 0;
+        mvc->anchor_pic_flag = 0;
+        mvc->inter_view_flag = 1;
+      }
+      break;
+    }
+  }
+
+  /* Variables that don't have inferred values per the H.264
+     standard but that should get a default value anyway */
+  slice_hdr->cabac_init_idc = 0;
+  slice_hdr->direct_spatial_mv_pred_flag = 0;
+
+  result = gst_h264_parser_parse_slice_hdr (priv->parser, &pi->nalu,
+      slice_hdr, TRUE, TRUE);
+  if (result != GST_H264_PARSER_OK)
+    return get_status (result);
+
+  sps = slice_hdr->pps->sequence;
+
+  /* Update MVC data */
+  pi->view_id = get_view_id (&pi->nalu);
+  pi->voc = get_view_order_index (sps, pi->view_id);
+
+  priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
+  if (!GST_H264_IS_I_SLICE (slice_hdr))
+    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_P_SLICE;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+decode_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264SPS * const sps = &pi->data.sps;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264SPS *const sps = &pi->data.sps;
 
-    GST_DEBUG("decode SPS");
+  GST_DEBUG ("decode SPS");
 
-    gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_parser_info_h264_replace (&priv->sps[sps->id], pi);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+decode_subset_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264SPS * const sps = &pi->data.sps;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264SPS *const sps = &pi->data.sps;
 
-    GST_DEBUG("decode subset SPS");
+  GST_DEBUG ("decode subset SPS");
 
-    gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_parser_info_h264_replace (&priv->sps[sps->id], pi);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+decode_pps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264PPS * const pps = &pi->data.pps;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264PPS *const pps = &pi->data.pps;
 
-    GST_DEBUG("decode PPS");
+  GST_DEBUG ("decode PPS");
 
-    gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_parser_info_h264_replace (&priv->pps[pps->id], pi);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+decode_sei (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    guint i;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  guint i;
 
-    GST_DEBUG("decode SEI messages");
+  GST_DEBUG ("decode SEI messages");
 
-    for (i = 0; i < pi->data.sei->len; i++) {
-        const GstH264SEIMessage * const sei =
-            &g_array_index(pi->data.sei, GstH264SEIMessage, i);
+  for (i = 0; i < pi->data.sei->len; i++) {
+    const GstH264SEIMessage *const sei =
+        &g_array_index (pi->data.sei, GstH264SEIMessage, i);
 
-        switch (sei->payloadType) {
-        case GST_H264_SEI_PIC_TIMING: {
-            const GstH264PicTiming * const pic_timing =
-                &sei->payload.pic_timing;
-            if (pic_timing->pic_struct_present_flag)
-                priv->pic_structure = pic_timing->pic_struct;
-            break;
-        }
-        default:
-            break;
-        }
+    switch (sei->payloadType) {
+      case GST_H264_SEI_PIC_TIMING:{
+        const GstH264PicTiming *const pic_timing = &sei->payload.pic_timing;
+        if (pic_timing->pic_struct_present_flag)
+          priv->pic_structure = pic_timing->pic_struct;
+        break;
+      }
+      default:
+        break;
     }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_end(GstVaapiDecoderH264 *decoder)
+decode_sequence_end (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const sps_pi = decoder->priv.active_sps;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const sps_pi = decoder->priv.active_sps;
 
-    GST_DEBUG("decode sequence-end");
+  GST_DEBUG ("decode sequence-end");
 
-    /* Sequence ended, don't try to propagate "got I-frame" state
-       beyond this point */
-    if (sps_pi)
-        sps_pi->state &= ~GST_H264_VIDEO_STATE_GOT_I_FRAME;
+  /* Sequence ended, don't try to propagate "got I-frame" state
+     beyond this point */
+  if (sps_pi)
+    sps_pi->state &= ~GST_H264_VIDEO_STATE_GOT_I_FRAME;
 
-    dpb_flush(decoder, NULL);
+  dpb_flush (decoder, NULL);
 
-    /* Reset defaults, should there be a new sequence available next */
-    priv->max_views = 1;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  /* Reset defaults, should there be a new sequence available next */
+  priv->max_views = 1;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 /* 8.2.1.1 - Decoding process for picture order count type 0 */
 static void
-init_picture_poc_0(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
-    gint32 temp_poc;
-
-    GST_DEBUG("decode picture order count type 0");
-
-    if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
-        priv->prev_poc_msb = 0;
-        priv->prev_poc_lsb = 0;
-    }
-    else if (priv->prev_pic_has_mmco5) {
-        priv->prev_poc_msb = 0;
-        priv->prev_poc_lsb =
-            (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
-             0 : priv->field_poc[TOP_FIELD]);
-    }
-    else {
-        priv->prev_poc_msb = priv->poc_msb;
-        priv->prev_poc_lsb = priv->poc_lsb;
-    }
-
-    // (8-3)
-    priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
-    if (priv->poc_lsb < priv->prev_poc_lsb &&
-        (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
-        priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
-    else if (priv->poc_lsb > priv->prev_poc_lsb &&
-             (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
-        priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
-    else
-        priv->poc_msb = priv->prev_poc_msb;
-
-    temp_poc = priv->poc_msb + priv->poc_lsb;
-    switch (picture->structure) {
+init_picture_poc_0 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  const gint32 MaxPicOrderCntLsb =
+      1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
+  gint32 temp_poc;
+
+  GST_DEBUG ("decode picture order count type 0");
+
+  if (GST_VAAPI_PICTURE_IS_IDR (picture)) {
+    priv->prev_poc_msb = 0;
+    priv->prev_poc_lsb = 0;
+  } else if (priv->prev_pic_has_mmco5) {
+    priv->prev_poc_msb = 0;
+    priv->prev_poc_lsb =
+        (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
+        0 : priv->field_poc[TOP_FIELD]);
+  } else {
+    priv->prev_poc_msb = priv->poc_msb;
+    priv->prev_poc_lsb = priv->poc_lsb;
+  }
+
+  // (8-3)
+  priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
+  if (priv->poc_lsb < priv->prev_poc_lsb &&
+      (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
+    priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
+  else if (priv->poc_lsb > priv->prev_poc_lsb &&
+      (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
+    priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
+  else
+    priv->poc_msb = priv->prev_poc_msb;
+
+  temp_poc = priv->poc_msb + priv->poc_lsb;
+  switch (picture->structure) {
     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
-        // (8-4, 8-5)
-        priv->field_poc[TOP_FIELD] = temp_poc;
-        priv->field_poc[BOTTOM_FIELD] = temp_poc +
-            slice_hdr->delta_pic_order_cnt_bottom;
-        break;
+      // (8-4, 8-5)
+      priv->field_poc[TOP_FIELD] = temp_poc;
+      priv->field_poc[BOTTOM_FIELD] = temp_poc +
+          slice_hdr->delta_pic_order_cnt_bottom;
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
-        // (8-4)
-        priv->field_poc[TOP_FIELD] = temp_poc;
-        break;
+      // (8-4)
+      priv->field_poc[TOP_FIELD] = temp_poc;
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
-        // (8-5)
-        priv->field_poc[BOTTOM_FIELD] = temp_poc;
-        break;
-    }
+      // (8-5)
+      priv->field_poc[BOTTOM_FIELD] = temp_poc;
+      break;
+  }
 }
 
 /* 8.2.1.2 - Decoding process for picture order count type 1 */
 static void
-init_picture_poc_1(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
-    gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
-    guint i;
-
-    GST_DEBUG("decode picture order count type 1");
-
-    if (priv->prev_pic_has_mmco5)
-        prev_frame_num_offset = 0;
-    else
-        prev_frame_num_offset = priv->frame_num_offset;
+init_picture_poc_1 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
+  gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
+  guint i;
 
-    // (8-6)
-    if (GST_VAAPI_PICTURE_IS_IDR(picture))
-        priv->frame_num_offset = 0;
-    else if (priv->prev_frame_num > priv->frame_num)
-        priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
-    else
-        priv->frame_num_offset = prev_frame_num_offset;
+  GST_DEBUG ("decode picture order count type 1");
 
-    // (8-7)
-    if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
-        abs_frame_num = priv->frame_num_offset + priv->frame_num;
-    else
-        abs_frame_num = 0;
-    if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
-        abs_frame_num = abs_frame_num - 1;
-
-    if (abs_frame_num > 0) {
-        gint32 expected_delta_per_poc_cycle;
-        gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
-
-        expected_delta_per_poc_cycle = 0;
-        for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
-            expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
-
-        // (8-8)
-        poc_cycle_cnt = (abs_frame_num - 1) /
-            sps->num_ref_frames_in_pic_order_cnt_cycle;
-        frame_num_in_poc_cycle = (abs_frame_num - 1) %
-            sps->num_ref_frames_in_pic_order_cnt_cycle;
-
-        // (8-9)
-        expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
-        for (i = 0; i <= frame_num_in_poc_cycle; i++)
-            expected_poc += sps->offset_for_ref_frame[i];
-    }
-    else
-        expected_poc = 0;
-    if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
-        expected_poc += sps->offset_for_non_ref_pic;
+  if (priv->prev_pic_has_mmco5)
+    prev_frame_num_offset = 0;
+  else
+    prev_frame_num_offset = priv->frame_num_offset;
 
-    // (8-10)
-    switch (picture->structure) {
+  // (8-6)
+  if (GST_VAAPI_PICTURE_IS_IDR (picture))
+    priv->frame_num_offset = 0;
+  else if (priv->prev_frame_num > priv->frame_num)
+    priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
+  else
+    priv->frame_num_offset = prev_frame_num_offset;
+
+  // (8-7)
+  if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
+    abs_frame_num = priv->frame_num_offset + priv->frame_num;
+  else
+    abs_frame_num = 0;
+  if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture) && abs_frame_num > 0)
+    abs_frame_num = abs_frame_num - 1;
+
+  if (abs_frame_num > 0) {
+    gint32 expected_delta_per_poc_cycle;
+    gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
+
+    expected_delta_per_poc_cycle = 0;
+    for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
+      expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
+
+    // (8-8)
+    poc_cycle_cnt = (abs_frame_num - 1) /
+        sps->num_ref_frames_in_pic_order_cnt_cycle;
+    frame_num_in_poc_cycle = (abs_frame_num - 1) %
+        sps->num_ref_frames_in_pic_order_cnt_cycle;
+
+    // (8-9)
+    expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
+    for (i = 0; i <= frame_num_in_poc_cycle; i++)
+      expected_poc += sps->offset_for_ref_frame[i];
+  } else
+    expected_poc = 0;
+  if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture))
+    expected_poc += sps->offset_for_non_ref_pic;
+
+  // (8-10)
+  switch (picture->structure) {
     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
-        priv->field_poc[TOP_FIELD] = expected_poc +
-            slice_hdr->delta_pic_order_cnt[0];
-        priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
-            sps->offset_for_top_to_bottom_field +
-            slice_hdr->delta_pic_order_cnt[1];
-        break;
+      priv->field_poc[TOP_FIELD] = expected_poc +
+          slice_hdr->delta_pic_order_cnt[0];
+      priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
+          sps->offset_for_top_to_bottom_field +
+          slice_hdr->delta_pic_order_cnt[1];
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
-        priv->field_poc[TOP_FIELD] = expected_poc +
-            slice_hdr->delta_pic_order_cnt[0];
-        break;
+      priv->field_poc[TOP_FIELD] = expected_poc +
+          slice_hdr->delta_pic_order_cnt[0];
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
-        priv->field_poc[BOTTOM_FIELD] = expected_poc + 
-            sps->offset_for_top_to_bottom_field +
-            slice_hdr->delta_pic_order_cnt[0];
-        break;
-    }
+      priv->field_poc[BOTTOM_FIELD] = expected_poc +
+          sps->offset_for_top_to_bottom_field +
+          slice_hdr->delta_pic_order_cnt[0];
+      break;
+  }
 }
 
 /* 8.2.1.3 - Decoding process for picture order count type 2 */
 static void
-init_picture_poc_2(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
+init_picture_poc_2 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
-    gint32 prev_frame_num_offset, temp_poc;
-
-    GST_DEBUG("decode picture order count type 2");
-
-    if (priv->prev_pic_has_mmco5)
-        prev_frame_num_offset = 0;
-    else
-        prev_frame_num_offset = priv->frame_num_offset;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
+  gint32 prev_frame_num_offset, temp_poc;
 
-    // (8-11)
-    if (GST_VAAPI_PICTURE_IS_IDR(picture))
-        priv->frame_num_offset = 0;
-    else if (priv->prev_frame_num > priv->frame_num)
-        priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
-    else
-        priv->frame_num_offset = prev_frame_num_offset;
+  GST_DEBUG ("decode picture order count type 2");
 
-    // (8-12)
-    if (GST_VAAPI_PICTURE_IS_IDR(picture))
-        temp_poc = 0;
-    else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
-        temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
-    else
-        temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
+  if (priv->prev_pic_has_mmco5)
+    prev_frame_num_offset = 0;
+  else
+    prev_frame_num_offset = priv->frame_num_offset;
 
-    // (8-13)
-    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
-        priv->field_poc[TOP_FIELD] = temp_poc;
-    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
-        priv->field_poc[BOTTOM_FIELD] = temp_poc;
+  // (8-11)
+  if (GST_VAAPI_PICTURE_IS_IDR (picture))
+    priv->frame_num_offset = 0;
+  else if (priv->prev_frame_num > priv->frame_num)
+    priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
+  else
+    priv->frame_num_offset = prev_frame_num_offset;
+
+  // (8-12)
+  if (GST_VAAPI_PICTURE_IS_IDR (picture))
+    temp_poc = 0;
+  else if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture))
+    temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
+  else
+    temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
+
+  // (8-13)
+  if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
+    priv->field_poc[TOP_FIELD] = temp_poc;
+  if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
+    priv->field_poc[BOTTOM_FIELD] = temp_poc;
 }
 
 /* 8.2.1 - Decoding process for picture order count */
 static void
-init_picture_poc(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
+init_picture_poc (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
 
-    switch (sps->pic_order_cnt_type) {
+  switch (sps->pic_order_cnt_type) {
     case 0:
-        init_picture_poc_0(decoder, picture, slice_hdr);
-        break;
+      init_picture_poc_0 (decoder, picture, slice_hdr);
+      break;
     case 1:
-        init_picture_poc_1(decoder, picture, slice_hdr);
-        break;
+      init_picture_poc_1 (decoder, picture, slice_hdr);
+      break;
     case 2:
-        init_picture_poc_2(decoder, picture, slice_hdr);
-        break;
-    }
+      init_picture_poc_2 (decoder, picture, slice_hdr);
+      break;
+  }
 
-    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
-        picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
-    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
-        picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
-    picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
+  if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
+    picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
+  if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
+    picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
+  picture->base.poc = MIN (picture->field_poc[0], picture->field_poc[1]);
 }
 
 static int
-compare_picture_pic_num_dec(const void *a, const void *b)
+compare_picture_pic_num_dec (const void *a, const void *b)
 {
-    const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
-    const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
+  const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
+  const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
 
-    return picB->pic_num - picA->pic_num;
+  return picB->pic_num - picA->pic_num;
 }
 
 static int
-compare_picture_long_term_pic_num_inc(const void *a, const void *b)
+compare_picture_long_term_pic_num_inc (const void *a, const void *b)
 {
-    const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
-    const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
+  const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
+  const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
 
-    return picA->long_term_pic_num - picB->long_term_pic_num;
+  return picA->long_term_pic_num - picB->long_term_pic_num;
 }
 
 static int
-compare_picture_poc_dec(const void *a, const void *b)
+compare_picture_poc_dec (const void *a, const void *b)
 {
-    const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
-    const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
+  const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
+  const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
 
-    return picB->base.poc - picA->base.poc;
+  return picB->base.poc - picA->base.poc;
 }
 
 static int
-compare_picture_poc_inc(const void *a, const void *b)
+compare_picture_poc_inc (const void *a, const void *b)
 {
-    const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
-    const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
+  const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
+  const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
 
-    return picA->base.poc - picB->base.poc;
+  return picA->base.poc - picB->base.poc;
 }
 
 static int
-compare_picture_frame_num_wrap_dec(const void *a, const void *b)
+compare_picture_frame_num_wrap_dec (const void *a, const void *b)
 {
-    const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
-    const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
+  const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
+  const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
 
-    return picB->frame_num_wrap - picA->frame_num_wrap;
+  return picB->frame_num_wrap - picA->frame_num_wrap;
 }
 
 static int
-compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
+compare_picture_long_term_frame_idx_inc (const void *a, const void *b)
 {
-    const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
-    const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
+  const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
+  const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
 
-    return picA->long_term_frame_idx - picB->long_term_frame_idx;
+  return picA->long_term_frame_idx - picB->long_term_frame_idx;
 }
 
 /* 8.2.4.1 - Decoding process for picture numbers */
 static void
-init_picture_refs_pic_num(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
+init_picture_refs_pic_num (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
-    guint i;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
+  guint i;
 
-    GST_DEBUG("decode picture numbers");
+  GST_DEBUG ("decode picture numbers");
 
-    for (i = 0; i < priv->short_ref_count; i++) {
-        GstVaapiPictureH264 * const pic = priv->short_ref[i];
+  for (i = 0; i < priv->short_ref_count; i++) {
+    GstVaapiPictureH264 *const pic = priv->short_ref[i];
 
-        // (H.8.2)
-        if (pic->base.view_id != picture->base.view_id)
-            continue;
-
-        // (8-27)
-        if (pic->frame_num > priv->frame_num)
-            pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
-        else
-            pic->frame_num_wrap = pic->frame_num;
-
-        // (8-28, 8-30, 8-31)
-        if (GST_VAAPI_PICTURE_IS_FRAME(picture))
-            pic->pic_num = pic->frame_num_wrap;
-        else {
-            if (pic->structure == picture->structure)
-                pic->pic_num = 2 * pic->frame_num_wrap + 1;
-            else
-                pic->pic_num = 2 * pic->frame_num_wrap;
-        }
+    // (H.8.2)
+    if (pic->base.view_id != picture->base.view_id)
+      continue;
+
+    // (8-27)
+    if (pic->frame_num > priv->frame_num)
+      pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
+    else
+      pic->frame_num_wrap = pic->frame_num;
+
+    // (8-28, 8-30, 8-31)
+    if (GST_VAAPI_PICTURE_IS_FRAME (picture))
+      pic->pic_num = pic->frame_num_wrap;
+    else {
+      if (pic->structure == picture->structure)
+        pic->pic_num = 2 * pic->frame_num_wrap + 1;
+      else
+        pic->pic_num = 2 * pic->frame_num_wrap;
     }
+  }
 
-    for (i = 0; i < priv->long_ref_count; i++) {
-        GstVaapiPictureH264 * const pic = priv->long_ref[i];
+  for (i = 0; i < priv->long_ref_count; i++) {
+    GstVaapiPictureH264 *const pic = priv->long_ref[i];
 
-        // (H.8.2)
-        if (pic->base.view_id != picture->base.view_id)
-            continue;
-
-        // (8-29, 8-32, 8-33)
-        if (GST_VAAPI_PICTURE_IS_FRAME(picture))
-            pic->long_term_pic_num = pic->long_term_frame_idx;
-        else {
-            if (pic->structure == picture->structure)
-                pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
-            else
-                pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
-        }
+    // (H.8.2)
+    if (pic->base.view_id != picture->base.view_id)
+      continue;
+
+    // (8-29, 8-32, 8-33)
+    if (GST_VAAPI_PICTURE_IS_FRAME (picture))
+      pic->long_term_pic_num = pic->long_term_frame_idx;
+    else {
+      if (pic->structure == picture->structure)
+        pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
+      else
+        pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
     }
+  }
 }
 
 #define SORT_REF_LIST(list, n, compare_func) \
     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
 
 static void
-init_picture_refs_fields_1(
-    guint                picture_structure,
-    GstVaapiPictureH264 *RefPicList[32],
-    guint               *RefPicList_count,
-    GstVaapiPictureH264 *ref_list[32],
-    guint                ref_list_count
-)
-{
-    guint i, j, n;
-
-    i = 0;
-    j = 0;
-    n = *RefPicList_count;
-    do {
-        g_assert(n < 32);
-        for (; i < ref_list_count; i++) {
-            if (ref_list[i]->structure == picture_structure) {
-                RefPicList[n++] = ref_list[i++];
-                break;
-            }
-        }
-        for (; j < ref_list_count; j++) {
-            if (ref_list[j]->structure != picture_structure) {
-                RefPicList[n++] = ref_list[j++];
-                break;
-            }
-        }
-    } while (i < ref_list_count || j < ref_list_count);
-    *RefPicList_count = n;
+init_picture_refs_fields_1 (guint picture_structure,
+    GstVaapiPictureH264 * RefPicList[32],
+    guint * RefPicList_count,
+    GstVaapiPictureH264 * ref_list[32], guint ref_list_count)
+{
+  guint i, j, n;
+
+  i = 0;
+  j = 0;
+  n = *RefPicList_count;
+  do {
+    g_assert (n < 32);
+    for (; i < ref_list_count; i++) {
+      if (ref_list[i]->structure == picture_structure) {
+        RefPicList[n++] = ref_list[i++];
+        break;
+      }
+    }
+    for (; j < ref_list_count; j++) {
+      if (ref_list[j]->structure != picture_structure) {
+        RefPicList[n++] = ref_list[j++];
+        break;
+      }
+    }
+  } while (i < ref_list_count || j < ref_list_count);
+  *RefPicList_count = n;
 }
 
 static inline void
-init_picture_refs_fields(
-    GstVaapiPictureH264 *picture,
-    GstVaapiPictureH264 *RefPicList[32],
-    guint               *RefPicList_count,
-    GstVaapiPictureH264 *short_ref[32],
-    guint                short_ref_count,
-    GstVaapiPictureH264 *long_ref[32],
-    guint                long_ref_count
-)
-{
-    guint n = 0;
-
-    /* 8.2.4.2.5 - reference picture lists in fields */
-    init_picture_refs_fields_1(picture->structure, RefPicList, &n,
-        short_ref, short_ref_count);
-    init_picture_refs_fields_1(picture->structure, RefPicList, &n,
-        long_ref, long_ref_count);
-    *RefPicList_count = n;
+init_picture_refs_fields (GstVaapiPictureH264 * picture,
+    GstVaapiPictureH264 * RefPicList[32],
+    guint * RefPicList_count,
+    GstVaapiPictureH264 * short_ref[32],
+    guint short_ref_count,
+    GstVaapiPictureH264 * long_ref[32], guint long_ref_count)
+{
+  guint n = 0;
+
+  /* 8.2.4.2.5 - reference picture lists in fields */
+  init_picture_refs_fields_1 (picture->structure, RefPicList, &n,
+      short_ref, short_ref_count);
+  init_picture_refs_fields_1 (picture->structure, RefPicList, &n,
+      long_ref, long_ref_count);
+  *RefPicList_count = n;
 }
 
 /* Finds the inter-view reference picture with the supplied view id */
 static GstVaapiPictureH264 *
-find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
+find_inter_view_reference (GstVaapiDecoderH264 * decoder, guint16 view_id)
 {
-    GPtrArray * const inter_views = decoder->priv.inter_views;
-    guint i;
+  GPtrArray *const inter_views = decoder->priv.inter_views;
+  guint i;
 
-    for (i = 0; i < inter_views->len; i++) {
-        GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
-        if (picture->base.view_id == view_id)
-            return picture;
-    }
+  for (i = 0; i < inter_views->len; i++) {
+    GstVaapiPictureH264 *const picture = g_ptr_array_index (inter_views, i);
+    if (picture->base.view_id == view_id)
+      return picture;
+  }
 
-    GST_WARNING("failed to find inter-view reference picture for view_id: %d",
-        view_id);
-    return NULL;
+  GST_WARNING ("failed to find inter-view reference picture for view_id: %d",
+      view_id);
+  return NULL;
 }
 
 /* Checks whether the view id exists in the supplied list of view ids */
 static gboolean
-find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
+find_view_id (guint16 view_id, const guint16 * view_ids, guint num_view_ids)
 {
-    guint i;
+  guint i;
 
-    for (i = 0; i < num_view_ids; i++) {
-        if (view_ids[i] == view_id)
-            return TRUE;
-    }
-    return FALSE;
+  for (i = 0; i < num_view_ids; i++) {
+    if (view_ids[i] == view_id)
+      return TRUE;
+  }
+  return FALSE;
 }
 
 static gboolean
-find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
+find_view_id_in_view (guint16 view_id, const GstH264SPSExtMVCView * view,
     gboolean is_anchor)
 {
-    if (is_anchor)
-        return (find_view_id(view_id, view->anchor_ref_l0,
-                    view->num_anchor_refs_l0) ||
-                find_view_id(view_id, view->anchor_ref_l1,
-                    view->num_anchor_refs_l1));
+  if (is_anchor)
+    return (find_view_id (view_id, view->anchor_ref_l0,
+            view->num_anchor_refs_l0) ||
+        find_view_id (view_id, view->anchor_ref_l1, view->num_anchor_refs_l1));
 
-    return (find_view_id(view_id, view->non_anchor_ref_l0,
-                view->num_non_anchor_refs_l0) ||
-            find_view_id(view_id, view->non_anchor_ref_l1,
-                view->num_non_anchor_refs_l1));
+  return (find_view_id (view_id, view->non_anchor_ref_l0,
+          view->num_non_anchor_refs_l0) ||
+      find_view_id (view_id, view->non_anchor_ref_l1,
+          view->num_non_anchor_refs_l1));
 }
 
 /* Checks whether the inter-view reference picture with the supplied
    view id is used for decoding the current view component picture */
 static gboolean
-is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
-    guint16 view_id, GstVaapiPictureH264 *picture)
+is_inter_view_reference_for_picture (GstVaapiDecoderH264 * decoder,
+    guint16 view_id, GstVaapiPictureH264 * picture)
 {
-    const GstH264SPS * const sps = get_sps(decoder);
-    gboolean is_anchor;
+  const GstH264SPS *const sps = get_sps (decoder);
+  gboolean is_anchor;
 
-    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
-        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
-        return FALSE;
+  if (!GST_VAAPI_PICTURE_IS_MVC (picture) ||
+      sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+    return FALSE;
 
-    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
-    return find_view_id_in_view(view_id,
-        &sps->extension.mvc.view[picture->base.voc], is_anchor);
+  is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR (picture);
+  return find_view_id_in_view (view_id,
+      &sps->extension.mvc.view[picture->base.voc], is_anchor);
 }
 
 /* Checks whether the supplied inter-view reference picture is used
    for decoding the next view component pictures */
 static gboolean
-is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture)
+is_inter_view_reference_for_next_pictures (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture)
 {
-    const GstH264SPS * const sps = get_sps(decoder);
-    gboolean is_anchor;
-    guint i, num_views;
+  const GstH264SPS *const sps = get_sps (decoder);
+  gboolean is_anchor;
+  guint i, num_views;
 
-    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
-        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
-        return FALSE;
-
-    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
-    num_views = sps->extension.mvc.num_views_minus1 + 1;
-    for (i = picture->base.voc + 1; i < num_views; i++) {
-        const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
-        if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
-            return TRUE;
-    }
+  if (!GST_VAAPI_PICTURE_IS_MVC (picture) ||
+      sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
     return FALSE;
+
+  is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR (picture);
+  num_views = sps->extension.mvc.num_views_minus1 + 1;
+  for (i = picture->base.voc + 1; i < num_views; i++) {
+    const GstH264SPSExtMVCView *const view = &sps->extension.mvc.view[i];
+    if (find_view_id_in_view (picture->base.view_id, view, is_anchor))
+      return TRUE;
+  }
+  return FALSE;
 }
 
 /* H.8.2.1 - Initialization process for inter-view prediction references */
 static void
-init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
-    const guint16 *view_ids, guint num_view_ids)
-{
-    guint j, n;
-
-    n = *ref_list_count_ptr;
-    for (j = 0; j < num_view_ids && n < num_refs; j++) {
-        GstVaapiPictureH264 * const pic =
-            find_inter_view_reference(decoder, view_ids[j]);
-        if (pic)
-            ref_list[n++] = pic;
-    }
-    *ref_list_count_ptr = n;
+init_picture_refs_mvc_1 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 ** ref_list, guint * ref_list_count_ptr, guint num_refs,
+    const guint16 * view_ids, guint num_view_ids)
+{
+  guint j, n;
+
+  n = *ref_list_count_ptr;
+  for (j = 0; j < num_view_ids && n < num_refs; j++) {
+    GstVaapiPictureH264 *const pic =
+        find_inter_view_reference (decoder, view_ids[j]);
+    if (pic)
+      ref_list[n++] = pic;
+  }
+  *ref_list_count_ptr = n;
 }
 
 static inline void
-init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
+init_picture_refs_mvc (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr, guint list)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    const GstH264SPS * const sps = get_sps(decoder);
-    const GstH264SPSExtMVCView *view;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  const GstH264SPS *const sps = get_sps (decoder);
+  const GstH264SPSExtMVCView *view;
 
-    GST_DEBUG("initialize reference picture list for inter-view prediction");
+  GST_DEBUG ("initialize reference picture list for inter-view prediction");
 
-    if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
-        return;
-    view = &sps->extension.mvc.view[picture->base.voc];
+  if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+    return;
+  view = &sps->extension.mvc.view[picture->base.voc];
 
 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
         init_picture_refs_mvc_1(decoder,                                \
@@ -2403,1384 +2368,1327 @@ init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
             view->num_##view_list##s_l##ref_list);                      \
     } while (0)
 
-    if (list == 0) {
-        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
-            INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
-        else
-            INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
-    }
-    else {
-        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
-            INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
-        else
-            INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
-    }
+  if (list == 0) {
+    if (GST_VAAPI_PICTURE_IS_ANCHOR (picture))
+      INVOKE_INIT_PICTURE_REFS_MVC (0, anchor_ref);
+    else
+      INVOKE_INIT_PICTURE_REFS_MVC (0, non_anchor_ref);
+  } else {
+    if (GST_VAAPI_PICTURE_IS_ANCHOR (picture))
+      INVOKE_INIT_PICTURE_REFS_MVC (1, anchor_ref);
+    else
+      INVOKE_INIT_PICTURE_REFS_MVC (1, non_anchor_ref);
+  }
 
 #undef INVOKE_INIT_PICTURE_REFS_MVC
 }
 
 static void
-init_picture_refs_p_slice(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 **ref_list;
-    guint i;
-
-    GST_DEBUG("decode reference picture list for P and SP slices");
-
-    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
-        /* 8.2.4.2.1 - P and SP slices in frames */
-        if (priv->short_ref_count > 0) {
-            ref_list = priv->RefPicList0;
-            for (i = 0; i < priv->short_ref_count; i++)
-                ref_list[i] = priv->short_ref[i];
-            SORT_REF_LIST(ref_list, i, pic_num_dec);
-            priv->RefPicList0_count += i;
-        }
-
-        if (priv->long_ref_count > 0) {
-            ref_list = &priv->RefPicList0[priv->RefPicList0_count];
-            for (i = 0; i < priv->long_ref_count; i++)
-                ref_list[i] = priv->long_ref[i];
-            SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
-            priv->RefPicList0_count += i;
-        }
-    }
-    else {
-        /* 8.2.4.2.2 - P and SP slices in fields */
-        GstVaapiPictureH264 *short_ref[32];
-        guint short_ref_count = 0;
-        GstVaapiPictureH264 *long_ref[32];
-        guint long_ref_count = 0;
-
-        if (priv->short_ref_count > 0) {
-            for (i = 0; i < priv->short_ref_count; i++)
-                short_ref[i] = priv->short_ref[i];
-            SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
-            short_ref_count = i;
-        }
-
-        if (priv->long_ref_count > 0) {
-            for (i = 0; i < priv->long_ref_count; i++)
-                long_ref[i] = priv->long_ref[i];
-            SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
-            long_ref_count = i;
-        }
-
-        init_picture_refs_fields(
-            picture,
-            priv->RefPicList0, &priv->RefPicList0_count,
-            short_ref,          short_ref_count,
-            long_ref,           long_ref_count
-        );
-    }
-
-    if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
-        /* RefPicList0 */
-        init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
-    }
+init_picture_refs_p_slice (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 **ref_list;
+  guint i;
+
+  GST_DEBUG ("decode reference picture list for P and SP slices");
+
+  if (GST_VAAPI_PICTURE_IS_FRAME (picture)) {
+    /* 8.2.4.2.1 - P and SP slices in frames */
+    if (priv->short_ref_count > 0) {
+      ref_list = priv->RefPicList0;
+      for (i = 0; i < priv->short_ref_count; i++)
+        ref_list[i] = priv->short_ref[i];
+      SORT_REF_LIST (ref_list, i, pic_num_dec);
+      priv->RefPicList0_count += i;
+    }
+
+    if (priv->long_ref_count > 0) {
+      ref_list = &priv->RefPicList0[priv->RefPicList0_count];
+      for (i = 0; i < priv->long_ref_count; i++)
+        ref_list[i] = priv->long_ref[i];
+      SORT_REF_LIST (ref_list, i, long_term_pic_num_inc);
+      priv->RefPicList0_count += i;
+    }
+  } else {
+    /* 8.2.4.2.2 - P and SP slices in fields */
+    GstVaapiPictureH264 *short_ref[32];
+    guint short_ref_count = 0;
+    GstVaapiPictureH264 *long_ref[32];
+    guint long_ref_count = 0;
+
+    if (priv->short_ref_count > 0) {
+      for (i = 0; i < priv->short_ref_count; i++)
+        short_ref[i] = priv->short_ref[i];
+      SORT_REF_LIST (short_ref, i, frame_num_wrap_dec);
+      short_ref_count = i;
+    }
+
+    if (priv->long_ref_count > 0) {
+      for (i = 0; i < priv->long_ref_count; i++)
+        long_ref[i] = priv->long_ref[i];
+      SORT_REF_LIST (long_ref, i, long_term_frame_idx_inc);
+      long_ref_count = i;
+    }
+
+    init_picture_refs_fields (picture,
+        priv->RefPicList0, &priv->RefPicList0_count,
+        short_ref, short_ref_count, long_ref, long_ref_count);
+  }
+
+  if (GST_VAAPI_PICTURE_IS_MVC (picture)) {
+    /* RefPicList0 */
+    init_picture_refs_mvc (decoder, picture, slice_hdr, 0);
+  }
 }
 
 static void
-init_picture_refs_b_slice(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 **ref_list;
-    guint i, n;
-
-    GST_DEBUG("decode reference picture list for B slices");
-
-    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
-        /* 8.2.4.2.3 - B slices in frames */
-
-        /* RefPicList0 */
-        if (priv->short_ref_count > 0) {
-            // 1. Short-term references
-            ref_list = priv->RefPicList0;
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc < picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_dec);
-            priv->RefPicList0_count += n;
-
-            ref_list = &priv->RefPicList0[priv->RefPicList0_count];
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc >= picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_inc);
-            priv->RefPicList0_count += n;
-        }
-
-        if (priv->long_ref_count > 0) {
-            // 2. Long-term references
-            ref_list = &priv->RefPicList0[priv->RefPicList0_count];
-            for (n = 0, i = 0; i < priv->long_ref_count; i++)
-                ref_list[n++] = priv->long_ref[i];
-            SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
-            priv->RefPicList0_count += n;
-        }
-
-        /* RefPicList1 */
-        if (priv->short_ref_count > 0) {
-            // 1. Short-term references
-            ref_list = priv->RefPicList1;
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc > picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_inc);
-            priv->RefPicList1_count += n;
-
-            ref_list = &priv->RefPicList1[priv->RefPicList1_count];
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc <= picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_dec);
-            priv->RefPicList1_count += n;
-        }
-
-        if (priv->long_ref_count > 0) {
-            // 2. Long-term references
-            ref_list = &priv->RefPicList1[priv->RefPicList1_count];
-            for (n = 0, i = 0; i < priv->long_ref_count; i++)
-                ref_list[n++] = priv->long_ref[i];
-            SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
-            priv->RefPicList1_count += n;
-        }
-    }
-    else {
-        /* 8.2.4.2.4 - B slices in fields */
-        GstVaapiPictureH264 *short_ref0[32];
-        guint short_ref0_count = 0;
-        GstVaapiPictureH264 *short_ref1[32];
-        guint short_ref1_count = 0;
-        GstVaapiPictureH264 *long_ref[32];
-        guint long_ref_count = 0;
-
-        /* refFrameList0ShortTerm */
-        if (priv->short_ref_count > 0) {
-            ref_list = short_ref0;
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc <= picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_dec);
-            short_ref0_count += n;
-
-            ref_list = &short_ref0[short_ref0_count];
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc > picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_inc);
-            short_ref0_count += n;
-        }
+init_picture_refs_b_slice (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 **ref_list;
+  guint i, n;
 
-        /* refFrameList1ShortTerm */
-        if (priv->short_ref_count > 0) {
-            ref_list = short_ref1;
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc > picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_inc);
-            short_ref1_count += n;
-
-            ref_list = &short_ref1[short_ref1_count];
-            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
-                if (priv->short_ref[i]->base.poc <= picture->base.poc)
-                    ref_list[n++] = priv->short_ref[i];
-            }
-            SORT_REF_LIST(ref_list, n, poc_dec);
-            short_ref1_count += n;
-        }
+  GST_DEBUG ("decode reference picture list for B slices");
 
-        /* refFrameListLongTerm */
-        if (priv->long_ref_count > 0) {
-            for (i = 0; i < priv->long_ref_count; i++)
-                long_ref[i] = priv->long_ref[i];
-            SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
-            long_ref_count = i;
-        }
+  if (GST_VAAPI_PICTURE_IS_FRAME (picture)) {
+    /* 8.2.4.2.3 - B slices in frames */
 
-        init_picture_refs_fields(
-            picture,
-            priv->RefPicList0, &priv->RefPicList0_count,
-            short_ref0,         short_ref0_count,
-            long_ref,           long_ref_count
-        );
-
-        init_picture_refs_fields(
-            picture,
-            priv->RefPicList1, &priv->RefPicList1_count,
-            short_ref1,         short_ref1_count,
-            long_ref,           long_ref_count
-        );
-   }
-
-    /* Check whether RefPicList1 is identical to RefPicList0, then
-       swap if necessary */
-    if (priv->RefPicList1_count > 1 &&
-        priv->RefPicList1_count == priv->RefPicList0_count &&
-        memcmp(priv->RefPicList0, priv->RefPicList1,
-               priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
-        GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
-        priv->RefPicList1[0] = priv->RefPicList1[1];
-        priv->RefPicList1[1] = tmp;
+    /* RefPicList0 */
+    if (priv->short_ref_count > 0) {
+      // 1. Short-term references
+      ref_list = priv->RefPicList0;
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc < picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_dec);
+      priv->RefPicList0_count += n;
+
+      ref_list = &priv->RefPicList0[priv->RefPicList0_count];
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc >= picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_inc);
+      priv->RefPicList0_count += n;
+    }
+
+    if (priv->long_ref_count > 0) {
+      // 2. Long-term references
+      ref_list = &priv->RefPicList0[priv->RefPicList0_count];
+      for (n = 0, i = 0; i < priv->long_ref_count; i++)
+        ref_list[n++] = priv->long_ref[i];
+      SORT_REF_LIST (ref_list, n, long_term_pic_num_inc);
+      priv->RefPicList0_count += n;
     }
 
-    if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
-        /* RefPicList0 */
-        init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
+    /* RefPicList1 */
+    if (priv->short_ref_count > 0) {
+      // 1. Short-term references
+      ref_list = priv->RefPicList1;
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc > picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_inc);
+      priv->RefPicList1_count += n;
+
+      ref_list = &priv->RefPicList1[priv->RefPicList1_count];
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc <= picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_dec);
+      priv->RefPicList1_count += n;
+    }
+
+    if (priv->long_ref_count > 0) {
+      // 2. Long-term references
+      ref_list = &priv->RefPicList1[priv->RefPicList1_count];
+      for (n = 0, i = 0; i < priv->long_ref_count; i++)
+        ref_list[n++] = priv->long_ref[i];
+      SORT_REF_LIST (ref_list, n, long_term_pic_num_inc);
+      priv->RefPicList1_count += n;
+    }
+  } else {
+    /* 8.2.4.2.4 - B slices in fields */
+    GstVaapiPictureH264 *short_ref0[32];
+    guint short_ref0_count = 0;
+    GstVaapiPictureH264 *short_ref1[32];
+    guint short_ref1_count = 0;
+    GstVaapiPictureH264 *long_ref[32];
+    guint long_ref_count = 0;
+
+    /* refFrameList0ShortTerm */
+    if (priv->short_ref_count > 0) {
+      ref_list = short_ref0;
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc <= picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_dec);
+      short_ref0_count += n;
+
+      ref_list = &short_ref0[short_ref0_count];
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc > picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_inc);
+      short_ref0_count += n;
+    }
+
+    /* refFrameList1ShortTerm */
+    if (priv->short_ref_count > 0) {
+      ref_list = short_ref1;
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc > picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_inc);
+      short_ref1_count += n;
+
+      ref_list = &short_ref1[short_ref1_count];
+      for (n = 0, i = 0; i < priv->short_ref_count; i++) {
+        if (priv->short_ref[i]->base.poc <= picture->base.poc)
+          ref_list[n++] = priv->short_ref[i];
+      }
+      SORT_REF_LIST (ref_list, n, poc_dec);
+      short_ref1_count += n;
+    }
+
+    /* refFrameListLongTerm */
+    if (priv->long_ref_count > 0) {
+      for (i = 0; i < priv->long_ref_count; i++)
+        long_ref[i] = priv->long_ref[i];
+      SORT_REF_LIST (long_ref, i, long_term_frame_idx_inc);
+      long_ref_count = i;
+    }
+
+    init_picture_refs_fields (picture,
+        priv->RefPicList0, &priv->RefPicList0_count,
+        short_ref0, short_ref0_count, long_ref, long_ref_count);
+
+    init_picture_refs_fields (picture,
+        priv->RefPicList1, &priv->RefPicList1_count,
+        short_ref1, short_ref1_count, long_ref, long_ref_count);
+  }
+
+  /* Check whether RefPicList1 is identical to RefPicList0, then
+     swap if necessary */
+  if (priv->RefPicList1_count > 1 &&
+      priv->RefPicList1_count == priv->RefPicList0_count &&
+      memcmp (priv->RefPicList0, priv->RefPicList1,
+          priv->RefPicList0_count * sizeof (priv->RefPicList0[0])) == 0) {
+    GstVaapiPictureH264 *const tmp = priv->RefPicList1[0];
+    priv->RefPicList1[0] = priv->RefPicList1[1];
+    priv->RefPicList1[1] = tmp;
+  }
+
+  if (GST_VAAPI_PICTURE_IS_MVC (picture)) {
+    /* RefPicList0 */
+    init_picture_refs_mvc (decoder, picture, slice_hdr, 0);
 
-        /* RefPicList1 */
-        init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
-    }
+    /* RefPicList1 */
+    init_picture_refs_mvc (decoder, picture, slice_hdr, 1);
+  }
 }
 
 #undef SORT_REF_LIST
 
 static gint
-find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
+find_short_term_reference (GstVaapiDecoderH264 * decoder, gint32 pic_num)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i;
 
-    for (i = 0; i < priv->short_ref_count; i++) {
-        if (priv->short_ref[i]->pic_num == pic_num)
-            return i;
-    }
-    GST_ERROR("found no short-term reference picture with PicNum = %d",
-              pic_num);
-    return -1;
+  for (i = 0; i < priv->short_ref_count; i++) {
+    if (priv->short_ref[i]->pic_num == pic_num)
+      return i;
+  }
+  GST_ERROR ("found no short-term reference picture with PicNum = %d", pic_num);
+  return -1;
 }
 
 static gint
-find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
+find_long_term_reference (GstVaapiDecoderH264 * decoder,
+    gint32 long_term_pic_num)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i;
 
-    for (i = 0; i < priv->long_ref_count; i++) {
-        if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
-            return i;
-    }
-    GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
-              long_term_pic_num);
-    return -1;
+  for (i = 0; i < priv->long_ref_count; i++) {
+    if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
+      return i;
+  }
+  GST_ERROR ("found no long-term reference picture with LongTermPicNum = %d",
+      long_term_pic_num);
+  return -1;
 }
 
 static void
-exec_picture_refs_modification_1(
-    GstVaapiDecoderH264           *decoder,
-    GstVaapiPictureH264           *picture,
-    GstH264SliceHdr               *slice_hdr,
-    guint                          list
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    GstH264RefPicListModification *ref_pic_list_modification;
-    guint num_ref_pic_list_modifications;
-    GstVaapiPictureH264 **ref_list;
-    guint *ref_list_count_ptr, ref_list_idx = 0;
-    const guint16 *view_ids = NULL;
-    guint i, j, n, num_refs, num_view_ids = 0;
-    gint found_ref_idx;
-    gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
-
-    GST_DEBUG("modification process of reference picture list %u", list);
-
-    if (list == 0) {
-        ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
-        num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
-        ref_list                       = priv->RefPicList0;
-        ref_list_count_ptr             = &priv->RefPicList0_count;
-        num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
-
-        if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
-            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
-            const GstH264SPSExtMVCView * const view =
-                &sps->extension.mvc.view[picture->base.voc];
-            if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
-                view_ids = view->anchor_ref_l0;
-                num_view_ids = view->num_anchor_refs_l0;
-            }
-            else {
-                view_ids = view->non_anchor_ref_l0;
-                num_view_ids = view->num_non_anchor_refs_l0;
-            }
-        }
-    }
-    else {
-        ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
-        num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
-        ref_list                       = priv->RefPicList1;
-        ref_list_count_ptr             = &priv->RefPicList1_count;
-        num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
-
-        if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
-            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
-            const GstH264SPSExtMVCView * const view =
-                &sps->extension.mvc.view[picture->base.voc];
-            if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
-                view_ids = view->anchor_ref_l1;
-                num_view_ids = view->num_anchor_refs_l1;
-            }
-            else {
-                view_ids = view->non_anchor_ref_l1;
-                num_view_ids = view->num_non_anchor_refs_l1;
-            }
-        }
-    }
-
-    if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
-        MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
-        CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
-    }
-    else {
-        MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
-        CurrPicNum = slice_hdr->frame_num;                      // frame_num
-    }
-
-    picNumPred = CurrPicNum;
-    picViewIdxPred = -1;
-
-    for (i = 0; i < num_ref_pic_list_modifications; i++) {
-        GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
-        if (l->modification_of_pic_nums_idc == 3)
-            break;
-
-        /* 8.2.4.3.1 - Short-term reference pictures */
-        if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
-            gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
-            gint32 picNum, picNumNoWrap;
-
-            // (8-34)
-            if (l->modification_of_pic_nums_idc == 0) {
-                picNumNoWrap = picNumPred - abs_diff_pic_num;
-                if (picNumNoWrap < 0)
-                    picNumNoWrap += MaxPicNum;
-            }
-
-            // (8-35)
-            else {
-                picNumNoWrap = picNumPred + abs_diff_pic_num;
-                if (picNumNoWrap >= MaxPicNum)
-                    picNumNoWrap -= MaxPicNum;
-            }
-            picNumPred = picNumNoWrap;
-
-            // (8-36)
-            picNum = picNumNoWrap;
-            if (picNum > CurrPicNum)
-                picNum -= MaxPicNum;
-
-            // (8-37)
-            for (j = num_refs; j > ref_list_idx; j--)
-                ref_list[j] = ref_list[j - 1];
-            found_ref_idx = find_short_term_reference(decoder, picNum);
-            ref_list[ref_list_idx++] =
-                found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
-            n = ref_list_idx;
-            for (j = ref_list_idx; j <= num_refs; j++) {
-                gint32 PicNumF;
-                if (!ref_list[j])
-                    continue;
-                PicNumF =
-                    GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
-                    ref_list[j]->pic_num : MaxPicNum;
-                if (PicNumF != picNum ||
-                    ref_list[j]->base.view_id != picture->base.view_id)
-                    ref_list[n++] = ref_list[j];
-            }
-        }
-
-        /* 8.2.4.3.2 - Long-term reference pictures */
-        else if (l->modification_of_pic_nums_idc == 2) {
-
-            for (j = num_refs; j > ref_list_idx; j--)
-                ref_list[j] = ref_list[j - 1];
-            found_ref_idx =
-                find_long_term_reference(decoder, l->value.long_term_pic_num);
-            ref_list[ref_list_idx++] =
-                found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
-            n = ref_list_idx;
-            for (j = ref_list_idx; j <= num_refs; j++) {
-                gint32 LongTermPicNumF;
-                if (!ref_list[j])
-                    continue;
-                LongTermPicNumF =
-                    GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
-                    ref_list[j]->long_term_pic_num : INT_MAX;
-                if (LongTermPicNumF != l->value.long_term_pic_num ||
-                    ref_list[j]->base.view_id != picture->base.view_id)
-                    ref_list[n++] = ref_list[j];
-            }
-        }
-
-        /* H.8.2.2.3 - Inter-view prediction reference pictures */
-        else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
-                  sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
-                 (l->modification_of_pic_nums_idc == 4 ||
-                  l->modification_of_pic_nums_idc == 5)) {
-            gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
-            gint32 picViewIdx, targetViewId;
-
-            // (H-6)
-            if (l->modification_of_pic_nums_idc == 4) {
-                picViewIdx = picViewIdxPred - abs_diff_view_idx;
-                if (picViewIdx < 0)
-                    picViewIdx += num_view_ids;
-            }
-
-            // (H-7)
-            else {
-                picViewIdx = picViewIdxPred + abs_diff_view_idx;
-                if (picViewIdx >= num_view_ids)
-                    picViewIdx -= num_view_ids;
-            }
-            picViewIdxPred = picViewIdx;
-
-            // (H-8, H-9)
-            targetViewId = view_ids[picViewIdx];
-
-            // (H-10)
-            for (j = num_refs; j > ref_list_idx; j--)
-                ref_list[j] = ref_list[j - 1];
-            ref_list[ref_list_idx++] =
-                find_inter_view_reference(decoder, targetViewId);
-            n = ref_list_idx;
-            for (j = ref_list_idx; j <= num_refs; j++) {
-                if (!ref_list[j])
-                    continue;
-                if (ref_list[j]->base.view_id != targetViewId ||
-                    ref_list[j]->base.poc != picture->base.poc)
-                    ref_list[n++] = ref_list[j];
-            }
-        }
-    }
+exec_picture_refs_modification_1 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr, guint list)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  GstH264RefPicListModification *ref_pic_list_modification;
+  guint num_ref_pic_list_modifications;
+  GstVaapiPictureH264 **ref_list;
+  guint *ref_list_count_ptr, ref_list_idx = 0;
+  const guint16 *view_ids = NULL;
+  guint i, j, n, num_refs, num_view_ids = 0;
+  gint found_ref_idx;
+  gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
+
+  GST_DEBUG ("modification process of reference picture list %u", list);
+
+  if (list == 0) {
+    ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
+    num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
+    ref_list = priv->RefPicList0;
+    ref_list_count_ptr = &priv->RefPicList0_count;
+    num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
+
+    if (GST_VAAPI_PICTURE_IS_MVC (picture) &&
+        sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+      const GstH264SPSExtMVCView *const view =
+          &sps->extension.mvc.view[picture->base.voc];
+      if (GST_VAAPI_PICTURE_IS_ANCHOR (picture)) {
+        view_ids = view->anchor_ref_l0;
+        num_view_ids = view->num_anchor_refs_l0;
+      } else {
+        view_ids = view->non_anchor_ref_l0;
+        num_view_ids = view->num_non_anchor_refs_l0;
+      }
+    }
+  } else {
+    ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
+    num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
+    ref_list = priv->RefPicList1;
+    ref_list_count_ptr = &priv->RefPicList1_count;
+    num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
+
+    if (GST_VAAPI_PICTURE_IS_MVC (picture) &&
+        sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+      const GstH264SPSExtMVCView *const view =
+          &sps->extension.mvc.view[picture->base.voc];
+      if (GST_VAAPI_PICTURE_IS_ANCHOR (picture)) {
+        view_ids = view->anchor_ref_l1;
+        num_view_ids = view->num_anchor_refs_l1;
+      } else {
+        view_ids = view->non_anchor_ref_l1;
+        num_view_ids = view->num_non_anchor_refs_l1;
+      }
+    }
+  }
+
+  if (!GST_VAAPI_PICTURE_IS_FRAME (picture)) {
+    MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5);      // 2 * MaxFrameNum
+    CurrPicNum = 2 * slice_hdr->frame_num + 1;  // 2 * frame_num + 1
+  } else {
+    MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4);      // MaxFrameNum
+    CurrPicNum = slice_hdr->frame_num;  // frame_num
+  }
+
+  picNumPred = CurrPicNum;
+  picViewIdxPred = -1;
+
+  for (i = 0; i < num_ref_pic_list_modifications; i++) {
+    GstH264RefPicListModification *const l = &ref_pic_list_modification[i];
+    if (l->modification_of_pic_nums_idc == 3)
+      break;
+
+    /* 8.2.4.3.1 - Short-term reference pictures */
+    if (l->modification_of_pic_nums_idc == 0
+        || l->modification_of_pic_nums_idc == 1) {
+      gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
+      gint32 picNum, picNumNoWrap;
+
+      // (8-34)
+      if (l->modification_of_pic_nums_idc == 0) {
+        picNumNoWrap = picNumPred - abs_diff_pic_num;
+        if (picNumNoWrap < 0)
+          picNumNoWrap += MaxPicNum;
+      }
+      // (8-35)
+      else {
+        picNumNoWrap = picNumPred + abs_diff_pic_num;
+        if (picNumNoWrap >= MaxPicNum)
+          picNumNoWrap -= MaxPicNum;
+      }
+      picNumPred = picNumNoWrap;
+
+      // (8-36)
+      picNum = picNumNoWrap;
+      if (picNum > CurrPicNum)
+        picNum -= MaxPicNum;
+
+      // (8-37)
+      for (j = num_refs; j > ref_list_idx; j--)
+        ref_list[j] = ref_list[j - 1];
+      found_ref_idx = find_short_term_reference (decoder, picNum);
+      ref_list[ref_list_idx++] =
+          found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
+      n = ref_list_idx;
+      for (j = ref_list_idx; j <= num_refs; j++) {
+        gint32 PicNumF;
+        if (!ref_list[j])
+          continue;
+        PicNumF =
+            GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (ref_list[j]) ?
+            ref_list[j]->pic_num : MaxPicNum;
+        if (PicNumF != picNum ||
+            ref_list[j]->base.view_id != picture->base.view_id)
+          ref_list[n++] = ref_list[j];
+      }
+    }
+
+    /* 8.2.4.3.2 - Long-term reference pictures */
+    else if (l->modification_of_pic_nums_idc == 2) {
+
+      for (j = num_refs; j > ref_list_idx; j--)
+        ref_list[j] = ref_list[j - 1];
+      found_ref_idx =
+          find_long_term_reference (decoder, l->value.long_term_pic_num);
+      ref_list[ref_list_idx++] =
+          found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
+      n = ref_list_idx;
+      for (j = ref_list_idx; j <= num_refs; j++) {
+        gint32 LongTermPicNumF;
+        if (!ref_list[j])
+          continue;
+        LongTermPicNumF =
+            GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (ref_list[j]) ?
+            ref_list[j]->long_term_pic_num : INT_MAX;
+        if (LongTermPicNumF != l->value.long_term_pic_num ||
+            ref_list[j]->base.view_id != picture->base.view_id)
+          ref_list[n++] = ref_list[j];
+      }
+    }
+
+    /* H.8.2.2.3 - Inter-view prediction reference pictures */
+    else if ((GST_VAAPI_PICTURE_IS_MVC (picture) &&
+            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
+        (l->modification_of_pic_nums_idc == 4 ||
+            l->modification_of_pic_nums_idc == 5)) {
+      gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
+      gint32 picViewIdx, targetViewId;
+
+      // (H-6)
+      if (l->modification_of_pic_nums_idc == 4) {
+        picViewIdx = picViewIdxPred - abs_diff_view_idx;
+        if (picViewIdx < 0)
+          picViewIdx += num_view_ids;
+      }
+      // (H-7)
+      else {
+        picViewIdx = picViewIdxPred + abs_diff_view_idx;
+        if (picViewIdx >= num_view_ids)
+          picViewIdx -= num_view_ids;
+      }
+      picViewIdxPred = picViewIdx;
+
+      // (H-8, H-9)
+      targetViewId = view_ids[picViewIdx];
+
+      // (H-10)
+      for (j = num_refs; j > ref_list_idx; j--)
+        ref_list[j] = ref_list[j - 1];
+      ref_list[ref_list_idx++] =
+          find_inter_view_reference (decoder, targetViewId);
+      n = ref_list_idx;
+      for (j = ref_list_idx; j <= num_refs; j++) {
+        if (!ref_list[j])
+          continue;
+        if (ref_list[j]->base.view_id != targetViewId ||
+            ref_list[j]->base.poc != picture->base.poc)
+          ref_list[n++] = ref_list[j];
+      }
+    }
+  }
 
 #if DEBUG
-    for (i = 0; i < num_refs; i++)
-        if (!ref_list[i])
-            GST_ERROR("list %u entry %u is empty", list, i);
+  for (i = 0; i < num_refs; i++)
+    if (!ref_list[i])
+      GST_ERROR ("list %u entry %u is empty", list, i);
 #endif
-    *ref_list_count_ptr = num_refs;
+  *ref_list_count_ptr = num_refs;
 }
 
 /* 8.2.4.3 - Modification process for reference picture lists */
 static void
-exec_picture_refs_modification(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
+exec_picture_refs_modification (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
 {
-    GST_DEBUG("execute ref_pic_list_modification()");
+  GST_DEBUG ("execute ref_pic_list_modification()");
 
-    /* RefPicList0 */
-    if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
-        slice_hdr->ref_pic_list_modification_flag_l0)
-        exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
+  /* RefPicList0 */
+  if (!GST_H264_IS_I_SLICE (slice_hdr) && !GST_H264_IS_SI_SLICE (slice_hdr) &&
+      slice_hdr->ref_pic_list_modification_flag_l0)
+    exec_picture_refs_modification_1 (decoder, picture, slice_hdr, 0);
 
-    /* RefPicList1 */
-    if (GST_H264_IS_B_SLICE(slice_hdr) &&
-        slice_hdr->ref_pic_list_modification_flag_l1)
-        exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
+  /* RefPicList1 */
+  if (GST_H264_IS_B_SLICE (slice_hdr) &&
+      slice_hdr->ref_pic_list_modification_flag_l1)
+    exec_picture_refs_modification_1 (decoder, picture, slice_hdr, 1);
 }
 
 static gboolean
-check_picture_ref_corruption(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *RefPicList[32], guint RefPicList_count)
+check_picture_ref_corruption (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * RefPicList[32], guint RefPicList_count)
 {
-    const guint corrupted_flags =
-        GST_VAAPI_PICTURE_FLAG_CORRUPTED | GST_VAAPI_PICTURE_FLAG_GHOST;
-    guint i;
+  const guint corrupted_flags =
+      GST_VAAPI_PICTURE_FLAG_CORRUPTED | GST_VAAPI_PICTURE_FLAG_GHOST;
+  guint i;
 
-    for (i = 0; i < RefPicList_count; i++) {
-        GstVaapiPictureH264 * const picture = RefPicList[i];
-        if (picture && (GST_VAAPI_PICTURE_FLAGS(picture) & corrupted_flags))
-            return TRUE;
-    }
-    return FALSE;
+  for (i = 0; i < RefPicList_count; i++) {
+    GstVaapiPictureH264 *const picture = RefPicList[i];
+    if (picture && (GST_VAAPI_PICTURE_FLAGS (picture) & corrupted_flags))
+      return TRUE;
+  }
+  return FALSE;
 }
 
 static void
-mark_picture_refs(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+mark_picture_refs (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
 
-    if (GST_VAAPI_PICTURE_IS_CORRUPTED(picture))
-        return;
+  if (GST_VAAPI_PICTURE_IS_CORRUPTED (picture))
+    return;
 
-    if (check_picture_ref_corruption(decoder,
-            priv->RefPicList0, priv->RefPicList0_count) ||
-        check_picture_ref_corruption(decoder,
-            priv->RefPicList1, priv->RefPicList1_count))
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_CORRUPTED);
+  if (check_picture_ref_corruption (decoder,
+          priv->RefPicList0, priv->RefPicList0_count) ||
+      check_picture_ref_corruption (decoder,
+          priv->RefPicList1, priv->RefPicList1_count))
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_CORRUPTED);
 }
 
 static void
-init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i, j, short_ref_count, long_ref_count;
-
-    short_ref_count = 0;
-    long_ref_count  = 0;
-    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
-        for (i = 0; i < priv->dpb_count; i++) {
-            GstVaapiFrameStore * const fs = priv->dpb[i];
-            GstVaapiPictureH264 *pic;
-            if (!gst_vaapi_frame_store_has_frame(fs))
-                continue;
-            pic = fs->buffers[0];
-            if (pic->base.view_id != picture->base.view_id)
-                continue;
-            if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
-                priv->short_ref[short_ref_count++] = pic;
-            else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
-                priv->long_ref[long_ref_count++] = pic;
-            pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
-            pic->other_field = fs->buffers[1];
-        }
-    }
-    else {
-        for (i = 0; i < priv->dpb_count; i++) {
-            GstVaapiFrameStore * const fs = priv->dpb[i];
-            for (j = 0; j < fs->num_buffers; j++) {
-                GstVaapiPictureH264 * const pic = fs->buffers[j];
-                if (pic->base.view_id != picture->base.view_id)
-                    continue;
-                if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
-                    priv->short_ref[short_ref_count++] = pic;
-                else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
-                    priv->long_ref[long_ref_count++] = pic;
-                pic->structure = pic->base.structure;
-                pic->other_field = fs->buffers[j ^ 1];
-            }
-        }
+init_picture_ref_lists (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i, j, short_ref_count, long_ref_count;
+
+  short_ref_count = 0;
+  long_ref_count = 0;
+  if (GST_VAAPI_PICTURE_IS_FRAME (picture)) {
+    for (i = 0; i < priv->dpb_count; i++) {
+      GstVaapiFrameStore *const fs = priv->dpb[i];
+      GstVaapiPictureH264 *pic;
+      if (!gst_vaapi_frame_store_has_frame (fs))
+        continue;
+      pic = fs->buffers[0];
+      if (pic->base.view_id != picture->base.view_id)
+        continue;
+      if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (pic))
+        priv->short_ref[short_ref_count++] = pic;
+      else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (pic))
+        priv->long_ref[long_ref_count++] = pic;
+      pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+      pic->other_field = fs->buffers[1];
+    }
+  } else {
+    for (i = 0; i < priv->dpb_count; i++) {
+      GstVaapiFrameStore *const fs = priv->dpb[i];
+      for (j = 0; j < fs->num_buffers; j++) {
+        GstVaapiPictureH264 *const pic = fs->buffers[j];
+        if (pic->base.view_id != picture->base.view_id)
+          continue;
+        if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (pic))
+          priv->short_ref[short_ref_count++] = pic;
+        else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (pic))
+          priv->long_ref[long_ref_count++] = pic;
+        pic->structure = pic->base.structure;
+        pic->other_field = fs->buffers[j ^ 1];
+      }
     }
+  }
 
-    for (i = short_ref_count; i < priv->short_ref_count; i++)
-        priv->short_ref[i] = NULL;
-    priv->short_ref_count = short_ref_count;
+  for (i = short_ref_count; i < priv->short_ref_count; i++)
+    priv->short_ref[i] = NULL;
+  priv->short_ref_count = short_ref_count;
 
-    for (i = long_ref_count; i < priv->long_ref_count; i++)
-        priv->long_ref[i] = NULL;
-    priv->long_ref_count = long_ref_count;
+  for (i = long_ref_count; i < priv->long_ref_count; i++)
+    priv->long_ref[i] = NULL;
+  priv->long_ref_count = long_ref_count;
 }
 
 static void
-init_picture_refs(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture,
-    GstH264SliceHdr     *slice_hdr
-)
+init_picture_refs (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    guint i, num_refs;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  guint i, num_refs;
 
-    init_picture_ref_lists(decoder, picture);
-    init_picture_refs_pic_num(decoder, picture, slice_hdr);
+  init_picture_ref_lists (decoder, picture);
+  init_picture_refs_pic_num (decoder, picture, slice_hdr);
 
-    priv->RefPicList0_count = 0;
-    priv->RefPicList1_count = 0;
+  priv->RefPicList0_count = 0;
+  priv->RefPicList1_count = 0;
 
-    switch (slice_hdr->type % 5) {
+  switch (slice_hdr->type % 5) {
     case GST_H264_P_SLICE:
     case GST_H264_SP_SLICE:
-        init_picture_refs_p_slice(decoder, picture, slice_hdr);
-        break;
+      init_picture_refs_p_slice (decoder, picture, slice_hdr);
+      break;
     case GST_H264_B_SLICE:
-        init_picture_refs_b_slice(decoder, picture, slice_hdr);
-        break;
+      init_picture_refs_b_slice (decoder, picture, slice_hdr);
+      break;
     default:
-        break;
-    }
+      break;
+  }
 
-    exec_picture_refs_modification(decoder, picture, slice_hdr);
+  exec_picture_refs_modification (decoder, picture, slice_hdr);
 
-    switch (slice_hdr->type % 5) {
+  switch (slice_hdr->type % 5) {
     case GST_H264_B_SLICE:
-        num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
-        for (i = priv->RefPicList1_count; i < num_refs; i++)
-            priv->RefPicList1[i] = NULL;
-        priv->RefPicList1_count = num_refs;
+      num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
+      for (i = priv->RefPicList1_count; i < num_refs; i++)
+        priv->RefPicList1[i] = NULL;
+      priv->RefPicList1_count = num_refs;
 
-        // fall-through
+      // fall-through
     case GST_H264_P_SLICE:
     case GST_H264_SP_SLICE:
-        num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
-        for (i = priv->RefPicList0_count; i < num_refs; i++)
-            priv->RefPicList0[i] = NULL;
-        priv->RefPicList0_count = num_refs;
-        break;
+      num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
+      for (i = priv->RefPicList0_count; i < num_refs; i++)
+        priv->RefPicList0[i] = NULL;
+      priv->RefPicList0_count = num_refs;
+      break;
     default:
-        break;
-    }
+      break;
+  }
 
-    mark_picture_refs(decoder, picture);
+  mark_picture_refs (decoder, picture);
 }
 
 static GstVaapiPictureH264 *
-fill_picture_first_field_gap(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *f0)
+fill_picture_first_field_gap (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * f0)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *f1;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *f1;
 
-    f1 = gst_vaapi_picture_h264_new_clone(f0);
-    if (!f1)
-        goto error_allocate_field;
+  f1 = gst_vaapi_picture_h264_new_clone (f0);
+  if (!f1)
+    goto error_allocate_field;
 
-    gst_vaapi_picture_replace(&priv->missing_picture, f1);
-    gst_vaapi_picture_unref(f1);
+  gst_vaapi_picture_replace (&priv->missing_picture, f1);
+  gst_vaapi_picture_unref (f1);
 
-    GST_VAAPI_PICTURE_FLAG_SET(f1,
-        (GST_VAAPI_PICTURE_FLAG_ONEFIELD |
-         GST_VAAPI_PICTURE_FLAG_SKIPPED |
-         GST_VAAPI_PICTURE_FLAG_GHOST));
+  GST_VAAPI_PICTURE_FLAG_SET (f1,
+      (GST_VAAPI_PICTURE_FLAG_ONEFIELD |
+          GST_VAAPI_PICTURE_FLAG_SKIPPED | GST_VAAPI_PICTURE_FLAG_GHOST));
 
-    gst_vaapi_picture_h264_set_reference(f1, 0, FALSE);
-    return f1;
+  gst_vaapi_picture_h264_set_reference (f1, 0, FALSE);
+  return f1;
 
-    /* ERRORS */
+  /* ERRORS */
 error_allocate_field:
-    GST_ERROR("failed to allocate missing field for current frame store");
-    return NULL;
+  GST_ERROR ("failed to allocate missing field for current frame store");
+  return NULL;
 }
 
 static gboolean
-fill_picture_first_field_gap_done(GstVaapiDecoderH264 *decoder,
-    GstH264SliceHdr *slice_hdr)
+fill_picture_first_field_gap_done (GstVaapiDecoderH264 * decoder,
+    GstH264SliceHdr * slice_hdr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 * const lost_field = priv->missing_picture;
-    GstH264SliceHdr lost_slice_hdr;
-    gboolean success = FALSE;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *const lost_field = priv->missing_picture;
+  GstH264SliceHdr lost_slice_hdr;
+  gboolean success = FALSE;
 
-    g_return_val_if_fail(priv->current_picture != NULL, FALSE);
+  g_return_val_if_fail (priv->current_picture != NULL, FALSE);
 
-    if (!lost_field)
-        return TRUE;
+  if (!lost_field)
+    return TRUE;
 
-    lost_field->frame_num       = slice_hdr->frame_num;
-    lost_field->frame_num_wrap  = slice_hdr->frame_num;
+  lost_field->frame_num = slice_hdr->frame_num;
+  lost_field->frame_num_wrap = slice_hdr->frame_num;
 
-    gst_vaapi_picture_h264_set_reference(lost_field,
-        (GST_VAAPI_PICTURE_FLAGS(priv->current_picture) &
-         GST_VAAPI_PICTURE_FLAGS_REFERENCE), FALSE);
+  gst_vaapi_picture_h264_set_reference (lost_field,
+      (GST_VAAPI_PICTURE_FLAGS (priv->current_picture) &
+          GST_VAAPI_PICTURE_FLAGS_REFERENCE), FALSE);
 
-    lost_slice_hdr = *slice_hdr;
-    lost_slice_hdr.bottom_field_flag = !lost_slice_hdr.bottom_field_flag;
+  lost_slice_hdr = *slice_hdr;
+  lost_slice_hdr.bottom_field_flag = !lost_slice_hdr.bottom_field_flag;
 
-    init_picture_poc(decoder, lost_field, &lost_slice_hdr);
-    init_picture_ref_lists(decoder, lost_field);
-    init_picture_refs_pic_num(decoder, lost_field, &lost_slice_hdr);
-    if (!exec_ref_pic_marking_sliding_window(decoder))
-        goto error_exec_ref_pic_marking;
-    if (!dpb_add(decoder, lost_field))
-        goto error_dpb_add;
-    success = TRUE;
+  init_picture_poc (decoder, lost_field, &lost_slice_hdr);
+  init_picture_ref_lists (decoder, lost_field);
+  init_picture_refs_pic_num (decoder, lost_field, &lost_slice_hdr);
+  if (!exec_ref_pic_marking_sliding_window (decoder))
+    goto error_exec_ref_pic_marking;
+  if (!dpb_add (decoder, lost_field))
+    goto error_dpb_add;
+  success = TRUE;
 
 cleanup:
-    gst_vaapi_picture_replace(&priv->missing_picture, NULL);
-    return success;
+  gst_vaapi_picture_replace (&priv->missing_picture, NULL);
+  return success;
 
-    /* ERRORS */
+  /* ERRORS */
 error_exec_ref_pic_marking:
-    GST_ERROR("failed to execute reference picture marking process");
-    goto cleanup;
+  GST_ERROR ("failed to execute reference picture marking process");
+  goto cleanup;
 error_dpb_add:
-    GST_ERROR("failed to store lost picture into the DPB");
-    goto cleanup;
+  GST_ERROR ("failed to store lost picture into the DPB");
+  goto cleanup;
 }
 
 static GstVaapiPictureH264 *
-fill_picture_other_field_gap(GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *f0)
+fill_picture_other_field_gap (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * f0)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *prev_picture, *f1;
-    gint prev_frame_index;
-    guint picture_structure;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *prev_picture, *f1;
+  gint prev_frame_index;
+  guint picture_structure;
 
-    picture_structure = f0->base.structure;
-    switch (picture_structure) {
+  picture_structure = f0->base.structure;
+  switch (picture_structure) {
     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
-        picture_structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
-        break;
+      picture_structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
-        picture_structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
-        break;
+      picture_structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
+      break;
     default:
-        g_assert(0 && "unexpected picture structure");
-        return NULL;
-    }
-    GST_VAAPI_PICTURE_FLAG_SET(f0, GST_VAAPI_PICTURE_FLAG_ONEFIELD);
-
-    prev_frame_index = dpb_find_nearest_prev_poc(decoder, f0,
-        picture_structure, &prev_picture);
-    if (prev_frame_index < 0)
-        goto error_find_field;
-
-    f1 = gst_vaapi_picture_h264_new_field(f0);
-    if (!f1)
-        goto error_allocate_field;
-
-    gst_vaapi_surface_proxy_replace(&f1->base.proxy, prev_picture->base.proxy);
-    f1->base.surface = GST_VAAPI_SURFACE_PROXY_SURFACE(f1->base.proxy);
-    f1->base.surface_id = GST_VAAPI_SURFACE_PROXY_SURFACE_ID(f1->base.proxy);
-    f1->base.poc++;
-    f1->structure = f1->base.structure;
-
-    /* XXX: clone other H.264 picture specific flags */
-    GST_VAAPI_PICTURE_FLAG_SET(f1,
-        (GST_VAAPI_PICTURE_FLAG_SKIPPED |
-         GST_VAAPI_PICTURE_FLAG_GHOST));
-
-    gst_vaapi_picture_h264_set_reference(f1, 0, FALSE);
-    gst_vaapi_picture_replace(&priv->current_picture, f1);
-    gst_vaapi_picture_unref(f1);
-
-    init_picture_ref_lists(decoder, f1);
-    init_picture_refs_pic_num(decoder, f1, NULL);
-    if (!exec_ref_pic_marking_sliding_window(decoder))
-        goto error_exec_ref_pic_marking;
-    if (!dpb_add(decoder, f1))
-        goto error_append_field;
-    return f1;
-
-    /* ERRORS */
+      g_assert (0 && "unexpected picture structure");
+      return NULL;
+  }
+  GST_VAAPI_PICTURE_FLAG_SET (f0, GST_VAAPI_PICTURE_FLAG_ONEFIELD);
+
+  prev_frame_index = dpb_find_nearest_prev_poc (decoder, f0,
+      picture_structure, &prev_picture);
+  if (prev_frame_index < 0)
+    goto error_find_field;
+
+  f1 = gst_vaapi_picture_h264_new_field (f0);
+  if (!f1)
+    goto error_allocate_field;
+
+  gst_vaapi_surface_proxy_replace (&f1->base.proxy, prev_picture->base.proxy);
+  f1->base.surface = GST_VAAPI_SURFACE_PROXY_SURFACE (f1->base.proxy);
+  f1->base.surface_id = GST_VAAPI_SURFACE_PROXY_SURFACE_ID (f1->base.proxy);
+  f1->base.poc++;
+  f1->structure = f1->base.structure;
+
+  /* XXX: clone other H.264 picture specific flags */
+  GST_VAAPI_PICTURE_FLAG_SET (f1,
+      (GST_VAAPI_PICTURE_FLAG_SKIPPED | GST_VAAPI_PICTURE_FLAG_GHOST));
+
+  gst_vaapi_picture_h264_set_reference (f1, 0, FALSE);
+  gst_vaapi_picture_replace (&priv->current_picture, f1);
+  gst_vaapi_picture_unref (f1);
+
+  init_picture_ref_lists (decoder, f1);
+  init_picture_refs_pic_num (decoder, f1, NULL);
+  if (!exec_ref_pic_marking_sliding_window (decoder))
+    goto error_exec_ref_pic_marking;
+  if (!dpb_add (decoder, f1))
+    goto error_append_field;
+  return f1;
+
+  /* ERRORS */
 error_find_field:
-    GST_ERROR("failed to find field with POC nearest to %d", f0->base.poc);
-    return NULL;
+  GST_ERROR ("failed to find field with POC nearest to %d", f0->base.poc);
+  return NULL;
 error_allocate_field:
-    GST_ERROR("failed to allocate missing field for previous frame store");
-    return NULL;
+  GST_ERROR ("failed to allocate missing field for previous frame store");
+  return NULL;
 error_exec_ref_pic_marking:
-    GST_ERROR("failed to execute reference picture marking process");
-    return NULL;
+  GST_ERROR ("failed to execute reference picture marking process");
+  return NULL;
 error_append_field:
-    GST_ERROR("failed to add missing field into previous frame store");
-    return NULL;
+  GST_ERROR ("failed to add missing field into previous frame store");
+  return NULL;
 }
 
 static gboolean
-fill_picture_gaps(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
-    GstH264SliceHdr *slice_hdr)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
-    GstVaapiFrameStore *prev_frame;
-    GstVaapiPicture *base_picture;
-    GstVaapiPictureH264 *lost_picture, *prev_picture;
-    GstH264SliceHdr lost_slice_hdr;
-    gboolean success = FALSE;
-
-    if (priv->prev_ref_frame_num == priv->frame_num)
-        return TRUE;
-    if ((priv->prev_ref_frame_num + 1) % MaxFrameNum == priv->frame_num)
-        return TRUE;
-    if (priv->dpb_count == 0)
-        return TRUE;
-
-    prev_frame = priv->prev_ref_frames[picture->base.voc];
-    g_assert(prev_frame != NULL);
-    prev_picture = gst_vaapi_picture_ref(prev_frame->buffers[0]);
-    gst_vaapi_picture_ref(picture);
-
-    lost_slice_hdr = *slice_hdr;
-    lost_slice_hdr.field_pic_flag = 0;
-    if (sps->pic_order_cnt_type == 1) {
-        lost_slice_hdr.delta_pic_order_cnt[0] = 0;
-        lost_slice_hdr.delta_pic_order_cnt[1] = 0;
-    }
-    lost_slice_hdr.dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag = 0;
-
-    /* XXX: this process is incorrect for MVC */
-    /* XXX: optimize to reduce the number of dummy pictures created */
-    priv->frame_num = priv->prev_ref_frame_num;
-    for (;;) {
-        priv->prev_ref_frame_num = priv->frame_num;
-        priv->frame_num = (priv->prev_ref_frame_num + 1) % MaxFrameNum;
-        if (priv->frame_num == slice_hdr->frame_num)
-            break;
-
-        /* Create new picture */
-        if (prev_picture)
-            lost_picture = gst_vaapi_picture_h264_new_clone(prev_picture);
-        else
-            lost_picture = gst_vaapi_picture_h264_new(decoder);
-        if (!lost_picture)
-            goto error_allocate_picture;
-
-        base_picture                    = &lost_picture->base;
-        base_picture->type              = GST_VAAPI_PICTURE_TYPE_NONE;
-        base_picture->pts               = GST_CLOCK_TIME_NONE;
-        base_picture->structure         = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
-        lost_picture->frame_num         = priv->frame_num;
-        lost_picture->frame_num_wrap    = priv->frame_num;
-        lost_picture->structure         = base_picture->structure;
-
-        GST_VAAPI_PICTURE_FLAG_SET(lost_picture,
-            (GST_VAAPI_PICTURE_FLAG_SKIPPED |
-             GST_VAAPI_PICTURE_FLAG_GHOST |
-             GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE));
-
-        if (sps->pic_order_cnt_type != 0)
-            init_picture_poc(decoder, lost_picture, &lost_slice_hdr);
-        else {
-            base_picture->poc = prev_picture->base.poc + 2;
-            if (prev_picture->field_poc[0] != G_MAXINT32)
-                lost_picture->field_poc[0] = prev_picture->field_poc[0] + 2;
-            if (prev_picture->field_poc[1] != G_MAXINT32)
-                lost_picture->field_poc[1] = prev_picture->field_poc[1] + 2;
-        }
+fill_picture_gaps (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture,
+    GstH264SliceHdr * slice_hdr)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
+  GstVaapiFrameStore *prev_frame;
+  GstVaapiPicture *base_picture;
+  GstVaapiPictureH264 *lost_picture, *prev_picture;
+  GstH264SliceHdr lost_slice_hdr;
+  gboolean success = FALSE;
+
+  if (priv->prev_ref_frame_num == priv->frame_num)
+    return TRUE;
+  if ((priv->prev_ref_frame_num + 1) % MaxFrameNum == priv->frame_num)
+    return TRUE;
+  if (priv->dpb_count == 0)
+    return TRUE;
 
-        gst_vaapi_picture_replace(&prev_picture, lost_picture);
-        gst_vaapi_picture_replace(&priv->current_picture, lost_picture);
-        gst_vaapi_picture_unref(lost_picture);
-
-        init_picture_ref_lists(decoder, lost_picture);
-        init_picture_refs_pic_num(decoder, lost_picture, &lost_slice_hdr);
-        if (!exec_ref_pic_marking_sliding_window(decoder))
-            goto error_exec_ref_pic_marking;
-        if (!dpb_add(decoder, lost_picture))
-            goto error_dpb_add;
-        gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    }
-    success = TRUE;
+  prev_frame = priv->prev_ref_frames[picture->base.voc];
+  g_assert (prev_frame != NULL);
+  prev_picture = gst_vaapi_picture_ref (prev_frame->buffers[0]);
+  gst_vaapi_picture_ref (picture);
+
+  lost_slice_hdr = *slice_hdr;
+  lost_slice_hdr.field_pic_flag = 0;
+  if (sps->pic_order_cnt_type == 1) {
+    lost_slice_hdr.delta_pic_order_cnt[0] = 0;
+    lost_slice_hdr.delta_pic_order_cnt[1] = 0;
+  }
+  lost_slice_hdr.dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag = 0;
+
+  /* XXX: this process is incorrect for MVC */
+  /* XXX: optimize to reduce the number of dummy pictures created */
+  priv->frame_num = priv->prev_ref_frame_num;
+  for (;;) {
+    priv->prev_ref_frame_num = priv->frame_num;
+    priv->frame_num = (priv->prev_ref_frame_num + 1) % MaxFrameNum;
+    if (priv->frame_num == slice_hdr->frame_num)
+      break;
+
+    /* Create new picture */
+    if (prev_picture)
+      lost_picture = gst_vaapi_picture_h264_new_clone (prev_picture);
+    else
+      lost_picture = gst_vaapi_picture_h264_new (decoder);
+    if (!lost_picture)
+      goto error_allocate_picture;
 
-cleanup:
-    priv->frame_num = slice_hdr->frame_num;
-    priv->prev_ref_frame_num = (priv->frame_num + MaxFrameNum - 1) % MaxFrameNum;
-    gst_vaapi_picture_replace(&prev_picture, NULL);
-    gst_vaapi_picture_replace(&priv->current_picture, picture);
-    gst_vaapi_picture_unref(picture);
-    return success;
+    base_picture = &lost_picture->base;
+    base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
+    base_picture->pts = GST_CLOCK_TIME_NONE;
+    base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+    lost_picture->frame_num = priv->frame_num;
+    lost_picture->frame_num_wrap = priv->frame_num;
+    lost_picture->structure = base_picture->structure;
+
+    GST_VAAPI_PICTURE_FLAG_SET (lost_picture,
+        (GST_VAAPI_PICTURE_FLAG_SKIPPED |
+            GST_VAAPI_PICTURE_FLAG_GHOST |
+            GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE));
+
+    if (sps->pic_order_cnt_type != 0)
+      init_picture_poc (decoder, lost_picture, &lost_slice_hdr);
+    else {
+      base_picture->poc = prev_picture->base.poc + 2;
+      if (prev_picture->field_poc[0] != G_MAXINT32)
+        lost_picture->field_poc[0] = prev_picture->field_poc[0] + 2;
+      if (prev_picture->field_poc[1] != G_MAXINT32)
+        lost_picture->field_poc[1] = prev_picture->field_poc[1] + 2;
+    }
+
+    gst_vaapi_picture_replace (&prev_picture, lost_picture);
+    gst_vaapi_picture_replace (&priv->current_picture, lost_picture);
+    gst_vaapi_picture_unref (lost_picture);
+
+    init_picture_ref_lists (decoder, lost_picture);
+    init_picture_refs_pic_num (decoder, lost_picture, &lost_slice_hdr);
+    if (!exec_ref_pic_marking_sliding_window (decoder))
+      goto error_exec_ref_pic_marking;
+    if (!dpb_add (decoder, lost_picture))
+      goto error_dpb_add;
+    gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  }
+  success = TRUE;
 
-    /* ERRORS */
+cleanup:
+  priv->frame_num = slice_hdr->frame_num;
+  priv->prev_ref_frame_num = (priv->frame_num + MaxFrameNum - 1) % MaxFrameNum;
+  gst_vaapi_picture_replace (&prev_picture, NULL);
+  gst_vaapi_picture_replace (&priv->current_picture, picture);
+  gst_vaapi_picture_unref (picture);
+  return success;
+
+  /* ERRORS */
 error_allocate_picture:
-    GST_ERROR("failed to allocate lost picture");
-    goto cleanup;
+  GST_ERROR ("failed to allocate lost picture");
+  goto cleanup;
 error_exec_ref_pic_marking:
-    GST_ERROR("failed to execute reference picture marking process");
-    goto cleanup;
+  GST_ERROR ("failed to execute reference picture marking process");
+  goto cleanup;
 error_dpb_add:
-    GST_ERROR("failed to store lost picture into the DPB");
-    goto cleanup;
+  GST_ERROR ("failed to store lost picture into the DPB");
+  goto cleanup;
 }
 
 static gboolean
-init_picture(
-    GstVaapiDecoderH264 *decoder,
-    GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPicture * const base_picture = &picture->base;
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-
-    if (priv->prev_pic_reference)
-        priv->prev_ref_frame_num = priv->frame_num;
-    priv->prev_frame_num        = priv->frame_num;
-    priv->frame_num             = slice_hdr->frame_num;
-    picture->frame_num          = priv->frame_num;
-    picture->frame_num_wrap     = priv->frame_num;
-    picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
-    base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
-    base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
-    base_picture->view_id       = pi->view_id;
-    base_picture->voc           = pi->voc;
-
-    /* Initialize extensions */
-    switch (pi->nalu.extension_type) {
-    case GST_H264_NAL_EXTENSION_MVC: {
-        GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
-
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
-        if (mvc->inter_view_flag)
-            GST_VAAPI_PICTURE_FLAG_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
-        if (mvc->anchor_pic_flag)
-            GST_VAAPI_PICTURE_FLAG_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_ANCHOR);
-        break;
-    }
-    }
-
-    /* Reset decoder state for IDR pictures */
-    if (pi->nalu.idr_pic_flag) {
-        GST_DEBUG("<IDR>");
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
-        dpb_flush(decoder, picture);
-    }
-    else if (!fill_picture_gaps(decoder, picture, slice_hdr))
-        return FALSE;
+init_picture (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstVaapiParserInfoH264 * pi)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPicture *const base_picture = &picture->base;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+
+  if (priv->prev_pic_reference)
+    priv->prev_ref_frame_num = priv->frame_num;
+  priv->prev_frame_num = priv->frame_num;
+  priv->frame_num = slice_hdr->frame_num;
+  picture->frame_num = priv->frame_num;
+  picture->frame_num_wrap = priv->frame_num;
+  picture->output_flag = TRUE;  /* XXX: conformant to Annex A only */
+  base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
+  base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
+  base_picture->view_id = pi->view_id;
+  base_picture->voc = pi->voc;
+
+  /* Initialize extensions */
+  switch (pi->nalu.extension_type) {
+    case GST_H264_NAL_EXTENSION_MVC:{
+      GstH264NalUnitExtensionMVC *const mvc = &pi->nalu.extension.mvc;
+
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_MVC);
+      if (mvc->inter_view_flag)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+      if (mvc->anchor_pic_flag)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_ANCHOR);
+      break;
+    }
+  }
+
+  /* Reset decoder state for IDR pictures */
+  if (pi->nalu.idr_pic_flag) {
+    GST_DEBUG ("<IDR>");
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_IDR);
+    dpb_flush (decoder, picture);
+  } else if (!fill_picture_gaps (decoder, picture, slice_hdr))
+    return FALSE;
 
-    /* Initialize picture structure */
-    if (slice_hdr->field_pic_flag) {
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
-        priv->pic_structure = slice_hdr->bottom_field_flag ?
-            GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD :
-            GST_H264_SEI_PIC_STRUCT_TOP_FIELD;
-    }
+  /* Initialize picture structure */
+  if (slice_hdr->field_pic_flag) {
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
+    priv->pic_structure = slice_hdr->bottom_field_flag ?
+        GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD :
+        GST_H264_SEI_PIC_STRUCT_TOP_FIELD;
+  }
 
-    base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
-    switch (priv->pic_structure) {
+  base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+  switch (priv->pic_structure) {
     case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
-        base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
-        if (GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture))
-            priv->top_field_first = TRUE;
-        break;
+      base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
+      if (GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture))
+        priv->top_field_first = TRUE;
+      break;
     case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
-        base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
-        break;
+      base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
+      break;
     case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_RFF);
-        // fall-through
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_RFF);
+      // fall-through
     case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
-        if (GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture))
-            priv->top_field_first = TRUE;
-        break;
+      if (GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture))
+        priv->top_field_first = TRUE;
+      break;
     case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_RFF);
-        break;
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_RFF);
+      break;
     case GST_H264_SEI_PIC_STRUCT_FRAME:
-        if (!priv->progressive_sequence && priv->dpb_count == 0)
-            priv->top_field_first = TRUE;
-        break;
-    }
-    picture->structure = base_picture->structure;
-    if (priv->top_field_first)
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_TFF);
-
-    /* Initialize reference flags */
-    if (pi->nalu.ref_idc) {
-        GstH264DecRefPicMarking * const dec_ref_pic_marking =
-            &slice_hdr->dec_ref_pic_marking;
-
-        if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
-            dec_ref_pic_marking->long_term_reference_flag)
-            GST_VAAPI_PICTURE_FLAG_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
-        else
-            GST_VAAPI_PICTURE_FLAG_SET(picture,
-                GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
-    }
+      if (!priv->progressive_sequence && priv->dpb_count == 0)
+        priv->top_field_first = TRUE;
+      break;
+  }
+  picture->structure = base_picture->structure;
+  if (priv->top_field_first)
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_TFF);
+
+  /* Initialize reference flags */
+  if (pi->nalu.ref_idc) {
+    GstH264DecRefPicMarking *const dec_ref_pic_marking =
+        &slice_hdr->dec_ref_pic_marking;
+
+    if (GST_VAAPI_PICTURE_IS_IDR (picture) &&
+        dec_ref_pic_marking->long_term_reference_flag)
+      GST_VAAPI_PICTURE_FLAG_SET (picture,
+          GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
+    else
+      GST_VAAPI_PICTURE_FLAG_SET (picture,
+          GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
+  }
 
-    fill_picture_first_field_gap_done(decoder, slice_hdr);
-    init_picture_poc(decoder, picture, slice_hdr);
-    return TRUE;
+  fill_picture_first_field_gap_done (decoder, slice_hdr);
+  init_picture_poc (decoder, picture, slice_hdr);
+  return TRUE;
 }
 
 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
 static gboolean
-exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
+exec_ref_pic_marking_sliding_window (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SPS * const sps = get_sps(decoder);
-    GstVaapiPictureH264 *ref_picture;
-    guint i, m, max_num_ref_frames;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SPS *const sps = get_sps (decoder);
+  GstVaapiPictureH264 *ref_picture;
+  guint i, m, max_num_ref_frames;
 
-    GST_DEBUG("reference picture marking process (sliding window)");
+  GST_DEBUG ("reference picture marking process (sliding window)");
 
-    if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
-        return TRUE;
+  if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD (priv->current_picture))
+    return TRUE;
 
-    max_num_ref_frames = sps->num_ref_frames;
-    if (max_num_ref_frames == 0)
-        max_num_ref_frames = 1;
-    if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
-        max_num_ref_frames <<= 1;
+  max_num_ref_frames = sps->num_ref_frames;
+  if (max_num_ref_frames == 0)
+    max_num_ref_frames = 1;
+  if (!GST_VAAPI_PICTURE_IS_FRAME (priv->current_picture))
+    max_num_ref_frames <<= 1;
 
-    if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
-        return TRUE;
-    if (priv->short_ref_count < 1)
-        return FALSE;
+  if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
+    return TRUE;
+  if (priv->short_ref_count < 1)
+    return FALSE;
 
-    for (m = 0, i = 1; i < priv->short_ref_count; i++) {
-        GstVaapiPictureH264 * const picture = priv->short_ref[i];
-        if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
-            m = i;
-    }
+  for (m = 0, i = 1; i < priv->short_ref_count; i++) {
+    GstVaapiPictureH264 *const picture = priv->short_ref[i];
+    if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
+      m = i;
+  }
 
-    ref_picture = priv->short_ref[m];
-    gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
-    ARRAY_REMOVE_INDEX(priv->short_ref, m);
-
-    /* Both fields need to be marked as "unused for reference", so
-       remove the other field from the short_ref[] list as well */
-    if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
-        for (i = 0; i < priv->short_ref_count; i++) {
-            if (priv->short_ref[i] == ref_picture->other_field) {
-                ARRAY_REMOVE_INDEX(priv->short_ref, i);
-                break;
-            }
-        }
+  ref_picture = priv->short_ref[m];
+  gst_vaapi_picture_h264_set_reference (ref_picture, 0, TRUE);
+  ARRAY_REMOVE_INDEX (priv->short_ref, m);
+
+  /* Both fields need to be marked as "unused for reference", so
+     remove the other field from the short_ref[] list as well */
+  if (!GST_VAAPI_PICTURE_IS_FRAME (priv->current_picture)
+      && ref_picture->other_field) {
+    for (i = 0; i < priv->short_ref_count; i++) {
+      if (priv->short_ref[i] == ref_picture->other_field) {
+        ARRAY_REMOVE_INDEX (priv->short_ref, i);
+        break;
+      }
     }
-    return TRUE;
+  }
+  return TRUE;
 }
 
 static inline gint32
-get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
+get_picNumX (GstVaapiPictureH264 * picture,
+    GstH264RefPicMarking * ref_pic_marking)
 {
-    gint32 pic_num;
+  gint32 pic_num;
 
-    if (GST_VAAPI_PICTURE_IS_FRAME(picture))
-        pic_num = picture->frame_num_wrap;
-    else
-        pic_num = 2 * picture->frame_num_wrap + 1;
-    pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
-    return pic_num;
+  if (GST_VAAPI_PICTURE_IS_FRAME (picture))
+    pic_num = picture->frame_num_wrap;
+  else
+    pic_num = 2 * picture->frame_num_wrap + 1;
+  pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
+  return pic_num;
 }
 
 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
 static void
-exec_ref_pic_marking_adaptive_mmco_1(
-    GstVaapiDecoderH264  *decoder,
-    GstVaapiPictureH264  *picture,
-    GstH264RefPicMarking *ref_pic_marking
-)
+exec_ref_pic_marking_adaptive_mmco_1 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    gint32 i, picNumX;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  gint32 i, picNumX;
 
-    picNumX = get_picNumX(picture, ref_pic_marking);
-    i = find_short_term_reference(decoder, picNumX);
-    if (i < 0)
-        return;
+  picNumX = get_picNumX (picture, ref_pic_marking);
+  i = find_short_term_reference (decoder, picNumX);
+  if (i < 0)
+    return;
 
-    gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
-        GST_VAAPI_PICTURE_IS_FRAME(picture));
-    ARRAY_REMOVE_INDEX(priv->short_ref, i);
+  gst_vaapi_picture_h264_set_reference (priv->short_ref[i], 0,
+      GST_VAAPI_PICTURE_IS_FRAME (picture));
+  ARRAY_REMOVE_INDEX (priv->short_ref, i);
 }
 
 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
 static void
-exec_ref_pic_marking_adaptive_mmco_2(
-    GstVaapiDecoderH264  *decoder,
-    GstVaapiPictureH264  *picture,
-    GstH264RefPicMarking *ref_pic_marking
-)
+exec_ref_pic_marking_adaptive_mmco_2 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    gint32 i;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  gint32 i;
 
-    i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
-    if (i < 0)
-        return;
+  i = find_long_term_reference (decoder, ref_pic_marking->long_term_pic_num);
+  if (i < 0)
+    return;
 
-    gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
-        GST_VAAPI_PICTURE_IS_FRAME(picture));
-    ARRAY_REMOVE_INDEX(priv->long_ref, i);
+  gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0,
+      GST_VAAPI_PICTURE_IS_FRAME (picture));
+  ARRAY_REMOVE_INDEX (priv->long_ref, i);
 }
 
 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
 static void
-exec_ref_pic_marking_adaptive_mmco_3(
-    GstVaapiDecoderH264  *decoder,
-    GstVaapiPictureH264  *picture,
-    GstH264RefPicMarking *ref_pic_marking
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *ref_picture, *other_field;
-    gint32 i, picNumX;
-
-    for (i = 0; i < priv->long_ref_count; i++) {
-        if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
-            break;
-    }
-    if (i != priv->long_ref_count) {
-        gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
-        ARRAY_REMOVE_INDEX(priv->long_ref, i);
-    }
-
-    picNumX = get_picNumX(picture, ref_pic_marking);
-    i = find_short_term_reference(decoder, picNumX);
-    if (i < 0)
-        return;
-
-    ref_picture = priv->short_ref[i];
-    ARRAY_REMOVE_INDEX(priv->short_ref, i);
-    priv->long_ref[priv->long_ref_count++] = ref_picture;
-
-    ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
-    gst_vaapi_picture_h264_set_reference(ref_picture,
-        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
-        GST_VAAPI_PICTURE_IS_COMPLETE(picture));
-
-    /* Assign LongTermFrameIdx to the other field if it was also
-       marked as "used for long-term reference */
-    other_field = ref_picture->other_field;
-    if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
-        other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+exec_ref_pic_marking_adaptive_mmco_3 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *ref_picture, *other_field;
+  gint32 i, picNumX;
+
+  for (i = 0; i < priv->long_ref_count; i++) {
+    if (priv->long_ref[i]->long_term_frame_idx ==
+        ref_pic_marking->long_term_frame_idx)
+      break;
+  }
+  if (i != priv->long_ref_count) {
+    gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0, TRUE);
+    ARRAY_REMOVE_INDEX (priv->long_ref, i);
+  }
+
+  picNumX = get_picNumX (picture, ref_pic_marking);
+  i = find_short_term_reference (decoder, picNumX);
+  if (i < 0)
+    return;
+
+  ref_picture = priv->short_ref[i];
+  ARRAY_REMOVE_INDEX (priv->short_ref, i);
+  priv->long_ref[priv->long_ref_count++] = ref_picture;
+
+  ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+  gst_vaapi_picture_h264_set_reference (ref_picture,
+      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
+      GST_VAAPI_PICTURE_IS_COMPLETE (picture));
+
+  /* Assign LongTermFrameIdx to the other field if it was also
+     marked as "used for long-term reference" */
+  other_field = ref_picture->other_field;
+  if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (other_field))
+    other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
 }
 
 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
  * as "unused for reference" */
 static void
-exec_ref_pic_marking_adaptive_mmco_4(
-    GstVaapiDecoderH264  *decoder,
-    GstVaapiPictureH264  *picture,
-    GstH264RefPicMarking *ref_pic_marking
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    gint32 i, long_term_frame_idx;
-
-    long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
-
-    for (i = 0; i < priv->long_ref_count; i++) {
-        if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
-            continue;
-        gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
-        ARRAY_REMOVE_INDEX(priv->long_ref, i);
-        i--;
-    }
+exec_ref_pic_marking_adaptive_mmco_4 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  gint32 i, long_term_frame_idx;
+
+  long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
+
+  for (i = 0; i < priv->long_ref_count; i++) {
+    if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
+      continue;
+    gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0, FALSE);
+    ARRAY_REMOVE_INDEX (priv->long_ref, i);
+    i--;
+  }
 }
 
 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
 static void
-exec_ref_pic_marking_adaptive_mmco_5(
-    GstVaapiDecoderH264  *decoder,
-    GstVaapiPictureH264  *picture,
-    GstH264RefPicMarking *ref_pic_marking
-)
+exec_ref_pic_marking_adaptive_mmco_5 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
 
-    dpb_flush(decoder, picture);
+  dpb_flush (decoder, picture);
 
-    priv->prev_pic_has_mmco5 = TRUE;
+  priv->prev_pic_has_mmco5 = TRUE;
 
-    /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
-    priv->frame_num = 0;
-    priv->frame_num_offset = 0;
-    picture->frame_num = 0;
+  /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
+  priv->frame_num = 0;
+  priv->frame_num_offset = 0;
+  picture->frame_num = 0;
 
-    /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
-    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
-        picture->field_poc[TOP_FIELD] -= picture->base.poc;
-    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
-        picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
-    picture->base.poc = 0;
+  /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
+  if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
+    picture->field_poc[TOP_FIELD] -= picture->base.poc;
+  if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
+    picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
+  picture->base.poc = 0;
 }
 
 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
 static void
-exec_ref_pic_marking_adaptive_mmco_6(
-    GstVaapiDecoderH264  *decoder,
-    GstVaapiPictureH264  *picture,
-    GstH264RefPicMarking *ref_pic_marking
-)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPictureH264 *other_field;
-    guint i;
-
-    for (i = 0; i < priv->long_ref_count; i++) {
-        if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
-            break;
-    }
-    if (i != priv->long_ref_count) {
-        gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
-        ARRAY_REMOVE_INDEX(priv->long_ref, i);
-    }
-
-    picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
-    gst_vaapi_picture_h264_set_reference(picture,
-        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
-        GST_VAAPI_PICTURE_IS_COMPLETE(picture));
-
-    /* Assign LongTermFrameIdx to the other field if it was also
-       marked as "used for long-term reference */
-    other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
-    if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
-        other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+exec_ref_pic_marking_adaptive_mmco_6 (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPictureH264 *other_field;
+  guint i;
+
+  for (i = 0; i < priv->long_ref_count; i++) {
+    if (priv->long_ref[i]->long_term_frame_idx ==
+        ref_pic_marking->long_term_frame_idx)
+      break;
+  }
+  if (i != priv->long_ref_count) {
+    gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0, TRUE);
+    ARRAY_REMOVE_INDEX (priv->long_ref, i);
+  }
+
+  picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+  gst_vaapi_picture_h264_set_reference (picture,
+      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
+      GST_VAAPI_PICTURE_IS_COMPLETE (picture));
+
+  /* Assign LongTermFrameIdx to the other field if it was also
+     marked as "used for long-term reference" */
+  other_field = GST_VAAPI_PICTURE_H264 (picture->base.parent_picture);
+  if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (other_field))
+    other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
 }
 
 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
 static gboolean
-exec_ref_pic_marking_adaptive(
-    GstVaapiDecoderH264     *decoder,
-    GstVaapiPictureH264     *picture,
-    GstH264DecRefPicMarking *dec_ref_pic_marking
-)
-{
-    guint i;
-
-    typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
-        GstVaapiDecoderH264  *decoder,
-        GstVaapiPictureH264  *picture,
-        GstH264RefPicMarking *ref_pic_marking
-    );
-
-    static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
-        NULL,
-        exec_ref_pic_marking_adaptive_mmco_1,
-        exec_ref_pic_marking_adaptive_mmco_2,
-        exec_ref_pic_marking_adaptive_mmco_3,
-        exec_ref_pic_marking_adaptive_mmco_4,
-        exec_ref_pic_marking_adaptive_mmco_5,
-        exec_ref_pic_marking_adaptive_mmco_6,
-    };
-
-    GST_DEBUG("reference picture marking process (adaptive memory control)");
-
-    for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
-        GstH264RefPicMarking * const ref_pic_marking =
-            &dec_ref_pic_marking->ref_pic_marking[i];
-
-        const guint mmco = ref_pic_marking->memory_management_control_operation;
-        if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
-            mmco_funcs[mmco](decoder, picture, ref_pic_marking);
-        else {
-            GST_ERROR("unhandled MMCO %u", mmco);
-            return FALSE;
-        }
+exec_ref_pic_marking_adaptive (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture,
+    GstH264DecRefPicMarking * dec_ref_pic_marking)
+{
+  guint i;
+
+  typedef void (*exec_ref_pic_marking_adaptive_mmco_func) (GstVaapiDecoderH264 *
+      decoder, GstVaapiPictureH264 * picture,
+      GstH264RefPicMarking * ref_pic_marking);
+
+  static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
+    NULL,
+    exec_ref_pic_marking_adaptive_mmco_1,
+    exec_ref_pic_marking_adaptive_mmco_2,
+    exec_ref_pic_marking_adaptive_mmco_3,
+    exec_ref_pic_marking_adaptive_mmco_4,
+    exec_ref_pic_marking_adaptive_mmco_5,
+    exec_ref_pic_marking_adaptive_mmco_6,
+  };
+
+  GST_DEBUG ("reference picture marking process (adaptive memory control)");
+
+  for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
+    GstH264RefPicMarking *const ref_pic_marking =
+        &dec_ref_pic_marking->ref_pic_marking[i];
+
+    const guint mmco = ref_pic_marking->memory_management_control_operation;
+    if (mmco < G_N_ELEMENTS (mmco_funcs) && mmco_funcs[mmco])
+      mmco_funcs[mmco] (decoder, picture, ref_pic_marking);
+    else {
+      GST_ERROR ("unhandled MMCO %u", mmco);
+      return FALSE;
     }
-    return TRUE;
+  }
+  return TRUE;
 }
 
 /* 8.2.5 - Execute reference picture marking process */
 static gboolean
-exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+exec_ref_pic_marking (GstVaapiDecoderH264 * decoder,
+    GstVaapiPictureH264 * picture)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
 
-    priv->prev_pic_reference = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
-    priv->prev_pic_has_mmco5 = FALSE;
-    priv->prev_pic_structure = picture->structure;
+  priv->prev_pic_reference = GST_VAAPI_PICTURE_IS_REFERENCE (picture);
+  priv->prev_pic_has_mmco5 = FALSE;
+  priv->prev_pic_structure = picture->structure;
 
-    if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
-        g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
+  if (GST_VAAPI_PICTURE_IS_INTER_VIEW (picture))
+    g_ptr_array_add (priv->inter_views, gst_vaapi_picture_ref (picture));
 
-    if (!priv->prev_pic_reference)
-        return TRUE;
+  if (!priv->prev_pic_reference)
+    return TRUE;
 
-    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
-        GstH264DecRefPicMarking * const dec_ref_pic_marking =
-            &picture->last_slice_hdr->dec_ref_pic_marking;
-        if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
-            if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
-                return FALSE;
-        }
-        else {
-            if (!exec_ref_pic_marking_sliding_window(decoder))
-                return FALSE;
-        }
+  if (!GST_VAAPI_PICTURE_IS_IDR (picture)) {
+    GstH264DecRefPicMarking *const dec_ref_pic_marking =
+        &picture->last_slice_hdr->dec_ref_pic_marking;
+    if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
+      if (!exec_ref_pic_marking_adaptive (decoder, picture,
+              dec_ref_pic_marking))
+        return FALSE;
+    } else {
+      if (!exec_ref_pic_marking_sliding_window (decoder))
+        return FALSE;
     }
-    return TRUE;
+  }
+  return TRUE;
 }
 
 static void
-vaapi_init_picture(VAPictureH264 *pic)
+vaapi_init_picture (VAPictureH264 * pic)
 {
-    pic->picture_id           = VA_INVALID_ID;
-    pic->frame_idx            = 0;
-    pic->flags                = VA_PICTURE_H264_INVALID;
-    pic->TopFieldOrderCnt     = 0;
-    pic->BottomFieldOrderCnt  = 0;
+  pic->picture_id = VA_INVALID_ID;
+  pic->frame_idx = 0;
+  pic->flags = VA_PICTURE_H264_INVALID;
+  pic->TopFieldOrderCnt = 0;
+  pic->BottomFieldOrderCnt = 0;
 }
 
 static void
-vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
+vaapi_fill_picture (VAPictureH264 * pic, GstVaapiPictureH264 * picture,
     guint picture_structure)
 {
-    if (!picture_structure)
-        picture_structure = picture->structure;
+  if (!picture_structure)
+    picture_structure = picture->structure;
 
-    pic->picture_id = picture->base.surface_id;
-    pic->flags = 0;
+  pic->picture_id = picture->base.surface_id;
+  pic->flags = 0;
 
-    if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
-        pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
-        pic->frame_idx = picture->long_term_frame_idx;
-    }
-    else {
-        if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
-            pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
-        pic->frame_idx = picture->frame_num;
-    }
+  if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (picture)) {
+    pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
+    pic->frame_idx = picture->long_term_frame_idx;
+  } else {
+    if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (picture))
+      pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+    pic->frame_idx = picture->frame_num;
+  }
 
-    switch (picture_structure) {
+  switch (picture_structure) {
     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
-        pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
-        pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
-        break;
+      pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
+      pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
-        pic->flags |= VA_PICTURE_H264_TOP_FIELD;
-        pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
-        pic->BottomFieldOrderCnt = 0;
-        break;
+      pic->flags |= VA_PICTURE_H264_TOP_FIELD;
+      pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
+      pic->BottomFieldOrderCnt = 0;
+      break;
     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
-        pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
-        pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
-        pic->TopFieldOrderCnt = 0;
-        break;
-    }
+      pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+      pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
+      pic->TopFieldOrderCnt = 0;
+      break;
+  }
 }
 
 static void
-vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
-    GstVaapiPictureH264 *picture)
-{
-    vaapi_fill_picture(pic, picture, 0);
-
-    /* H.8.4 - MVC inter prediction and inter-view prediction process */
-    if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
-        /* The inter-view reference components and inter-view only
-           reference components that are included in the reference
-           picture lists are considered as not being marked as "used for
-           short-term reference" or "used for long-term reference" */
-        pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
-                        VA_PICTURE_H264_LONG_TERM_REFERENCE);
-    }
+vaapi_fill_picture_for_RefPicListX (VAPictureH264 * pic,
+    GstVaapiPictureH264 * picture)
+{
+  vaapi_fill_picture (pic, picture, 0);
+
+  /* H.8.4 - MVC inter prediction and inter-view prediction process */
+  if (GST_VAAPI_PICTURE_IS_INTER_VIEW (picture)) {
+    /* The inter-view reference components and inter-view only
+       reference components that are included in the reference
+       picture lists are considered as not being marked as "used for
+       short-term reference" or "used for long-term reference" */
+    pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE |
+        VA_PICTURE_H264_LONG_TERM_REFERENCE);
+  }
 }
 
 static gboolean
-fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiPicture * const base_picture = &picture->base;
-    GstH264PPS * const pps = get_pps(decoder);
-    GstH264SPS * const sps = get_sps(decoder);
-    VAPictureParameterBufferH264 * const pic_param = base_picture->param;
-    guint i, n;
-
-    /* Fill in VAPictureParameterBufferH264 */
-    vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
-
-    for (i = 0, n = 0; i < priv->dpb_count; i++) {
-        GstVaapiFrameStore * const fs = priv->dpb[i];
-        if ((gst_vaapi_frame_store_has_reference(fs) &&
-             fs->view_id == picture->base.view_id) ||
-            (gst_vaapi_frame_store_has_inter_view(fs) &&
-             is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
-            vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
-                fs->buffers[0], fs->structure);
-        if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
-            break;
-    }
-    for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
-        vaapi_init_picture(&pic_param->ReferenceFrames[n]);
+fill_picture (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiPicture *const base_picture = &picture->base;
+  GstH264PPS *const pps = get_pps (decoder);
+  GstH264SPS *const sps = get_sps (decoder);
+  VAPictureParameterBufferH264 *const pic_param = base_picture->param;
+  guint i, n;
+
+  /* Fill in VAPictureParameterBufferH264 */
+  vaapi_fill_picture (&pic_param->CurrPic, picture, 0);
+
+  for (i = 0, n = 0; i < priv->dpb_count; i++) {
+    GstVaapiFrameStore *const fs = priv->dpb[i];
+    if ((gst_vaapi_frame_store_has_reference (fs) &&
+            fs->view_id == picture->base.view_id) ||
+        (gst_vaapi_frame_store_has_inter_view (fs) &&
+            is_inter_view_reference_for_picture (decoder, fs->view_id,
+                picture)))
+      vaapi_fill_picture (&pic_param->ReferenceFrames[n++], fs->buffers[0],
+          fs->structure);
+    if (n >= G_N_ELEMENTS (pic_param->ReferenceFrames))
+      break;
+  }
+  for (; n < G_N_ELEMENTS (pic_param->ReferenceFrames); n++)
+    vaapi_init_picture (&pic_param->ReferenceFrames[n]);
 
 #define COPY_FIELD(s, f) \
     pic_param->f = (s)->f
@@ -3788,62 +3696,65 @@ fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
 #define COPY_BFM(a, s, f) \
     pic_param->a.bits.f = (s)->f
 
-    pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
-    pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
-    pic_param->frame_num                    = priv->frame_num;
-
-    COPY_FIELD(sps, bit_depth_luma_minus8);
-    COPY_FIELD(sps, bit_depth_chroma_minus8);
-    COPY_FIELD(sps, num_ref_frames);
-    COPY_FIELD(pps, num_slice_groups_minus1);
-    COPY_FIELD(pps, slice_group_map_type);
-    COPY_FIELD(pps, slice_group_change_rate_minus1);
-    COPY_FIELD(pps, pic_init_qp_minus26);
-    COPY_FIELD(pps, pic_init_qs_minus26);
-    COPY_FIELD(pps, chroma_qp_index_offset);
-    COPY_FIELD(pps, second_chroma_qp_index_offset);
-
-    pic_param->seq_fields.value                                         = 0; /* reset all bits */
-    pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
-    pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
-
-    COPY_BFM(seq_fields, sps, chroma_format_idc);
-    COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
-    COPY_BFM(seq_fields, sps, frame_mbs_only_flag); 
-    COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag); 
-    COPY_BFM(seq_fields, sps, direct_8x8_inference_flag); 
-    COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
-    COPY_BFM(seq_fields, sps, pic_order_cnt_type);
-    COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
-    COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
-
-    pic_param->pic_fields.value                                         = 0; /* reset all bits */
-    pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
-    pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
-
-    COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
-    COPY_BFM(pic_fields, pps, weighted_pred_flag);
-    COPY_BFM(pic_fields, pps, weighted_bipred_idc);
-    COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
-    COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
-    COPY_BFM(pic_fields, pps, pic_order_present_flag);
-    COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
-    COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
-    return TRUE;
+  pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
+  pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
+  pic_param->frame_num = priv->frame_num;
+
+  COPY_FIELD (sps, bit_depth_luma_minus8);
+  COPY_FIELD (sps, bit_depth_chroma_minus8);
+  COPY_FIELD (sps, num_ref_frames);
+  COPY_FIELD (pps, num_slice_groups_minus1);
+  COPY_FIELD (pps, slice_group_map_type);
+  COPY_FIELD (pps, slice_group_change_rate_minus1);
+  COPY_FIELD (pps, pic_init_qp_minus26);
+  COPY_FIELD (pps, pic_init_qs_minus26);
+  COPY_FIELD (pps, chroma_qp_index_offset);
+  COPY_FIELD (pps, second_chroma_qp_index_offset);
+
+  pic_param->seq_fields.value = 0;      /* reset all bits */
+  pic_param->seq_fields.bits.residual_colour_transform_flag =
+      sps->separate_colour_plane_flag;
+  pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31;       /* A.3.3.2 */
+
+  COPY_BFM (seq_fields, sps, chroma_format_idc);
+  COPY_BFM (seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
+  COPY_BFM (seq_fields, sps, frame_mbs_only_flag);
+  COPY_BFM (seq_fields, sps, mb_adaptive_frame_field_flag);
+  COPY_BFM (seq_fields, sps, direct_8x8_inference_flag);
+  COPY_BFM (seq_fields, sps, log2_max_frame_num_minus4);
+  COPY_BFM (seq_fields, sps, pic_order_cnt_type);
+  COPY_BFM (seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
+  COPY_BFM (seq_fields, sps, delta_pic_order_always_zero_flag);
+
+  pic_param->pic_fields.value = 0;      /* reset all bits */
+  pic_param->pic_fields.bits.field_pic_flag =
+      GST_VAAPI_PICTURE_IS_INTERLACED (picture);
+  pic_param->pic_fields.bits.reference_pic_flag =
+      GST_VAAPI_PICTURE_IS_REFERENCE (picture);
+
+  COPY_BFM (pic_fields, pps, entropy_coding_mode_flag);
+  COPY_BFM (pic_fields, pps, weighted_pred_flag);
+  COPY_BFM (pic_fields, pps, weighted_bipred_idc);
+  COPY_BFM (pic_fields, pps, transform_8x8_mode_flag);
+  COPY_BFM (pic_fields, pps, constrained_intra_pred_flag);
+  COPY_BFM (pic_fields, pps, pic_order_present_flag);
+  COPY_BFM (pic_fields, pps, deblocking_filter_control_present_flag);
+  COPY_BFM (pic_fields, pps, redundant_pic_cnt_present_flag);
+  return TRUE;
 }
 
 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
 static gboolean
-is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
+is_new_picture (GstVaapiParserInfoH264 * pi, GstVaapiParserInfoH264 * prev_pi)
 {
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-    GstH264PPS * const pps = slice_hdr->pps;
-    GstH264SPS * const sps = pps->sequence;
-    GstH264SliceHdr *prev_slice_hdr;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+  GstH264PPS *const pps = slice_hdr->pps;
+  GstH264SPS *const sps = pps->sequence;
+  GstH264SliceHdr *prev_slice_hdr;
 
-    if (!prev_pi)
-        return TRUE;
-    prev_slice_hdr = &prev_pi->data.slice_hdr;
+  if (!prev_pi)
+    return TRUE;
+  prev_slice_hdr = &prev_pi->data.slice_hdr;
 
 #define CHECK_EXPR(expr, field_name) do {              \
         if (!(expr)) {                                 \
@@ -3855,868 +3766,859 @@ is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
 
-    /* view_id differs in value and VOIdx of current slice_hdr is less
-       than the VOIdx of the prev_slice_hdr */
-    CHECK_VALUE(pi, prev_pi, view_id);
+  /* view_id differs in value and VOIdx of current slice_hdr is less
+     than the VOIdx of the prev_slice_hdr */
+  CHECK_VALUE (pi, prev_pi, view_id);
 
-    /* frame_num differs in value, regardless of inferred values to 0 */
-    CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
+  /* frame_num differs in value, regardless of inferred values to 0 */
+  CHECK_VALUE (slice_hdr, prev_slice_hdr, frame_num);
 
-    /* pic_parameter_set_id differs in value */
-    CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
+  /* pic_parameter_set_id differs in value */
+  CHECK_VALUE (slice_hdr, prev_slice_hdr, pps);
 
-    /* field_pic_flag differs in value */
-    CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
+  /* field_pic_flag differs in value */
+  CHECK_VALUE (slice_hdr, prev_slice_hdr, field_pic_flag);
 
-    /* bottom_field_flag is present in both and differs in value */
-    if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
-        CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
+  /* bottom_field_flag is present in both and differs in value */
+  if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
+    CHECK_VALUE (slice_hdr, prev_slice_hdr, bottom_field_flag);
 
-    /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
-    CHECK_EXPR((pi->nalu.ref_idc != 0) ==
-               (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
+  /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
+  CHECK_EXPR ((pi->nalu.ref_idc != 0) ==
+      (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
 
-    /* POC type is 0 for both and either pic_order_cnt_lsb differs in
-       value or delta_pic_order_cnt_bottom differs in value */
-    if (sps->pic_order_cnt_type == 0) {
-        CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
-        if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
-            CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
-    }
+  /* POC type is 0 for both and either pic_order_cnt_lsb differs in
+     value or delta_pic_order_cnt_bottom differs in value */
+  if (sps->pic_order_cnt_type == 0) {
+    CHECK_VALUE (slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
+    if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
+      CHECK_VALUE (slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
+  }
 
-    /* POC type is 1 for both and either delta_pic_order_cnt[0]
-       differs in value or delta_pic_order_cnt[1] differs in value */
-    else if (sps->pic_order_cnt_type == 1) {
-        CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
-        CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
-    }
+  /* POC type is 1 for both and either delta_pic_order_cnt[0]
+     differs in value or delta_pic_order_cnt[1] differs in value */
+  else if (sps->pic_order_cnt_type == 1) {
+    CHECK_VALUE (slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
+    CHECK_VALUE (slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
+  }
 
-    /* IdrPicFlag differs in value */
-    CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
+  /* IdrPicFlag differs in value */
+  CHECK_VALUE (&pi->nalu, &prev_pi->nalu, idr_pic_flag);
 
-    /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
-    if (pi->nalu.idr_pic_flag)
-        CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
+  /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
+  if (pi->nalu.idr_pic_flag)
+    CHECK_VALUE (slice_hdr, prev_slice_hdr, idr_pic_id);
 
 #undef CHECK_EXPR
 #undef CHECK_VALUE
-    return FALSE;
+  return FALSE;
 }
 
 /* Detection of a new access unit, assuming we are already in presence
    of a new picture */
 static inline gboolean
-is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
+is_new_access_unit (GstVaapiParserInfoH264 * pi,
+    GstVaapiParserInfoH264 * prev_pi)
 {
-    if (!prev_pi || prev_pi->view_id == pi->view_id)
-        return TRUE;
-    return pi->voc < prev_pi->voc;
+  if (!prev_pi || prev_pi->view_id == pi->view_id)
+    return TRUE;
+  return pi->voc < prev_pi->voc;
 }
 
 /* Determines whether the supplied picture has the same field parity
    than a picture specified through the other slice header */
 static inline gboolean
-same_field_parity(GstVaapiPictureH264 *field, GstH264SliceHdr *slice_hdr)
+same_field_parity (GstVaapiPictureH264 * field, GstH264SliceHdr * slice_hdr)
 {
-    g_return_val_if_fail(GST_VAAPI_PICTURE_IS_INTERLACED(field), FALSE);
+  g_return_val_if_fail (GST_VAAPI_PICTURE_IS_INTERLACED (field), FALSE);
 
-    return ((field->base.structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD) ^
-         slice_hdr->bottom_field_flag) == 0;
+  return ((field->base.structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD) ^
+      slice_hdr->bottom_field_flag) == 0;
 }
 
 /* Finds the first field picture corresponding to the supplied picture */
 static GstVaapiPictureH264 *
-find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi,
+find_first_field (GstVaapiDecoderH264 * decoder, GstVaapiParserInfoH264 * pi,
     gboolean fill_gaps)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-    GstVaapiFrameStore *fs;
-    GstVaapiPictureH264 *f0, *f1;
-
-    fs = priv->prev_frames[pi->voc];
-    if (!fs)
-        return NULL;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+  GstVaapiFrameStore *fs;
+  GstVaapiPictureH264 *f0, *f1;
 
-    f0 = fs->buffers[0];
-    if (!slice_hdr->field_pic_flag) {
-        if (fill_gaps && !gst_vaapi_frame_store_has_frame(fs))
-            fill_picture_other_field_gap(decoder, f0);
-        return NULL;
-    }
-
-    /* At this point, the current frame is known to be interlaced */
-    if (gst_vaapi_frame_store_has_frame(fs)) {
-        f1 = NULL;
-        if (fill_gaps && !same_field_parity(f0, slice_hdr))
-            f1 = fill_picture_first_field_gap(decoder, f0);
-        return f1;
-    }
+  fs = priv->prev_frames[pi->voc];
+  if (!fs)
+    return NULL;
 
-    /* At this point, the previous frame is interlaced and contains a
-       single field */
-    if (f0->frame_num == slice_hdr->frame_num) {
-        f1 = f0;
-        if (fill_gaps && same_field_parity(f0, slice_hdr)) {
-            fill_picture_other_field_gap(decoder, f0);
-            f1 = NULL;
-        }
-        return f1;
-    }
+  f0 = fs->buffers[0];
+  if (!slice_hdr->field_pic_flag) {
+    if (fill_gaps && !gst_vaapi_frame_store_has_frame (fs))
+      fill_picture_other_field_gap (decoder, f0);
+    return NULL;
+  }
 
+  /* At this point, the current frame is known to be interlaced */
+  if (gst_vaapi_frame_store_has_frame (fs)) {
     f1 = NULL;
-    if (fill_gaps) {
-        fill_picture_other_field_gap(decoder, f0);
-        if (!same_field_parity(f0, slice_hdr))
-            f1 = fill_picture_first_field_gap(decoder, f0);
+    if (fill_gaps && !same_field_parity (f0, slice_hdr))
+      f1 = fill_picture_first_field_gap (decoder, f0);
+    return f1;
+  }
+
+  /* At this point, the previous frame is interlaced and contains a
+     single field */
+  if (f0->frame_num == slice_hdr->frame_num) {
+    f1 = f0;
+    if (fill_gaps && same_field_parity (f0, slice_hdr)) {
+      fill_picture_other_field_gap (decoder, f0);
+      f1 = NULL;
     }
     return f1;
+  }
+
+  f1 = NULL;
+  if (fill_gaps) {
+    fill_picture_other_field_gap (decoder, f0);
+    if (!same_field_parity (f0, slice_hdr))
+      f1 = fill_picture_first_field_gap (decoder, f0);
+  }
+  return f1;
 }
 
 static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+decode_picture (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-    GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
-    GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
-    GstVaapiPictureH264 *picture, *first_field;
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+  GstH264PPS *const pps = ensure_pps (decoder, slice_hdr->pps);
+  GstH264SPS *const sps = ensure_sps (decoder, slice_hdr->pps->sequence);
+  GstVaapiPictureH264 *picture, *first_field;
+  GstVaapiDecoderStatus status;
 
-    g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
-    g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+  g_return_val_if_fail (pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+  g_return_val_if_fail (sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
 
-    /* Only decode base stream for MVC */
-    switch (sps->profile_idc) {
+  /* Only decode base stream for MVC */
+  switch (sps->profile_idc) {
     case GST_H264_PROFILE_MULTIVIEW_HIGH:
     case GST_H264_PROFILE_STEREO_HIGH:
-        break;
-    }
+      break;
+  }
 
-    status = ensure_context(decoder, sps);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-
-    priv->decoder_state = 0;
-    gst_vaapi_picture_replace(&priv->missing_picture, NULL);
-
-    first_field = find_first_field(decoder, pi, TRUE);
-    if (first_field) {
-        /* Re-use current picture where the first field was decoded */
-        picture = gst_vaapi_picture_h264_new_field(first_field);
-        if (!picture) {
-            GST_ERROR("failed to allocate field picture");
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        }
-    }
-    else {
-        /* Create new picture */
-        picture = gst_vaapi_picture_h264_new(decoder);
-        if (!picture) {
-            GST_ERROR("failed to allocate picture");
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        }
-    }
-    gst_vaapi_picture_replace(&priv->current_picture, picture);
-    gst_vaapi_picture_unref(picture);
-
-    /* Clear inter-view references list if this is the primary coded
-       picture of the current access unit */
-    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
-        g_ptr_array_set_size(priv->inter_views, 0);
-
-    /* Update cropping rectangle */
-    if (sps->frame_cropping_flag) {
-        GstVaapiRectangle crop_rect;
-        crop_rect.x = sps->crop_rect_x;
-        crop_rect.y = sps->crop_rect_y;
-        crop_rect.width = sps->crop_rect_width;
-        crop_rect.height = sps->crop_rect_height;
-        gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
-    }
+  status = ensure_context (decoder, sps);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    status = ensure_quant_matrix(decoder, picture);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_ERROR("failed to reset quantizer matrix");
-        return status;
-    }
+  priv->decoder_state = 0;
+  gst_vaapi_picture_replace (&priv->missing_picture, NULL);
+
+  first_field = find_first_field (decoder, pi, TRUE);
+  if (first_field) {
+    /* Re-use current picture where the first field was decoded */
+    picture = gst_vaapi_picture_h264_new_field (first_field);
+    if (!picture) {
+      GST_ERROR ("failed to allocate field picture");
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+    }
+  } else {
+    /* Create new picture */
+    picture = gst_vaapi_picture_h264_new (decoder);
+    if (!picture) {
+      GST_ERROR ("failed to allocate picture");
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+    }
+  }
+  gst_vaapi_picture_replace (&priv->current_picture, picture);
+  gst_vaapi_picture_unref (picture);
+
+  /* Clear inter-view references list if this is the primary coded
+     picture of the current access unit */
+  if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+    g_ptr_array_set_size (priv->inter_views, 0);
+
+  /* Update cropping rectangle */
+  if (sps->frame_cropping_flag) {
+    GstVaapiRectangle crop_rect;
+    crop_rect.x = sps->crop_rect_x;
+    crop_rect.y = sps->crop_rect_y;
+    crop_rect.width = sps->crop_rect_width;
+    crop_rect.height = sps->crop_rect_height;
+    gst_vaapi_picture_set_crop_rect (&picture->base, &crop_rect);
+  }
+
+  status = ensure_quant_matrix (decoder, picture);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_ERROR ("failed to reset quantizer matrix");
+    return status;
+  }
 
-    if (!init_picture(decoder, picture, pi))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    if (!fill_picture(decoder, picture))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  if (!init_picture (decoder, picture, pi))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  if (!fill_picture (decoder, picture))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
-    priv->decoder_state = pi->state;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->decoder_state = pi->state;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline guint
-get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
+get_slice_data_bit_offset (GstH264SliceHdr * slice_hdr, guint nal_header_bytes)
 {
-    guint epb_count;
+  guint epb_count;
 
-    epb_count = slice_hdr->n_emulation_prevention_bytes;
-    return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
+  epb_count = slice_hdr->n_emulation_prevention_bytes;
+  return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
 }
 
 static gboolean
-fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
-    GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
-{
-    VASliceParameterBufferH264 * const slice_param = slice->param;
-    GstH264PPS * const pps = get_pps(decoder);
-    GstH264SPS * const sps = get_sps(decoder);
-    GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
-    guint num_weight_tables = 0;
-    gint i, j;
-
-    if (pps->weighted_pred_flag &&
-        (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
-        num_weight_tables = 1;
-    else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
-        num_weight_tables = 2;
-    else
-        num_weight_tables = 0;
-
-    slice_param->luma_log2_weight_denom   = 0;
-    slice_param->chroma_log2_weight_denom = 0;
-    slice_param->luma_weight_l0_flag      = 0;
-    slice_param->chroma_weight_l0_flag    = 0;
-    slice_param->luma_weight_l1_flag      = 0;
-    slice_param->chroma_weight_l1_flag    = 0;
+fill_pred_weight_table (GstVaapiDecoderH264 * decoder,
+    GstVaapiSlice * slice, GstH264SliceHdr * slice_hdr)
+{
+  VASliceParameterBufferH264 *const slice_param = slice->param;
+  GstH264PPS *const pps = get_pps (decoder);
+  GstH264SPS *const sps = get_sps (decoder);
+  GstH264PredWeightTable *const w = &slice_hdr->pred_weight_table;
+  guint num_weight_tables = 0;
+  gint i, j;
+
+  if (pps->weighted_pred_flag &&
+      (GST_H264_IS_P_SLICE (slice_hdr) || GST_H264_IS_SP_SLICE (slice_hdr)))
+    num_weight_tables = 1;
+  else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE (slice_hdr))
+    num_weight_tables = 2;
+  else
+    num_weight_tables = 0;
+
+  slice_param->luma_log2_weight_denom = 0;
+  slice_param->chroma_log2_weight_denom = 0;
+  slice_param->luma_weight_l0_flag = 0;
+  slice_param->chroma_weight_l0_flag = 0;
+  slice_param->luma_weight_l1_flag = 0;
+  slice_param->chroma_weight_l1_flag = 0;
+
+  if (num_weight_tables < 1)
+    return TRUE;
 
-    if (num_weight_tables < 1)
-        return TRUE;
+  slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
+  slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
 
-    slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
-    slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
+  slice_param->luma_weight_l0_flag = 1;
+  for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
+    slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
+    slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
+  }
 
-    slice_param->luma_weight_l0_flag = 1;
+  slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
+  if (slice_param->chroma_weight_l0_flag) {
     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
-        slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
-        slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
+      for (j = 0; j < 2; j++) {
+        slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
+        slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
+      }
     }
+  }
 
-    slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
-    if (slice_param->chroma_weight_l0_flag) {
-        for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
-            for (j = 0; j < 2; j++) {
-                slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
-                slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
-            }
-        }
-    }
+  if (num_weight_tables < 2)
+    return TRUE;
 
-    if (num_weight_tables < 2)
-        return TRUE;
+  slice_param->luma_weight_l1_flag = 1;
+  for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
+    slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
+    slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
+  }
 
-    slice_param->luma_weight_l1_flag = 1;
+  slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
+  if (slice_param->chroma_weight_l1_flag) {
     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
-        slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
-        slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
+      for (j = 0; j < 2; j++) {
+        slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
+        slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
+      }
     }
-
-    slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
-    if (slice_param->chroma_weight_l1_flag) {
-        for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
-            for (j = 0; j < 2; j++) {
-                slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
-                slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
-            }
-        }
-    }
-    return TRUE;
+  }
+  return TRUE;
 }
 
 static gboolean
-fill_RefPicList(GstVaapiDecoderH264 *decoder,
-    GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
+fill_RefPicList (GstVaapiDecoderH264 * decoder,
+    GstVaapiSlice * slice, GstH264SliceHdr * slice_hdr)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    VASliceParameterBufferH264 * const slice_param = slice->param;
-    guint i, num_ref_lists = 0;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  VASliceParameterBufferH264 *const slice_param = slice->param;
+  guint i, num_ref_lists = 0;
 
-    slice_param->num_ref_idx_l0_active_minus1 = 0;
-    slice_param->num_ref_idx_l1_active_minus1 = 0;
+  slice_param->num_ref_idx_l0_active_minus1 = 0;
+  slice_param->num_ref_idx_l1_active_minus1 = 0;
 
-    if (GST_H264_IS_B_SLICE(slice_hdr))
-        num_ref_lists = 2;
-    else if (GST_H264_IS_I_SLICE(slice_hdr))
-        num_ref_lists = 0;
-    else
-        num_ref_lists = 1;
+  if (GST_H264_IS_B_SLICE (slice_hdr))
+    num_ref_lists = 2;
+  else if (GST_H264_IS_I_SLICE (slice_hdr))
+    num_ref_lists = 0;
+  else
+    num_ref_lists = 1;
 
-    if (num_ref_lists < 1)
-        return TRUE;
+  if (num_ref_lists < 1)
+    return TRUE;
 
-    slice_param->num_ref_idx_l0_active_minus1 =
-        slice_hdr->num_ref_idx_l0_active_minus1;
+  slice_param->num_ref_idx_l0_active_minus1 =
+      slice_hdr->num_ref_idx_l0_active_minus1;
 
-    for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
-        vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
-            priv->RefPicList0[i]);
-    for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
-        vaapi_init_picture(&slice_param->RefPicList0[i]);
+  for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
+    vaapi_fill_picture_for_RefPicListX (&slice_param->RefPicList0[i],
+        priv->RefPicList0[i]);
+  for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
+    vaapi_init_picture (&slice_param->RefPicList0[i]);
 
-    if (num_ref_lists < 2)
-        return TRUE;
+  if (num_ref_lists < 2)
+    return TRUE;
 
-    slice_param->num_ref_idx_l1_active_minus1 =
-        slice_hdr->num_ref_idx_l1_active_minus1;
+  slice_param->num_ref_idx_l1_active_minus1 =
+      slice_hdr->num_ref_idx_l1_active_minus1;
 
-    for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
-        vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
-            priv->RefPicList1[i]);
-    for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
-        vaapi_init_picture(&slice_param->RefPicList1[i]);
-    return TRUE;
+  for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
+    vaapi_fill_picture_for_RefPicListX (&slice_param->RefPicList1[i],
+        priv->RefPicList1[i]);
+  for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
+    vaapi_init_picture (&slice_param->RefPicList1[i]);
+  return TRUE;
 }
 
 static gboolean
-fill_slice(GstVaapiDecoderH264 *decoder,
-    GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
-{
-    VASliceParameterBufferH264 * const slice_param = slice->param;
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-
-    /* Fill in VASliceParameterBufferH264 */
-    slice_param->slice_data_bit_offset =
-        get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
-    slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
-    slice_param->slice_type                     = slice_hdr->type % 5;
-    slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
-    slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
-    slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
-    slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
-    slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
-    slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
-
-    if (!fill_RefPicList(decoder, slice, slice_hdr))
-        return FALSE;
-    if (!fill_pred_weight_table(decoder, slice, slice_hdr))
-        return FALSE;
-    return TRUE;
+fill_slice (GstVaapiDecoderH264 * decoder,
+    GstVaapiSlice * slice, GstVaapiParserInfoH264 * pi)
+{
+  VASliceParameterBufferH264 *const slice_param = slice->param;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+
+  /* Fill in VASliceParameterBufferH264 */
+  slice_param->slice_data_bit_offset =
+      get_slice_data_bit_offset (slice_hdr, pi->nalu.header_bytes);
+  slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
+  slice_param->slice_type = slice_hdr->type % 5;
+  slice_param->direct_spatial_mv_pred_flag =
+      slice_hdr->direct_spatial_mv_pred_flag;
+  slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
+  slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
+  slice_param->disable_deblocking_filter_idc =
+      slice_hdr->disable_deblocking_filter_idc;
+  slice_param->slice_alpha_c0_offset_div2 =
+      slice_hdr->slice_alpha_c0_offset_div2;
+  slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
+
+  if (!fill_RefPicList (decoder, slice, slice_hdr))
+    return FALSE;
+  if (!fill_pred_weight_table (decoder, slice, slice_hdr))
+    return FALSE;
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
-{
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstVaapiPictureH264 * const picture = priv->current_picture;
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-    GstVaapiSlice *slice;
-    GstBuffer * const buffer =
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
-    GstMapInfo map_info;
-
-    GST_DEBUG("slice (%u bytes)", pi->nalu.size);
-
-    if (!is_valid_state(pi->state,
-            GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
-        GST_WARNING("failed to receive enough headers to decode slice");
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-    }
+decode_slice (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
+{
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstVaapiPictureH264 *const picture = priv->current_picture;
+  GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
+  GstVaapiSlice *slice;
+  GstBuffer *const buffer =
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
+  GstMapInfo map_info;
 
-    if (!ensure_pps(decoder, slice_hdr->pps)) {
-        GST_ERROR("failed to activate PPS");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  GST_DEBUG ("slice (%u bytes)", pi->nalu.size);
 
-    if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
-        GST_ERROR("failed to activate SPS");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  if (!is_valid_state (pi->state, GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
+    GST_WARNING ("failed to receive enough headers to decode slice");
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  }
 
-    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
-        GST_ERROR("failed to map buffer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  if (!ensure_pps (decoder, slice_hdr->pps)) {
+    GST_ERROR ("failed to activate PPS");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    /* Check wether this is the first/last slice in the current access unit */
-    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
-    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
-
-    slice = GST_VAAPI_SLICE_NEW(H264, decoder,
-        (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
-    gst_buffer_unmap(buffer, &map_info);
-    if (!slice) {
-        GST_ERROR("failed to allocate slice");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!ensure_sps (decoder, slice_hdr->pps->sequence)) {
+    GST_ERROR ("failed to activate SPS");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    init_picture_refs(decoder, picture, slice_hdr);
-    if (!fill_slice(decoder, slice, pi)) {
-        gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
+    GST_ERROR ("failed to map buffer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+
+  /* Check whether this is the first/last slice in the current access unit */
+  if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_AU_START);
+  if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+
+  slice = GST_VAAPI_SLICE_NEW (H264, decoder,
+      (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
+  gst_buffer_unmap (buffer, &map_info);
+  if (!slice) {
+    GST_ERROR ("failed to allocate slice");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+
+  init_picture_refs (decoder, picture, slice_hdr);
+  if (!fill_slice (decoder, slice, pi)) {
+    gst_vaapi_mini_object_unref (GST_VAAPI_MINI_OBJECT (slice));
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
-    picture->last_slice_hdr = slice_hdr;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_picture_add_slice (GST_VAAPI_PICTURE_CAST (picture), slice);
+  picture->last_slice_hdr = slice_hdr;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline gint
-scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
+scan_for_start_code (GstAdapter * adapter, guint ofs, guint size, guint32 * scp)
 {
-    return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
-                                                     0xffffff00, 0x00000100,
-                                                     ofs, size,
-                                                     scp);
+  return (gint) gst_adapter_masked_scan_uint32_peek (adapter,
+      0xffffff00, 0x00000100, ofs, size, scp);
 }
 
 static GstVaapiDecoderStatus
-decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+decode_unit (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserInfoH264 *const pi = unit->parsed_info;
+  GstVaapiDecoderStatus status;
 
-    priv->decoder_state |= pi->state;
-    switch (pi->nalu.type) {
+  priv->decoder_state |= pi->state;
+  switch (pi->nalu.type) {
     case GST_H264_NAL_SPS:
-        status = decode_sps(decoder, unit);
-        break;
+      status = decode_sps (decoder, unit);
+      break;
     case GST_H264_NAL_SUBSET_SPS:
-        status = decode_subset_sps(decoder, unit);
-        break;
+      status = decode_subset_sps (decoder, unit);
+      break;
     case GST_H264_NAL_PPS:
-        status = decode_pps(decoder, unit);
-        break;
+      status = decode_pps (decoder, unit);
+      break;
     case GST_H264_NAL_SLICE_EXT:
     case GST_H264_NAL_SLICE_IDR:
-        /* fall-through. IDR specifics are handled in init_picture() */
+      /* fall-through. IDR specifics are handled in init_picture() */
     case GST_H264_NAL_SLICE:
-        status = decode_slice(decoder, unit);
-        break;
+      status = decode_slice (decoder, unit);
+      break;
     case GST_H264_NAL_SEQ_END:
     case GST_H264_NAL_STREAM_END:
-        status = decode_sequence_end(decoder);
-        break;
+      status = decode_sequence_end (decoder);
+      break;
     case GST_H264_NAL_SEI:
-        status = decode_sei(decoder, unit);
-        break;
+      status = decode_sei (decoder, unit);
+      break;
     default:
-        GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
-        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-        break;
-    }
-    return status;
+      GST_WARNING ("unsupported NAL unit type %d", pi->nalu.type);
+      status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      break;
+  }
+  return status;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
-    const guchar *buf, guint buf_size)
+gst_vaapi_decoder_h264_decode_codec_data (GstVaapiDecoder * base_decoder,
+    const guchar * buf, guint buf_size)
 {
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-    GstVaapiDecoderUnit unit;
-    GstVaapiParserInfoH264 *pi = NULL;
-    GstH264ParserResult result;
-    guint i, ofs, num_sps, num_pps;
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
+  GstVaapiDecoderUnit unit;
+  GstVaapiParserInfoH264 *pi = NULL;
+  GstH264ParserResult result;
+  guint i, ofs, num_sps, num_pps;
 
-    unit.parsed_info = NULL;
+  unit.parsed_info = NULL;
 
-    if (buf_size < 8)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+  if (buf_size < 8)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
 
-    if (buf[0] != 1) {
-        GST_ERROR("failed to decode codec-data, not in avcC format");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (buf[0] != 1) {
+    GST_ERROR ("failed to decode codec-data, not in avcC format");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    priv->nal_length_size = (buf[4] & 0x03) + 1;
-
-    num_sps = buf[5] & 0x1f;
-    ofs = 6;
-
-    for (i = 0; i < num_sps; i++) {
-        pi = gst_vaapi_parser_info_h264_new();
-        if (!pi)
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        unit.parsed_info = pi;
-
-        result = gst_h264_parser_identify_nalu_avc(
-            priv->parser,
-            buf, ofs, buf_size, 2,
-            &pi->nalu
-        );
-        if (result != GST_H264_PARSER_OK) {
-            status = get_status(result);
-            goto cleanup;
-        }
+  priv->nal_length_size = (buf[4] & 0x03) + 1;
 
-        status = parse_sps(decoder, &unit);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            goto cleanup;
-        ofs = pi->nalu.offset + pi->nalu.size;
+  num_sps = buf[5] & 0x1f;
+  ofs = 6;
 
-        pi->state = priv->parser_state;
-        pi->flags = 0;
+  for (i = 0; i < num_sps; i++) {
+    pi = gst_vaapi_parser_info_h264_new ();
+    if (!pi)
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+    unit.parsed_info = pi;
 
-        status = decode_sps(decoder, &unit);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            goto cleanup;
-        gst_vaapi_parser_info_h264_replace(&pi, NULL);
+    result = gst_h264_parser_identify_nalu_avc (priv->parser,
+        buf, ofs, buf_size, 2, &pi->nalu);
+    if (result != GST_H264_PARSER_OK) {
+      status = get_status (result);
+      goto cleanup;
     }
 
-    num_pps = buf[ofs];
-    ofs++;
-
-    for (i = 0; i < num_pps; i++) {
-        pi = gst_vaapi_parser_info_h264_new();
-        if (!pi)
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        unit.parsed_info = pi;
-
-        result = gst_h264_parser_identify_nalu_avc(
-            priv->parser,
-            buf, ofs, buf_size, 2,
-            &pi->nalu
-        );
-        if (result != GST_H264_PARSER_OK) {
-            status = get_status(result);
-            goto cleanup;
-        }
+    status = parse_sps (decoder, &unit);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      goto cleanup;
+    ofs = pi->nalu.offset + pi->nalu.size;
+
+    pi->state = priv->parser_state;
+    pi->flags = 0;
+
+    status = decode_sps (decoder, &unit);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      goto cleanup;
+    gst_vaapi_parser_info_h264_replace (&pi, NULL);
+  }
 
-        status = parse_pps(decoder, &unit);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            goto cleanup;
-        ofs = pi->nalu.offset + pi->nalu.size;
+  num_pps = buf[ofs];
+  ofs++;
 
-        pi->state = priv->parser_state;
-        pi->flags = 0;
+  for (i = 0; i < num_pps; i++) {
+    pi = gst_vaapi_parser_info_h264_new ();
+    if (!pi)
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+    unit.parsed_info = pi;
 
-        status = decode_pps(decoder, &unit);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            goto cleanup;
-        gst_vaapi_parser_info_h264_replace(&pi, NULL);
+    result = gst_h264_parser_identify_nalu_avc (priv->parser,
+        buf, ofs, buf_size, 2, &pi->nalu);
+    if (result != GST_H264_PARSER_OK) {
+      status = get_status (result);
+      goto cleanup;
     }
 
-    priv->is_avcC = TRUE;
-    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+    status = parse_pps (decoder, &unit);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      goto cleanup;
+    ofs = pi->nalu.offset + pi->nalu.size;
+
+    pi->state = priv->parser_state;
+    pi->flags = 0;
+
+    status = decode_pps (decoder, &unit);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      goto cleanup;
+    gst_vaapi_parser_info_h264_replace (&pi, NULL);
+  }
+
+  priv->is_avcC = TRUE;
+  status = GST_VAAPI_DECODER_STATUS_SUCCESS;
 
 cleanup:
-    gst_vaapi_parser_info_h264_replace(&pi, NULL);
-    return status;
+  gst_vaapi_parser_info_h264_replace (&pi, NULL);
+  return status;
 }
 
 static GstVaapiDecoderStatus
-ensure_decoder(GstVaapiDecoderH264 *decoder)
+ensure_decoder (GstVaapiDecoderH264 * decoder)
 {
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
 
-    if (!priv->is_opened) {
-        priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
-        if (!priv->is_opened)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
+  if (!priv->is_opened) {
+    priv->is_opened = gst_vaapi_decoder_h264_open (decoder);
+    if (!priv->is_opened)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
 
-        status = gst_vaapi_decoder_decode_codec_data(
-            GST_VAAPI_DECODER_CAST(decoder));
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    status =
+        gst_vaapi_decoder_decode_codec_data (GST_VAAPI_DECODER_CAST (decoder));
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
-    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
-{
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
-    GstVaapiDecoderH264Private * const priv = &decoder->priv;
-    GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
-    GstVaapiParserInfoH264 *pi;
-    GstVaapiDecoderStatus status;
-    GstH264ParserResult result;
-    guchar *buf;
-    guint i, size, buf_size, nalu_size, flags;
-    guint32 start_code;
-    gint ofs, ofs2;
-    gboolean at_au_end = FALSE;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+gst_vaapi_decoder_h264_parse (GstVaapiDecoder * base_decoder,
+    GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
+{
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
+  GstVaapiDecoderH264Private *const priv = &decoder->priv;
+  GstVaapiParserState *const ps = GST_VAAPI_PARSER_STATE (base_decoder);
+  GstVaapiParserInfoH264 *pi;
+  GstVaapiDecoderStatus status;
+  GstH264ParserResult result;
+  guchar *buf;
+  guint i, size, buf_size, nalu_size, flags;
+  guint32 start_code;
+  gint ofs, ofs2;
+  gboolean at_au_end = FALSE;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    switch (priv->stream_alignment) {
+  switch (priv->stream_alignment) {
     case GST_VAAPI_STREAM_ALIGN_H264_NALU:
     case GST_VAAPI_STREAM_ALIGN_H264_AU:
-        size = gst_adapter_available_fast(adapter);
-        break;
+      size = gst_adapter_available_fast (adapter);
+      break;
     default:
-        size = gst_adapter_available(adapter);
-        break;
-    }
-
-    if (priv->is_avcC) {
-        if (size < priv->nal_length_size)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-        buf = (guchar *)&start_code;
-        g_assert(priv->nal_length_size <= sizeof(start_code));
-        gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
-
-        nalu_size = 0;
-        for (i = 0; i < priv->nal_length_size; i++)
-            nalu_size = (nalu_size << 8) | buf[i];
-
-        buf_size = priv->nal_length_size + nalu_size;
-        if (size < buf_size)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
-            at_au_end = (buf_size == size);
-    }
+      size = gst_adapter_available (adapter);
+      break;
+  }
+
+  if (priv->is_avcC) {
+    if (size < priv->nal_length_size)
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+    buf = (guchar *) & start_code;
+    g_assert (priv->nal_length_size <= sizeof (start_code));
+    gst_adapter_copy (adapter, buf, 0, priv->nal_length_size);
+
+    nalu_size = 0;
+    for (i = 0; i < priv->nal_length_size; i++)
+      nalu_size = (nalu_size << 8) | buf[i];
+
+    buf_size = priv->nal_length_size + nalu_size;
+    if (size < buf_size)
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+    else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+      at_au_end = (buf_size == size);
+  } else {
+    if (size < 4)
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+    if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
+      buf_size = size;
     else {
-        if (size < 4)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-        if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
-            buf_size = size;
-        else {
-            ofs = scan_for_start_code(adapter, 0, size, NULL);
-            if (ofs < 0)
-                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-            if (ofs > 0) {
-                gst_adapter_flush(adapter, ofs);
-                size -= ofs;
-            }
-
-            ofs2 = ps->input_offset2 - ofs - 4;
-            if (ofs2 < 4)
-                ofs2 = 4;
-
-            ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
-                scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
-            if (ofs < 0) {
-                // Assume the whole NAL unit is present if end-of-stream
-                // or stream buffers aligned on access unit boundaries
-                if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
-                    at_au_end = TRUE;
-                else if (!at_eos) {
-                    ps->input_offset2 = size;
-                    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-                }
-                ofs = size;
-            }
-            buf_size = ofs;
-        }
-    }
-    ps->input_offset2 = 0;
-
-    buf = (guchar *)gst_adapter_map(adapter, buf_size);
-    if (!buf)
+      ofs = scan_for_start_code (adapter, 0, size, NULL);
+      if (ofs < 0)
         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
 
-    unit->size = buf_size;
-
-    pi = gst_vaapi_parser_info_h264_new();
-    if (!pi)
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-
-    gst_vaapi_decoder_unit_set_parsed_info(unit,
-        pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
-
-    if (priv->is_avcC)
-        result = gst_h264_parser_identify_nalu_avc(priv->parser,
-            buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
-    else
-        result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
-            buf, 0, buf_size, &pi->nalu);
-    status = get_status(result);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+      if (ofs > 0) {
+        gst_adapter_flush (adapter, ofs);
+        size -= ofs;
+      }
+
+      ofs2 = ps->input_offset2 - ofs - 4;
+      if (ofs2 < 4)
+        ofs2 = 4;
+
+      ofs = G_UNLIKELY (size < ofs2 + 4) ? -1 :
+          scan_for_start_code (adapter, ofs2, size - ofs2, NULL);
+      if (ofs < 0) {
+        // Assume the whole NAL unit is present if end-of-stream
+        // or stream buffers aligned on access unit boundaries
+        if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+          at_au_end = TRUE;
+        else if (!at_eos) {
+          ps->input_offset2 = size;
+          return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+        }
+        ofs = size;
+      }
+      buf_size = ofs;
+    }
+  }
+  ps->input_offset2 = 0;
+
+  buf = (guchar *) gst_adapter_map (adapter, buf_size);
+  if (!buf)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  unit->size = buf_size;
+
+  pi = gst_vaapi_parser_info_h264_new ();
+  if (!pi)
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+
+  gst_vaapi_decoder_unit_set_parsed_info (unit,
+      pi, (GDestroyNotify) gst_vaapi_mini_object_unref);
+
+  if (priv->is_avcC)
+    result = gst_h264_parser_identify_nalu_avc (priv->parser,
+        buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
+  else
+    result = gst_h264_parser_identify_nalu_unchecked (priv->parser,
+        buf, 0, buf_size, &pi->nalu);
+  status = get_status (result);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    switch (pi->nalu.type) {
+  switch (pi->nalu.type) {
     case GST_H264_NAL_SPS:
-        status = parse_sps(decoder, unit);
-        break;
+      status = parse_sps (decoder, unit);
+      break;
     case GST_H264_NAL_SUBSET_SPS:
-        status = parse_subset_sps(decoder, unit);
-        break;
+      status = parse_subset_sps (decoder, unit);
+      break;
     case GST_H264_NAL_PPS:
-        status = parse_pps(decoder, unit);
-        break;
+      status = parse_pps (decoder, unit);
+      break;
     case GST_H264_NAL_SEI:
-        status = parse_sei(decoder, unit);
-        break;
+      status = parse_sei (decoder, unit);
+      break;
     case GST_H264_NAL_SLICE_EXT:
-        if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
-            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-            break;
-        }
-        /* fall-through */
+      if (!GST_H264_IS_MVC_NALU (&pi->nalu)) {
+        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+        break;
+      }
+      /* fall-through */
     case GST_H264_NAL_SLICE_IDR:
     case GST_H264_NAL_SLICE:
-        status = parse_slice(decoder, unit);
-        break;
+      status = parse_slice (decoder, unit);
+      break;
     default:
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-        break;
-    }
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+      status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+      break;
+  }
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    flags = 0;
-    if (at_au_end) {
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
-            GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
-    }
-    switch (pi->nalu.type) {
+  flags = 0;
+  if (at_au_end) {
+    flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
+        GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+  }
+  switch (pi->nalu.type) {
     case GST_H264_NAL_AU_DELIMITER:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        /* fall-through */
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      /* fall-through */
     case GST_H264_NAL_FILLER_DATA:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      break;
     case GST_H264_NAL_STREAM_END:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
-        /* fall-through */
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
+      /* fall-through */
     case GST_H264_NAL_SEQ_END:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+      break;
     case GST_H264_NAL_SPS:
     case GST_H264_NAL_SUBSET_SPS:
     case GST_H264_NAL_PPS:
     case GST_H264_NAL_SEI:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      break;
     case GST_H264_NAL_SLICE_EXT:
-        if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-            break;
-        }
-        /* fall-through */
+      if (!GST_H264_IS_MVC_NALU (&pi->nalu)) {
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+        break;
+      }
+      /* fall-through */
     case GST_H264_NAL_SLICE_IDR:
     case GST_H264_NAL_SLICE:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        if (priv->prev_pi &&
-            (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
-                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        }
-        else if (is_new_picture(pi, priv->prev_slice_pi)) {
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-            if (is_new_access_unit(pi, priv->prev_slice_pi))
-                flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
-        }
-        gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      if (priv->prev_pi &&
+          (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+            GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      } else if (is_new_picture (pi, priv->prev_slice_pi)) {
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+        if (is_new_access_unit (pi, priv->prev_slice_pi))
+          flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+      }
+      gst_vaapi_parser_info_h264_replace (&priv->prev_slice_pi, pi);
+      break;
     case GST_H264_NAL_SPS_EXT:
     case GST_H264_NAL_SLICE_AUX:
-        /* skip SPS extension and auxiliary slice for now */
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        break;
+      /* skip SPS extension and auxiliary slice for now */
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      break;
     case GST_H264_NAL_PREFIX_UNIT:
-        /* skip Prefix NAL units for now */
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
-            GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
-            GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        break;
+      /* skip Prefix NAL units for now */
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
+          GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+          GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      break;
     default:
-        if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
-                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        break;
-    }
-    if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
-        priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
-    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
+      if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+            GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      break;
+  }
+  if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
+    priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+  GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, flags);
 
-    pi->nalu.data = NULL;
-    pi->state = priv->parser_state;
-    pi->flags = flags;
-    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  pi->nalu.data = NULL;
+  pi->state = priv->parser_state;
+  pi->flags = flags;
+  gst_vaapi_parser_info_h264_replace (&priv->prev_pi, pi);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_h264_decode (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
+  GstVaapiDecoderStatus status;
 
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-    return decode_unit(decoder, unit);
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+  return decode_unit (decoder, unit);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_h264_start_frame (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
 
-    return decode_picture(decoder, unit);
+  return decode_picture (decoder, unit);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_h264_end_frame (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
 
-    return decode_current_picture(decoder);
+  return decode_current_picture (decoder);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_h264_flush (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderH264 * const decoder =
-        GST_VAAPI_DECODER_H264_CAST(base_decoder);
+  GstVaapiDecoderH264 *const decoder =
+      GST_VAAPI_DECODER_H264_CAST (base_decoder);
 
-    dpb_flush(decoder, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  dpb_flush (decoder, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static void
-gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
+gst_vaapi_decoder_h264_class_init (GstVaapiDecoderH264Class * klass)
 {
-    GstVaapiMiniObjectClass * const object_class =
-        GST_VAAPI_MINI_OBJECT_CLASS(klass);
-    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
+  GstVaapiMiniObjectClass *const object_class =
+      GST_VAAPI_MINI_OBJECT_CLASS (klass);
+  GstVaapiDecoderClass *const decoder_class = GST_VAAPI_DECODER_CLASS (klass);
 
-    object_class->size          = sizeof(GstVaapiDecoderH264);
-    object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
+  object_class->size = sizeof (GstVaapiDecoderH264);
+  object_class->finalize = (GDestroyNotify) gst_vaapi_decoder_finalize;
 
-    decoder_class->create       = gst_vaapi_decoder_h264_create;
-    decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
-    decoder_class->parse        = gst_vaapi_decoder_h264_parse;
-    decoder_class->decode       = gst_vaapi_decoder_h264_decode;
-    decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
-    decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
-    decoder_class->flush        = gst_vaapi_decoder_h264_flush;
+  decoder_class->create = gst_vaapi_decoder_h264_create;
+  decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
+  decoder_class->parse = gst_vaapi_decoder_h264_parse;
+  decoder_class->decode = gst_vaapi_decoder_h264_decode;
+  decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
+  decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
+  decoder_class->flush = gst_vaapi_decoder_h264_flush;
 
-    decoder_class->decode_codec_data =
-        gst_vaapi_decoder_h264_decode_codec_data;
+  decoder_class->decode_codec_data = gst_vaapi_decoder_h264_decode_codec_data;
 }
 
 static inline const GstVaapiDecoderClass *
-gst_vaapi_decoder_h264_class(void)
+gst_vaapi_decoder_h264_class (void)
 {
-    static GstVaapiDecoderH264Class g_class;
-    static gsize g_class_init = FALSE;
+  static GstVaapiDecoderH264Class g_class;
+  static gsize g_class_init = FALSE;
 
-    if (g_once_init_enter(&g_class_init)) {
-        gst_vaapi_decoder_h264_class_init(&g_class);
-        g_once_init_leave(&g_class_init, TRUE);
-    }
-    return GST_VAAPI_DECODER_CLASS(&g_class);
+  if (g_once_init_enter (&g_class_init)) {
+    gst_vaapi_decoder_h264_class_init (&g_class);
+    g_once_init_leave (&g_class_init, TRUE);
+  }
+  return GST_VAAPI_DECODER_CLASS (&g_class);
 }
 
 /**
@@ -4729,12 +4631,12 @@ gst_vaapi_decoder_h264_class(void)
  * specific alignment, NAL unit boundaries, or access unit boundaries.
  */
 void
-gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
+gst_vaapi_decoder_h264_set_alignment (GstVaapiDecoderH264 * decoder,
     GstVaapiStreamAlignH264 alignment)
 {
-    g_return_if_fail(decoder != NULL);
+  g_return_if_fail (decoder != NULL);
 
-    decoder->priv.stream_alignment = alignment;
+  decoder->priv.stream_alignment = alignment;
 }
 
 /**
@@ -4748,7 +4650,7 @@ gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
  * Return value: the newly allocated #GstVaapiDecoder object
  */
 GstVaapiDecoder *
-gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
+gst_vaapi_decoder_h264_new (GstVaapiDisplay * display, GstCaps * caps)
 {
-    return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
+  return gst_vaapi_decoder_new (gst_vaapi_decoder_h264_class (), display, caps);
 }
index 781a60d..d199bb2 100644 (file)
 #define GST_VAAPI_DECODER_JPEG_CAST(decoder) \
     ((GstVaapiDecoderJpeg *)(decoder))
 
-typedef struct _GstVaapiDecoderJpegPrivate      GstVaapiDecoderJpegPrivate;
-typedef struct _GstVaapiDecoderJpegClass        GstVaapiDecoderJpegClass;
-
-typedef enum  {
-    GST_JPEG_VIDEO_STATE_GOT_SOI        = 1 << 0,
-    GST_JPEG_VIDEO_STATE_GOT_SOF        = 1 << 1,
-    GST_JPEG_VIDEO_STATE_GOT_SOS        = 1 << 2,
-    GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE  = 1 << 3,
-    GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE   = 1 << 4,
-
-    GST_JPEG_VIDEO_STATE_VALID_PICTURE = (
-        GST_JPEG_VIDEO_STATE_GOT_SOI |
-        GST_JPEG_VIDEO_STATE_GOT_SOF |
-        GST_JPEG_VIDEO_STATE_GOT_SOS),
+typedef struct _GstVaapiDecoderJpegPrivate GstVaapiDecoderJpegPrivate;
+typedef struct _GstVaapiDecoderJpegClass GstVaapiDecoderJpegClass;
+
+typedef enum
+{
+  GST_JPEG_VIDEO_STATE_GOT_SOI = 1 << 0,
+  GST_JPEG_VIDEO_STATE_GOT_SOF = 1 << 1,
+  GST_JPEG_VIDEO_STATE_GOT_SOS = 1 << 2,
+  GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE = 1 << 3,
+  GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE = 1 << 4,
+
+  GST_JPEG_VIDEO_STATE_VALID_PICTURE = (GST_JPEG_VIDEO_STATE_GOT_SOI |
+      GST_JPEG_VIDEO_STATE_GOT_SOF | GST_JPEG_VIDEO_STATE_GOT_SOS),
 } GstJpegVideoState;
 
-struct _GstVaapiDecoderJpegPrivate {
-    GstVaapiProfile             profile;
-    guint                       width;
-    guint                       height;
-    GstVaapiPicture            *current_picture;
-    GstJpegFrameHdr             frame_hdr;
-    GstJpegHuffmanTables        huf_tables;
-    GstJpegQuantTables          quant_tables;
-    guint                       mcu_restart;
-    guint                       parser_state;
-    guint                       decoder_state;
-    guint                       is_opened       : 1;
-    guint                       profile_changed : 1;
+struct _GstVaapiDecoderJpegPrivate
+{
+  GstVaapiProfile profile;
+  guint width;
+  guint height;
+  GstVaapiPicture *current_picture;
+  GstJpegFrameHdr frame_hdr;
+  GstJpegHuffmanTables huf_tables;
+  GstJpegQuantTables quant_tables;
+  guint mcu_restart;
+  guint parser_state;
+  guint decoder_state;
+  guint is_opened:1;
+  guint profile_changed:1;
 };
 
 /**
@@ -82,10 +82,11 @@ struct _GstVaapiDecoderJpegPrivate {
  *
  * A decoder based on Jpeg.
  */
-struct _GstVaapiDecoderJpeg {
-    /*< private >*/
-    GstVaapiDecoder             parent_instance;
-    GstVaapiDecoderJpegPrivate  priv;
+struct _GstVaapiDecoderJpeg
+{
+  /*< private > */
+  GstVaapiDecoder parent_instance;
+  GstVaapiDecoderJpegPrivate priv;
 };
 
 /**
@@ -93,122 +94,121 @@ struct _GstVaapiDecoderJpeg {
  *
  * A decoder class based on Jpeg.
  */
-struct _GstVaapiDecoderJpegClass {
-    /*< private >*/
-    GstVaapiDecoderClass parent_class;
+struct _GstVaapiDecoderJpegClass
+{
+  /*< private > */
+  GstVaapiDecoderClass parent_class;
 };
 
 static inline void
-unit_set_marker_code(GstVaapiDecoderUnit *unit, GstJpegMarker marker)
+unit_set_marker_code (GstVaapiDecoderUnit * unit, GstJpegMarker marker)
 {
-    unit->parsed_info = GSIZE_TO_POINTER(marker);
+  unit->parsed_info = GSIZE_TO_POINTER (marker);
 }
 
 static inline GstJpegMarker
-unit_get_marker_code(GstVaapiDecoderUnit *unit)
+unit_get_marker_code (GstVaapiDecoderUnit * unit)
 {
-    return GPOINTER_TO_SIZE(unit->parsed_info);
+  return GPOINTER_TO_SIZE (unit->parsed_info);
 }
 
 static void
-gst_vaapi_decoder_jpeg_close(GstVaapiDecoderJpeg *decoder)
+gst_vaapi_decoder_jpeg_close (GstVaapiDecoderJpeg * decoder)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
 
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
 
-    /* Reset all */
-    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
-    priv->width                 = 0;
-    priv->height                = 0;
-    priv->is_opened             = FALSE;
-    priv->profile_changed       = TRUE;
+  /* Reset all */
+  priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
+  priv->width = 0;
+  priv->height = 0;
+  priv->is_opened = FALSE;
+  priv->profile_changed = TRUE;
 }
 
 static gboolean
-gst_vaapi_decoder_jpeg_open(GstVaapiDecoderJpeg *decoder)
+gst_vaapi_decoder_jpeg_open (GstVaapiDecoderJpeg * decoder)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
 
-    gst_vaapi_decoder_jpeg_close(decoder);
+  gst_vaapi_decoder_jpeg_close (decoder);
 
-    priv->parser_state  = 0;
-    priv->decoder_state = 0;
-    return TRUE;
+  priv->parser_state = 0;
+  priv->decoder_state = 0;
+  return TRUE;
 }
 
 static void
-gst_vaapi_decoder_jpeg_destroy(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_jpeg_destroy (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderJpeg * const decoder =
-        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
+  GstVaapiDecoderJpeg *const decoder =
+      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
 
-    gst_vaapi_decoder_jpeg_close(decoder);
+  gst_vaapi_decoder_jpeg_close (decoder);
 }
 
 static gboolean
-gst_vaapi_decoder_jpeg_create(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_jpeg_create (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderJpeg * const decoder =
-        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
+  GstVaapiDecoderJpeg *const decoder =
+      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
 
-    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
-    priv->profile_changed       = TRUE;
-    return TRUE;
+  priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
+  priv->profile_changed = TRUE;
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-ensure_context(GstVaapiDecoderJpeg *decoder)
-{
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstVaapiProfile profiles[2];
-    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
-    guint i, n_profiles = 0;
-    gboolean reset_context = FALSE;
-
-    if (priv->profile_changed) {
-        GST_DEBUG("profile changed");
-        priv->profile_changed = FALSE;
-        reset_context         = TRUE;
-
-        profiles[n_profiles++] = priv->profile;
-        //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
-        //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;
-
-        for (i = 0; i < n_profiles; i++) {
-            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
-                                              profiles[i], entrypoint))
-                break;
-        }
-        if (i == n_profiles)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-        priv->profile = profiles[i];
-    }
-
-    if (reset_context) {
-        GstVaapiContextInfo info;
-
-        info.profile    = priv->profile;
-        info.entrypoint = entrypoint;
-        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
-        info.width      = priv->width;
-        info.height     = priv->height;
-        info.ref_frames = 2;
-        reset_context   = gst_vaapi_decoder_ensure_context(
-            GST_VAAPI_DECODER(decoder),
-            &info
-        );
-        if (!reset_context)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ensure_context (GstVaapiDecoderJpeg * decoder)
+{
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstVaapiProfile profiles[2];
+  GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+  guint i, n_profiles = 0;
+  gboolean reset_context = FALSE;
+
+  if (priv->profile_changed) {
+    GST_DEBUG ("profile changed");
+    priv->profile_changed = FALSE;
+    reset_context = TRUE;
+
+    profiles[n_profiles++] = priv->profile;
+    //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
+    //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;
+
+    for (i = 0; i < n_profiles; i++) {
+      if (gst_vaapi_display_has_decoder (GST_VAAPI_DECODER_DISPLAY (decoder),
+              profiles[i], entrypoint))
+        break;
     }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    if (i == n_profiles)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+    priv->profile = profiles[i];
+  }
+
+  if (reset_context) {
+    GstVaapiContextInfo info;
+
+    info.profile = priv->profile;
+    info.entrypoint = entrypoint;
+    info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+    info.width = priv->width;
+    info.height = priv->height;
+    info.ref_frames = 2;
+    reset_context =
+        gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);
+    if (!reset_context)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline gboolean
-is_valid_state(guint state, guint ref_state)
+is_valid_state (guint state, guint ref_state)
 {
-    return (state & ref_state) == ref_state;
+  return (state & ref_state) == ref_state;
 }
 
 #define VALID_STATE(TYPE, STATE)                \
@@ -216,677 +216,662 @@ is_valid_state(guint state, guint ref_state)
         G_PASTE(GST_JPEG_VIDEO_STATE_,STATE))
 
 static GstVaapiDecoderStatus
-decode_current_picture(GstVaapiDecoderJpeg *decoder)
+decode_current_picture (GstVaapiDecoderJpeg * decoder)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->current_picture;
-
-    if (!VALID_STATE(decoder, VALID_PICTURE))
-        goto drop_frame;
-    priv->decoder_state = 0;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->current_picture;
 
-    if (!picture)
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!VALID_STATE (decoder, VALID_PICTURE))
+    goto drop_frame;
+  priv->decoder_state = 0;
 
-    if (!gst_vaapi_picture_decode(picture))
-        goto error;
-    if (!gst_vaapi_picture_output(picture))
-        goto error;
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
+  if (!picture)
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
+  if (!gst_vaapi_picture_decode (picture))
+    goto error;
+  if (!gst_vaapi_picture_output (picture))
+    goto error;
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
 error:
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
 drop_frame:
-    priv->decoder_state = 0;
-    return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+  priv->decoder_state = 0;
+  return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
 }
 
 static gboolean
-fill_picture(
-    GstVaapiDecoderJpeg *decoder, 
-    GstVaapiPicture     *picture,
-    GstJpegFrameHdr     *frame_hdr
-)
-{
-    VAPictureParameterBufferJPEGBaseline * const pic_param = picture->param;
-    guint i;
-
-    memset(pic_param, 0, sizeof(VAPictureParameterBufferJPEGBaseline));
-    pic_param->picture_width    = frame_hdr->width;
-    pic_param->picture_height   = frame_hdr->height;
-
-    pic_param->num_components   = frame_hdr->num_components;
-    if (frame_hdr->num_components > 4)
-        return FALSE;
-    for (i = 0; i < pic_param->num_components; i++) {
-        pic_param->components[i].component_id =
-            frame_hdr->components[i].identifier;
-        pic_param->components[i].h_sampling_factor =
-            frame_hdr->components[i].horizontal_factor;
-        pic_param->components[i].v_sampling_factor =
-            frame_hdr->components[i].vertical_factor;
-        pic_param->components[i].quantiser_table_selector =
-            frame_hdr->components[i].quant_table_selector;
-    }
-    return TRUE;
+fill_picture (GstVaapiDecoderJpeg * decoder,
+    GstVaapiPicture * picture, GstJpegFrameHdr * frame_hdr)
+{
+  VAPictureParameterBufferJPEGBaseline *const pic_param = picture->param;
+  guint i;
+
+  memset (pic_param, 0, sizeof (VAPictureParameterBufferJPEGBaseline));
+  pic_param->picture_width = frame_hdr->width;
+  pic_param->picture_height = frame_hdr->height;
+
+  pic_param->num_components = frame_hdr->num_components;
+  if (frame_hdr->num_components > 4)
+    return FALSE;
+  for (i = 0; i < pic_param->num_components; i++) {
+    pic_param->components[i].component_id = frame_hdr->components[i].identifier;
+    pic_param->components[i].h_sampling_factor =
+        frame_hdr->components[i].horizontal_factor;
+    pic_param->components[i].v_sampling_factor =
+        frame_hdr->components[i].vertical_factor;
+    pic_param->components[i].quantiser_table_selector =
+        frame_hdr->components[i].quant_table_selector;
+  }
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-fill_quantization_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
-{
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    VAIQMatrixBufferJPEGBaseline *iq_matrix;
-    guint i, j, num_tables;
-
-    if (!VALID_STATE(decoder, GOT_IQ_TABLE))
-        gst_jpeg_get_default_quantization_tables(&priv->quant_tables);
-    
-    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
-    if (!picture->iq_matrix) {
-        GST_ERROR("failed to allocate quantiser table");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+fill_quantization_table (GstVaapiDecoderJpeg * decoder,
+    GstVaapiPicture * picture)
+{
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  VAIQMatrixBufferJPEGBaseline *iq_matrix;
+  guint i, j, num_tables;
+
+  if (!VALID_STATE (decoder, GOT_IQ_TABLE))
+    gst_jpeg_get_default_quantization_tables (&priv->quant_tables);
+
+  picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (JPEGBaseline, decoder);
+  if (!picture->iq_matrix) {
+    GST_ERROR ("failed to allocate quantiser table");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  iq_matrix = picture->iq_matrix->param;
+
+  num_tables = MIN (G_N_ELEMENTS (iq_matrix->quantiser_table),
+      GST_JPEG_MAX_QUANT_ELEMENTS);
+
+  for (i = 0; i < num_tables; i++) {
+    GstJpegQuantTable *const quant_table = &priv->quant_tables.quant_tables[i];
+
+    iq_matrix->load_quantiser_table[i] = quant_table->valid;
+    if (!iq_matrix->load_quantiser_table[i])
+      continue;
+
+    if (quant_table->quant_precision != 0) {
+      // Only Baseline profile is supported, thus 8-bit Qk values
+      GST_ERROR ("unsupported quantization table element precision");
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
     }
-    iq_matrix = picture->iq_matrix->param;
-
-    num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
-                     GST_JPEG_MAX_QUANT_ELEMENTS);
-
-    for (i = 0; i < num_tables; i++) {
-        GstJpegQuantTable * const quant_table =
-            &priv->quant_tables.quant_tables[i];
-
-        iq_matrix->load_quantiser_table[i] = quant_table->valid;
-        if (!iq_matrix->load_quantiser_table[i])
-            continue;
-
-        if (quant_table->quant_precision != 0) {
-            // Only Baseline profile is supported, thus 8-bit Qk values
-            GST_ERROR("unsupported quantization table element precision");
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
-        }
 
-        for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
-            iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
-        iq_matrix->load_quantiser_table[i] = 1;
-        quant_table->valid = FALSE;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
+      iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
+    iq_matrix->load_quantiser_table[i] = 1;
+    quant_table->valid = FALSE;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static gboolean
-huffman_tables_updated(const GstJpegHuffmanTables *huf_tables)
+huffman_tables_updated (const GstJpegHuffmanTables * huf_tables)
 {
-    guint i;
-
-    for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
-        if (huf_tables->dc_tables[i].valid)
-            return TRUE;
-    for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
-        if (huf_tables->ac_tables[i].valid)
-            return TRUE;
-    return FALSE;
+  guint i;
+
+  for (i = 0; i < G_N_ELEMENTS (huf_tables->dc_tables); i++)
+    if (huf_tables->dc_tables[i].valid)
+      return TRUE;
+  for (i = 0; i < G_N_ELEMENTS (huf_tables->ac_tables); i++)
+    if (huf_tables->ac_tables[i].valid)
+      return TRUE;
+  return FALSE;
 }
 
 static void
-huffman_tables_reset(GstJpegHuffmanTables *huf_tables)
+huffman_tables_reset (GstJpegHuffmanTables * huf_tables)
 {
-    guint i;
+  guint i;
 
-    for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
-        huf_tables->dc_tables[i].valid = FALSE;
-    for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
-        huf_tables->ac_tables[i].valid = FALSE;
+  for (i = 0; i < G_N_ELEMENTS (huf_tables->dc_tables); i++)
+    huf_tables->dc_tables[i].valid = FALSE;
+  for (i = 0; i < G_N_ELEMENTS (huf_tables->ac_tables); i++)
+    huf_tables->ac_tables[i].valid = FALSE;
 }
 
 static void
-fill_huffman_table(GstVaapiHuffmanTable *huf_table,
-    const GstJpegHuffmanTables *huf_tables)
-{
-    VAHuffmanTableBufferJPEGBaseline * const huffman_table = huf_table->param;
-    guint i, num_tables;
-
-    num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
-                     GST_JPEG_MAX_SCAN_COMPONENTS);
-
-    for (i = 0; i < num_tables; i++) {
-        huffman_table->load_huffman_table[i] =
-            huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
-        if (!huffman_table->load_huffman_table[i])
-            continue;
-
-        memcpy(huffman_table->huffman_table[i].num_dc_codes,
-               huf_tables->dc_tables[i].huf_bits,
-               sizeof(huffman_table->huffman_table[i].num_dc_codes));
-        memcpy(huffman_table->huffman_table[i].dc_values,
-               huf_tables->dc_tables[i].huf_values,
-               sizeof(huffman_table->huffman_table[i].dc_values));
-        memcpy(huffman_table->huffman_table[i].num_ac_codes,
-               huf_tables->ac_tables[i].huf_bits,
-               sizeof(huffman_table->huffman_table[i].num_ac_codes));
-        memcpy(huffman_table->huffman_table[i].ac_values,
-               huf_tables->ac_tables[i].huf_values,
-               sizeof(huffman_table->huffman_table[i].ac_values));
-        memset(huffman_table->huffman_table[i].pad,
-               0,
-               sizeof(huffman_table->huffman_table[i].pad));
-    }
+fill_huffman_table (GstVaapiHuffmanTable * huf_table,
+    const GstJpegHuffmanTables * huf_tables)
+{
+  VAHuffmanTableBufferJPEGBaseline *const huffman_table = huf_table->param;
+  guint i, num_tables;
+
+  num_tables = MIN (G_N_ELEMENTS (huffman_table->huffman_table),
+      GST_JPEG_MAX_SCAN_COMPONENTS);
+
+  for (i = 0; i < num_tables; i++) {
+    huffman_table->load_huffman_table[i] =
+        huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
+    if (!huffman_table->load_huffman_table[i])
+      continue;
+
+    memcpy (huffman_table->huffman_table[i].num_dc_codes,
+        huf_tables->dc_tables[i].huf_bits,
+        sizeof (huffman_table->huffman_table[i].num_dc_codes));
+    memcpy (huffman_table->huffman_table[i].dc_values,
+        huf_tables->dc_tables[i].huf_values,
+        sizeof (huffman_table->huffman_table[i].dc_values));
+    memcpy (huffman_table->huffman_table[i].num_ac_codes,
+        huf_tables->ac_tables[i].huf_bits,
+        sizeof (huffman_table->huffman_table[i].num_ac_codes));
+    memcpy (huffman_table->huffman_table[i].ac_values,
+        huf_tables->ac_tables[i].huf_values,
+        sizeof (huffman_table->huffman_table[i].ac_values));
+    memset (huffman_table->huffman_table[i].pad,
+        0, sizeof (huffman_table->huffman_table[i].pad));
+  }
 }
 
 static void
-get_max_sampling_factors(const GstJpegFrameHdr *frame_hdr,
-    guint *h_max_ptr, guint *v_max_ptr)
-{
-    guint h_max = frame_hdr->components[0].horizontal_factor;
-    guint v_max = frame_hdr->components[0].vertical_factor;
-    guint i;
-
-    for (i = 1; i < frame_hdr->num_components; i++) {
-        const GstJpegFrameComponent * const fcp = &frame_hdr->components[i];
-        if (h_max < fcp->horizontal_factor)
-            h_max = fcp->horizontal_factor;
-        if (v_max < fcp->vertical_factor)
-            v_max = fcp->vertical_factor;
-    }
-
-    if (h_max_ptr)
-        *h_max_ptr = h_max;
-    if (v_max_ptr)
-        *v_max_ptr = v_max;
+get_max_sampling_factors (const GstJpegFrameHdr * frame_hdr,
+    guint * h_max_ptr, guint * v_max_ptr)
+{
+  guint h_max = frame_hdr->components[0].horizontal_factor;
+  guint v_max = frame_hdr->components[0].vertical_factor;
+  guint i;
+
+  for (i = 1; i < frame_hdr->num_components; i++) {
+    const GstJpegFrameComponent *const fcp = &frame_hdr->components[i];
+    if (h_max < fcp->horizontal_factor)
+      h_max = fcp->horizontal_factor;
+    if (v_max < fcp->vertical_factor)
+      v_max = fcp->vertical_factor;
+  }
+
+  if (h_max_ptr)
+    *h_max_ptr = h_max;
+  if (v_max_ptr)
+    *v_max_ptr = v_max;
 }
 
 static const GstJpegFrameComponent *
-get_component(const GstJpegFrameHdr *frame_hdr, guint selector)
+get_component (const GstJpegFrameHdr * frame_hdr, guint selector)
 {
-    guint i;
-
-    for (i = 0; i < frame_hdr->num_components; i++) {
-        const GstJpegFrameComponent * const fcp = &frame_hdr->components[i];
-        if (fcp->identifier == selector)
-            return fcp;
-    }
-    return NULL;
+  guint i;
+
+  for (i = 0; i < frame_hdr->num_components; i++) {
+    const GstJpegFrameComponent *const fcp = &frame_hdr->components[i];
+    if (fcp->identifier == selector)
+      return fcp;
+  }
+  return NULL;
 }
 
 static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderJpeg *decoder, GstJpegSegment *seg)
+decode_picture (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstJpegFrameHdr *const frame_hdr = &priv->frame_hdr;
 
-    if (!VALID_STATE(decoder, GOT_SOI))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!VALID_STATE (decoder, GOT_SOI))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    switch (seg->marker) {
+  switch (seg->marker) {
     case GST_JPEG_MARKER_SOF_MIN:
-        priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
-        break;
+      priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
+      break;
     default:
-        GST_ERROR("unsupported profile %d", seg->marker);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    }
-
-    memset(frame_hdr, 0, sizeof(*frame_hdr));
-    if (!gst_jpeg_segment_parse_frame_header(seg, frame_hdr)) {
-        GST_ERROR("failed to parse image");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-    priv->height = frame_hdr->height;
-    priv->width  = frame_hdr->width;
-
-    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      GST_ERROR ("unsupported profile %d", seg->marker);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  }
+
+  memset (frame_hdr, 0, sizeof (*frame_hdr));
+  if (!gst_jpeg_segment_parse_frame_header (seg, frame_hdr)) {
+    GST_ERROR ("failed to parse image");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+  priv->height = frame_hdr->height;
+  priv->width = frame_hdr->width;
+
+  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_huffman_table(
-    GstVaapiDecoderJpeg *decoder,
-    GstJpegSegment *seg
-)
+decode_huffman_table (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
 
-    if (!VALID_STATE(decoder, GOT_SOI))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!VALID_STATE (decoder, GOT_SOI))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    if (!gst_jpeg_segment_parse_huffman_table(seg, &priv->huf_tables)) {
-        GST_ERROR("failed to parse Huffman table");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_jpeg_segment_parse_huffman_table (seg, &priv->huf_tables)) {
+    GST_ERROR ("failed to parse Huffman table");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_quant_table(
-    GstVaapiDecoderJpeg *decoder,
-    GstJpegSegment *seg
-)
+decode_quant_table (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
 
-    if (!VALID_STATE(decoder, GOT_SOI))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!VALID_STATE (decoder, GOT_SOI))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    if (!gst_jpeg_segment_parse_quantization_table(seg, &priv->quant_tables)) {
-        GST_ERROR("failed to parse quantization table");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_jpeg_segment_parse_quantization_table (seg, &priv->quant_tables)) {
+    GST_ERROR ("failed to parse quantization table");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_restart_interval(
-    GstVaapiDecoderJpeg *decoder,
-    GstJpegSegment *seg
-)
+decode_restart_interval (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-
-    if (!VALID_STATE(decoder, GOT_SOI))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
 
-    if (!gst_jpeg_segment_parse_restart_interval(seg, &priv->mcu_restart)) {
-        GST_ERROR("failed to parse restart interval");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!VALID_STATE (decoder, GOT_SOI))
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+  if (!gst_jpeg_segment_parse_restart_interval (seg, &priv->mcu_restart)) {
+    GST_ERROR ("failed to parse restart interval");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_scan(GstVaapiDecoderJpeg *decoder, GstJpegSegment *seg)
-{
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->current_picture;
-    GstVaapiSlice *slice;
-    VASliceParameterBufferJPEGBaseline *slice_param;
-    GstJpegScanHdr scan_hdr;
-    guint scan_hdr_size, scan_data_size;
-    guint i, h_max, v_max, mcu_width, mcu_height;
-
-    if (!VALID_STATE(decoder, GOT_SOF))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    scan_hdr_size = (seg->data[seg->offset] << 8) | seg->data[seg->offset + 1];
-    scan_data_size = seg->size - scan_hdr_size;
-
-    memset(&scan_hdr, 0, sizeof(scan_hdr));
-    if (!gst_jpeg_segment_parse_scan_header(seg, &scan_hdr)) {
-        GST_ERROR("failed to parse scan header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-
-    slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder,
-        seg->data + seg->offset + scan_hdr_size, scan_data_size);
-    if (!slice) {
-        GST_ERROR("failed to allocate slice");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    gst_vaapi_picture_add_slice(picture, slice);
-
-    if (!VALID_STATE(decoder, GOT_HUF_TABLE))
-        gst_jpeg_get_default_huffman_tables(&priv->huf_tables);
-
-    // Update VA Huffman table if it changed for this scan
-    if (huffman_tables_updated(&priv->huf_tables)) {
-        slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
-        if (!slice->huf_table) {
-            GST_ERROR("failed to allocate Huffman tables");
-            huffman_tables_reset(&priv->huf_tables);
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        }
-        fill_huffman_table(slice->huf_table, &priv->huf_tables);
-        huffman_tables_reset(&priv->huf_tables);
-    }
+decode_scan (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
+{
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->current_picture;
+  GstVaapiSlice *slice;
+  VASliceParameterBufferJPEGBaseline *slice_param;
+  GstJpegScanHdr scan_hdr;
+  guint scan_hdr_size, scan_data_size;
+  guint i, h_max, v_max, mcu_width, mcu_height;
+
+  if (!VALID_STATE (decoder, GOT_SOF))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    slice_param = slice->param;
-    slice_param->num_components = scan_hdr.num_components;
-    for (i = 0; i < scan_hdr.num_components; i++) {
-        slice_param->components[i].component_selector =
-            scan_hdr.components[i].component_selector;
-        slice_param->components[i].dc_table_selector =
-            scan_hdr.components[i].dc_selector;
-        slice_param->components[i].ac_table_selector =
-            scan_hdr.components[i].ac_selector;
+  scan_hdr_size = (seg->data[seg->offset] << 8) | seg->data[seg->offset + 1];
+  scan_data_size = seg->size - scan_hdr_size;
+
+  memset (&scan_hdr, 0, sizeof (scan_hdr));
+  if (!gst_jpeg_segment_parse_scan_header (seg, &scan_hdr)) {
+    GST_ERROR ("failed to parse scan header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+
+  slice = GST_VAAPI_SLICE_NEW (JPEGBaseline, decoder,
+      seg->data + seg->offset + scan_hdr_size, scan_data_size);
+  if (!slice) {
+    GST_ERROR ("failed to allocate slice");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  gst_vaapi_picture_add_slice (picture, slice);
+
+  if (!VALID_STATE (decoder, GOT_HUF_TABLE))
+    gst_jpeg_get_default_huffman_tables (&priv->huf_tables);
+
+  // Update VA Huffman table if it changed for this scan
+  if (huffman_tables_updated (&priv->huf_tables)) {
+    slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW (JPEGBaseline, decoder);
+    if (!slice->huf_table) {
+      GST_ERROR ("failed to allocate Huffman tables");
+      huffman_tables_reset (&priv->huf_tables);
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
     }
-    slice_param->restart_interval = priv->mcu_restart;
-    slice_param->slice_horizontal_position = 0;
-    slice_param->slice_vertical_position = 0;
-
-    get_max_sampling_factors(&priv->frame_hdr, &h_max, &v_max);
-    mcu_width = 8 * h_max;
-    mcu_height = 8 * v_max;
-
-    if (scan_hdr.num_components == 1) { // Non-interleaved
-        const guint Csj = slice_param->components[0].component_selector;
-        const GstJpegFrameComponent * const fcp =
-            get_component(&priv->frame_hdr, Csj);
-
-        if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
-            GST_ERROR("failed to validate image component %u", Csj);
-            return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
-        }
-        mcu_width /= fcp->horizontal_factor;
-        mcu_height /= fcp->vertical_factor;
+    fill_huffman_table (slice->huf_table, &priv->huf_tables);
+    huffman_tables_reset (&priv->huf_tables);
+  }
+
+  slice_param = slice->param;
+  slice_param->num_components = scan_hdr.num_components;
+  for (i = 0; i < scan_hdr.num_components; i++) {
+    slice_param->components[i].component_selector =
+        scan_hdr.components[i].component_selector;
+    slice_param->components[i].dc_table_selector =
+        scan_hdr.components[i].dc_selector;
+    slice_param->components[i].ac_table_selector =
+        scan_hdr.components[i].ac_selector;
+  }
+  slice_param->restart_interval = priv->mcu_restart;
+  slice_param->slice_horizontal_position = 0;
+  slice_param->slice_vertical_position = 0;
+
+  get_max_sampling_factors (&priv->frame_hdr, &h_max, &v_max);
+  mcu_width = 8 * h_max;
+  mcu_height = 8 * v_max;
+
+  if (scan_hdr.num_components == 1) {   // Non-interleaved
+    const guint Csj = slice_param->components[0].component_selector;
+    const GstJpegFrameComponent *const fcp =
+        get_component (&priv->frame_hdr, Csj);
+
+    if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
+      GST_ERROR ("failed to validate image component %u", Csj);
+      return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
     }
-    slice_param->num_mcus =
-        ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
-        ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);
-
-    priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    mcu_width /= fcp->horizontal_factor;
+    mcu_height /= fcp->vertical_factor;
+  }
+  slice_param->num_mcus =
+      ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
+      ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);
+
+  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_segment(GstVaapiDecoderJpeg *decoder, GstJpegSegment *seg)
+decode_segment (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
 
-    // Decode segment
-    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-    switch (seg->marker) {
+  // Decode segment
+  status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+  switch (seg->marker) {
     case GST_JPEG_MARKER_SOI:
-        priv->mcu_restart = 0;
-        priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
-        break;
+      priv->mcu_restart = 0;
+      priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
+      break;
     case GST_JPEG_MARKER_EOI:
-        priv->decoder_state = 0;
-        break;
+      priv->decoder_state = 0;
+      break;
     case GST_JPEG_MARKER_DAC:
-        GST_ERROR("unsupported arithmetic coding mode");
-        status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-        break;
+      GST_ERROR ("unsupported arithmetic coding mode");
+      status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+      break;
     case GST_JPEG_MARKER_DHT:
-        status = decode_huffman_table(decoder, seg);
-        break;
+      status = decode_huffman_table (decoder, seg);
+      break;
     case GST_JPEG_MARKER_DQT:
-        status = decode_quant_table(decoder, seg);
-        break;
+      status = decode_quant_table (decoder, seg);
+      break;
     case GST_JPEG_MARKER_DRI:
-        status = decode_restart_interval(decoder, seg);
-        break;
+      status = decode_restart_interval (decoder, seg);
+      break;
     case GST_JPEG_MARKER_SOS:
-        status = decode_scan(decoder, seg);
-        break;
+      status = decode_scan (decoder, seg);
+      break;
     default:
-        // SOFn segments
-        if (seg->marker >= GST_JPEG_MARKER_SOF_MIN &&
-            seg->marker <= GST_JPEG_MARKER_SOF_MAX)
-            status = decode_picture(decoder, seg);
-        break;
-    }
-    return status;
+      // SOFn segments
+      if (seg->marker >= GST_JPEG_MARKER_SOF_MIN &&
+          seg->marker <= GST_JPEG_MARKER_SOF_MAX)
+        status = decode_picture (decoder, seg);
+      break;
+  }
+  return status;
 }
 
 static GstVaapiDecoderStatus
-ensure_decoder(GstVaapiDecoderJpeg *decoder)
+ensure_decoder (GstVaapiDecoderJpeg * decoder)
 {
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-
-    if (!priv->is_opened) {
-        priv->is_opened = gst_vaapi_decoder_jpeg_open(decoder);
-        if (!priv->is_opened)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+
+  if (!priv->is_opened) {
+    priv->is_opened = gst_vaapi_decoder_jpeg_open (decoder);
+    if (!priv->is_opened)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static gboolean
-is_scan_complete(GstJpegMarker marker)
+is_scan_complete (GstJpegMarker marker)
 {
-    // Scan is assumed to be complete when the new segment is not RSTi
-    return marker < GST_JPEG_MARKER_RST_MIN || marker > GST_JPEG_MARKER_RST_MAX;
+  // Scan is assumed to be complete when the new segment is not RSTi
+  return marker < GST_JPEG_MARKER_RST_MIN || marker > GST_JPEG_MARKER_RST_MAX;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_jpeg_parse(GstVaapiDecoder *base_decoder,
-    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
-{
-    GstVaapiDecoderJpeg * const decoder =
-        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
-    GstVaapiDecoderStatus status;
-    GstJpegMarker marker;
-    GstJpegSegment seg;
-    const guchar *buf;
-    guint buf_size, flags;
-    gint ofs1, ofs2;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-
-    /* Expect at least 2 bytes for the marker */
-    buf_size = gst_adapter_available(adapter);
-    if (buf_size < 2)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-    buf = gst_adapter_map(adapter, buf_size);
-    if (!buf)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-    ofs1 = ps->input_offset1 - 2;
-    if (ofs1 < 0)
-        ofs1 = 0;
+gst_vaapi_decoder_jpeg_parse (GstVaapiDecoder * base_decoder,
+    GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
+{
+  GstVaapiDecoderJpeg *const decoder =
+      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstVaapiParserState *const ps = GST_VAAPI_PARSER_STATE (base_decoder);
+  GstVaapiDecoderStatus status;
+  GstJpegMarker marker;
+  GstJpegSegment seg;
+  const guchar *buf;
+  guint buf_size, flags;
+  gint ofs1, ofs2;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    for (;;) {
-        // Skip any garbage until we reach SOI, if needed
-        if (!gst_jpeg_parse(&seg, buf, buf_size, ofs1)) {
-            gst_adapter_unmap(adapter);
-            ps->input_offset1 = buf_size;
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        }
-        ofs1 = seg.offset;
-
-        marker = seg.marker;
-        if (!VALID_STATE(parser, GOT_SOI) && marker != GST_JPEG_MARKER_SOI)
-            continue;
-        if (marker == GST_JPEG_MARKER_SOS) {
-            ofs2 = ps->input_offset2 - 2;
-            if (ofs2 < ofs1 + seg.size)
-                ofs2 = ofs1 + seg.size;
-
-            // Parse the whole scan + ECSs, including RSTi
-            for (;;) {
-                if (!gst_jpeg_parse(&seg, buf, buf_size, ofs2)) {
-                    gst_adapter_unmap(adapter);
-                    ps->input_offset1 = ofs1;
-                    ps->input_offset2 = buf_size;
-                    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-                }
-
-                if (is_scan_complete(seg.marker))
-                    break;
-                ofs2 = seg.offset + seg.size;
-            }
-            ofs2 = seg.offset - 2;
-        }
-        else {
-            // Check that the whole segment is actually available (in buffer)
-            ofs2 = ofs1 + seg.size;
-            if (ofs2 > buf_size) {
-                gst_adapter_unmap(adapter);
-                ps->input_offset1 = ofs1;
-                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-            }
-        }
-        break;
+  /* Expect at least 2 bytes for the marker */
+  buf_size = gst_adapter_available (adapter);
+  if (buf_size < 2)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  buf = gst_adapter_map (adapter, buf_size);
+  if (!buf)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  ofs1 = ps->input_offset1 - 2;
+  if (ofs1 < 0)
+    ofs1 = 0;
+
+  for (;;) {
+    // Skip any garbage until we reach SOI, if needed
+    if (!gst_jpeg_parse (&seg, buf, buf_size, ofs1)) {
+      gst_adapter_unmap (adapter);
+      ps->input_offset1 = buf_size;
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
     }
-    gst_adapter_unmap(adapter);
-
-    unit->size = ofs2 - ofs1;
-    unit_set_marker_code(unit, marker);
-    gst_adapter_flush(adapter, ofs1);
-    ps->input_offset1 = 2;
-    ps->input_offset2 = 2;
+    ofs1 = seg.offset;
+
+    marker = seg.marker;
+    if (!VALID_STATE (parser, GOT_SOI) && marker != GST_JPEG_MARKER_SOI)
+      continue;
+    if (marker == GST_JPEG_MARKER_SOS) {
+      ofs2 = ps->input_offset2 - 2;
+      if (ofs2 < ofs1 + seg.size)
+        ofs2 = ofs1 + seg.size;
+
+      // Parse the whole scan + ECSs, including RSTi
+      for (;;) {
+        if (!gst_jpeg_parse (&seg, buf, buf_size, ofs2)) {
+          gst_adapter_unmap (adapter);
+          ps->input_offset1 = ofs1;
+          ps->input_offset2 = buf_size;
+          return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+        }
 
-    flags = 0;
-    switch (marker) {
+        if (is_scan_complete (seg.marker))
+          break;
+        ofs2 = seg.offset + seg.size;
+      }
+      ofs2 = seg.offset - 2;
+    } else {
+      // Check that the whole segment is actually available (in buffer)
+      ofs2 = ofs1 + seg.size;
+      if (ofs2 > buf_size) {
+        gst_adapter_unmap (adapter);
+        ps->input_offset1 = ofs1;
+        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+      }
+    }
+    break;
+  }
+  gst_adapter_unmap (adapter);
+
+  unit->size = ofs2 - ofs1;
+  unit_set_marker_code (unit, marker);
+  gst_adapter_flush (adapter, ofs1);
+  ps->input_offset1 = 2;
+  ps->input_offset2 = 2;
+
+  flags = 0;
+  switch (marker) {
     case GST_JPEG_MARKER_SOI:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
+      break;
     case GST_JPEG_MARKER_EOI:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-        priv->parser_state = 0;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+      priv->parser_state = 0;
+      break;
     case GST_JPEG_MARKER_SOS:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
+      break;
     case GST_JPEG_MARKER_DAC:
     case GST_JPEG_MARKER_DHT:
     case GST_JPEG_MARKER_DQT:
-        if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOF)
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        break;
+      if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOF)
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      break;
     case GST_JPEG_MARKER_DRI:
-        if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOS)
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        break;
-    case GST_JPEG_MARKER_DNL:
+      if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOS)
         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        break;
+      break;
+    case GST_JPEG_MARKER_DNL:
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      break;
     case GST_JPEG_MARKER_COM:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      break;
     default:
-        /* SOFn segments */
-        if (marker >= GST_JPEG_MARKER_SOF_MIN &&
-            marker <= GST_JPEG_MARKER_SOF_MAX)
-            priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
-
-        /* Application segments */
-        else if (marker >= GST_JPEG_MARKER_APP_MIN &&
-                 marker <= GST_JPEG_MARKER_APP_MAX)
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-
-        /* Reserved */
-        else if (marker >= 0x02 && marker <= 0xbf)
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        break;
-    }
-    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      /* SOFn segments */
+      if (marker >= GST_JPEG_MARKER_SOF_MIN &&
+          marker <= GST_JPEG_MARKER_SOF_MAX)
+        priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
+
+      /* Application segments */
+      else if (marker >= GST_JPEG_MARKER_APP_MIN &&
+          marker <= GST_JPEG_MARKER_APP_MAX)
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+
+      /* Reserved */
+      else if (marker >= 0x02 && marker <= 0xbf)
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      break;
+  }
+  GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, flags);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_jpeg_decode(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
-{
-    GstVaapiDecoderJpeg * const decoder =
-        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
-    GstVaapiDecoderStatus status;
-    GstJpegSegment seg;
-    GstBuffer * const buffer =
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
-    GstMapInfo map_info;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-
-    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
-        GST_ERROR("failed to map buffer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+gst_vaapi_decoder_jpeg_decode (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
+{
+  GstVaapiDecoderJpeg *const decoder =
+      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
+  GstVaapiDecoderStatus status;
+  GstJpegSegment seg;
+  GstBuffer *const buffer =
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
+  GstMapInfo map_info;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    seg.marker = unit_get_marker_code(unit);
-    seg.data = map_info.data;
-    seg.offset = unit->offset;
-    seg.size = unit->size;
+  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
+    GST_ERROR ("failed to map buffer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    status = decode_segment(decoder, &seg);
-    gst_buffer_unmap(buffer, &map_info);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  seg.marker = unit_get_marker_code (unit);
+  seg.data = map_info.data;
+  seg.offset = unit->offset;
+  seg.size = unit->size;
+
+  status = decode_segment (decoder, &seg);
+  gst_buffer_unmap (buffer, &map_info);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_jpeg_start_frame(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *base_unit)
-{
-    GstVaapiDecoderJpeg * const decoder =
-        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
-    GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
-    GstVaapiPicture *picture;
-    GstVaapiDecoderStatus status;
-
-    if (!VALID_STATE(decoder, GOT_SOF))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    status = ensure_context(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_ERROR("failed to reset context");
-        return status;
-    }
+gst_vaapi_decoder_jpeg_start_frame (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * base_unit)
+{
+  GstVaapiDecoderJpeg *const decoder =
+      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
+  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
+  GstVaapiPicture *picture;
+  GstVaapiDecoderStatus status;
 
-    picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
-    if (!picture) {
-        GST_ERROR("failed to allocate picture");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    gst_vaapi_picture_replace(&priv->current_picture, picture);
-    gst_vaapi_picture_unref(picture);
+  if (!VALID_STATE (decoder, GOT_SOF))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    if (!fill_picture(decoder, picture, &priv->frame_hdr))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  status = ensure_context (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_ERROR ("failed to reset context");
+    return status;
+  }
 
-    status = fill_quantization_table(decoder, picture);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+  picture = GST_VAAPI_PICTURE_NEW (JPEGBaseline, decoder);
+  if (!picture) {
+    GST_ERROR ("failed to allocate picture");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  gst_vaapi_picture_replace (&priv->current_picture, picture);
+  gst_vaapi_picture_unref (picture);
 
-    /* Update presentation time */
-    picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!fill_picture (decoder, picture, &priv->frame_hdr))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+  status = fill_quantization_table (decoder, picture);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+
+  /* Update presentation time */
+  picture->pts = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_jpeg_end_frame(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_jpeg_end_frame (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderJpeg * const decoder =
-        GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
+  GstVaapiDecoderJpeg *const decoder =
+      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
 
-    return decode_current_picture(decoder);
+  return decode_current_picture (decoder);
 }
 
 static void
-gst_vaapi_decoder_jpeg_class_init(GstVaapiDecoderJpegClass *klass)
+gst_vaapi_decoder_jpeg_class_init (GstVaapiDecoderJpegClass * klass)
 {
-    GstVaapiMiniObjectClass * const object_class =
-        GST_VAAPI_MINI_OBJECT_CLASS(klass);
-    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
-
-    object_class->size          = sizeof(GstVaapiDecoderJpeg);
-    object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
-
-    decoder_class->create       = gst_vaapi_decoder_jpeg_create;
-    decoder_class->destroy      = gst_vaapi_decoder_jpeg_destroy;
-    decoder_class->parse        = gst_vaapi_decoder_jpeg_parse;
-    decoder_class->decode       = gst_vaapi_decoder_jpeg_decode;
-    decoder_class->start_frame  = gst_vaapi_decoder_jpeg_start_frame;
-    decoder_class->end_frame    = gst_vaapi_decoder_jpeg_end_frame;
+  GstVaapiMiniObjectClass *const object_class =
+      GST_VAAPI_MINI_OBJECT_CLASS (klass);
+  GstVaapiDecoderClass *const decoder_class = GST_VAAPI_DECODER_CLASS (klass);
+
+  object_class->size = sizeof (GstVaapiDecoderJpeg);
+  object_class->finalize = (GDestroyNotify) gst_vaapi_decoder_finalize;
+
+  decoder_class->create = gst_vaapi_decoder_jpeg_create;
+  decoder_class->destroy = gst_vaapi_decoder_jpeg_destroy;
+  decoder_class->parse = gst_vaapi_decoder_jpeg_parse;
+  decoder_class->decode = gst_vaapi_decoder_jpeg_decode;
+  decoder_class->start_frame = gst_vaapi_decoder_jpeg_start_frame;
+  decoder_class->end_frame = gst_vaapi_decoder_jpeg_end_frame;
 }
 
 static inline const GstVaapiDecoderClass *
-gst_vaapi_decoder_jpeg_class(void)
+gst_vaapi_decoder_jpeg_class (void)
 {
-    static GstVaapiDecoderJpegClass g_class;
-    static gsize g_class_init = FALSE;
-
-    if (g_once_init_enter(&g_class_init)) {
-        gst_vaapi_decoder_jpeg_class_init(&g_class);
-        g_once_init_leave(&g_class_init, TRUE);
-    }
-    return GST_VAAPI_DECODER_CLASS(&g_class);
+  static GstVaapiDecoderJpegClass g_class;
+  static gsize g_class_init = FALSE;
+
+  if (g_once_init_enter (&g_class_init)) {
+    gst_vaapi_decoder_jpeg_class_init (&g_class);
+    g_once_init_leave (&g_class_init, TRUE);
+  }
+  return GST_VAAPI_DECODER_CLASS (&g_class);
 }
 
 /**
@@ -900,7 +885,7 @@ gst_vaapi_decoder_jpeg_class(void)
  * Return value: the newly allocated #GstVaapiDecoder object
  */
 GstVaapiDecoder *
-gst_vaapi_decoder_jpeg_new(GstVaapiDisplay *display, GstCaps *caps)
+gst_vaapi_decoder_jpeg_new (GstVaapiDisplay * display, GstCaps * caps)
 {
-    return gst_vaapi_decoder_new(gst_vaapi_decoder_jpeg_class(), display, caps);
+  return gst_vaapi_decoder_new (gst_vaapi_decoder_jpeg_class (), display, caps);
 }
index 6dc9f0c..b828a9f 100644 (file)
 /* ------------------------------------------------------------------------- */
 
 typedef struct _PTSGenerator PTSGenerator;
-struct _PTSGenerator {
-    GstClockTime        gop_pts; // Current GOP PTS
-    GstClockTime        max_pts; // Max picture PTS
-    guint               gop_tsn; // Absolute GOP TSN
-    guint               max_tsn; // Max picture TSN, relative to last GOP TSN
-    guint               ovl_tsn; // How many times TSN overflowed since GOP
-    guint               lst_tsn; // Last picture TSN
-    guint               fps_n;
-    guint               fps_d;
+struct _PTSGenerator
+{
+  GstClockTime gop_pts;         // Current GOP PTS
+  GstClockTime max_pts;         // Max picture PTS
+  guint gop_tsn;                // Absolute GOP TSN
+  guint max_tsn;                // Max picture TSN, relative to last GOP TSN
+  guint ovl_tsn;                // How many times TSN overflowed since GOP
+  guint lst_tsn;                // Last picture TSN
+  guint fps_n;
+  guint fps_d;
 };
 
 static void
-pts_init(PTSGenerator *tsg)
+pts_init (PTSGenerator * tsg)
 {
-    tsg->gop_pts = GST_CLOCK_TIME_NONE;
-    tsg->max_pts = GST_CLOCK_TIME_NONE;
-    tsg->gop_tsn = 0;
-    tsg->max_tsn = 0;
-    tsg->ovl_tsn = 0;
-    tsg->lst_tsn = 0;
-    tsg->fps_n   = 0;
-    tsg->fps_d   = 0;
+  tsg->gop_pts = GST_CLOCK_TIME_NONE;
+  tsg->max_pts = GST_CLOCK_TIME_NONE;
+  tsg->gop_tsn = 0;
+  tsg->max_tsn = 0;
+  tsg->ovl_tsn = 0;
+  tsg->lst_tsn = 0;
+  tsg->fps_n = 0;
+  tsg->fps_d = 0;
 }
 
 static inline GstClockTime
-pts_get_duration(PTSGenerator *tsg, guint num_frames)
+pts_get_duration (PTSGenerator * tsg, guint num_frames)
 {
-    return gst_util_uint64_scale(num_frames,
-                                 GST_SECOND * tsg->fps_d, tsg->fps_n);
+  return gst_util_uint64_scale (num_frames,
+      GST_SECOND * tsg->fps_d, tsg->fps_n);
 }
 
 static inline guint
-pts_get_poc(PTSGenerator *tsg)
+pts_get_poc (PTSGenerator * tsg)
 {
-    return tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->lst_tsn;
+  return tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->lst_tsn;
 }
 
 static void
-pts_set_framerate(PTSGenerator *tsg, guint fps_n, guint fps_d)
+pts_set_framerate (PTSGenerator * tsg, guint fps_n, guint fps_d)
 {
-    tsg->fps_n = fps_n;
-    tsg->fps_d = fps_d;
+  tsg->fps_n = fps_n;
+  tsg->fps_d = fps_d;
 }
 
 static void
-pts_sync(PTSGenerator *tsg, GstClockTime gop_pts)
+pts_sync (PTSGenerator * tsg, GstClockTime gop_pts)
 {
-    guint gop_tsn;
-
-    if (!GST_CLOCK_TIME_IS_VALID(gop_pts) ||
-        (GST_CLOCK_TIME_IS_VALID(tsg->max_pts) && tsg->max_pts >= gop_pts)) {
-        /* Invalid GOP PTS, interpolate from the last known picture PTS */
-        if (GST_CLOCK_TIME_IS_VALID(tsg->max_pts)) {
-            gop_pts = tsg->max_pts + pts_get_duration(tsg, 1);
-            gop_tsn = tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->max_tsn + 1;
-        }
-        else {
-            gop_pts = 0;
-            gop_tsn = 0;
-        }
-    }
-    else {
-        /* Interpolate GOP TSN from this valid PTS */
-        if (GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
-            gop_tsn = tsg->gop_tsn + gst_util_uint64_scale(
-                gop_pts - tsg->gop_pts + pts_get_duration(tsg, 1) - 1,
-                tsg->fps_n, GST_SECOND * tsg->fps_d);
-        else
-            gop_tsn = 0;
+  guint gop_tsn;
+
+  if (!GST_CLOCK_TIME_IS_VALID (gop_pts) ||
+      (GST_CLOCK_TIME_IS_VALID (tsg->max_pts) && tsg->max_pts >= gop_pts)) {
+    /* Invalid GOP PTS, interpolate from the last known picture PTS */
+    if (GST_CLOCK_TIME_IS_VALID (tsg->max_pts)) {
+      gop_pts = tsg->max_pts + pts_get_duration (tsg, 1);
+      gop_tsn = tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->max_tsn + 1;
+    } else {
+      gop_pts = 0;
+      gop_tsn = 0;
     }
-
-    tsg->gop_pts = gop_pts;
-    tsg->gop_tsn = gop_tsn;
-    tsg->max_tsn = 0;
-    tsg->ovl_tsn = 0;
-    tsg->lst_tsn = 0;
+  } else {
+    /* Interpolate GOP TSN from this valid PTS */
+    if (GST_CLOCK_TIME_IS_VALID (tsg->gop_pts))
+      gop_tsn =
+          tsg->gop_tsn + gst_util_uint64_scale (gop_pts - tsg->gop_pts +
+          pts_get_duration (tsg, 1) - 1, tsg->fps_n, GST_SECOND * tsg->fps_d);
+    else
+      gop_tsn = 0;
+  }
+
+  tsg->gop_pts = gop_pts;
+  tsg->gop_tsn = gop_tsn;
+  tsg->max_tsn = 0;
+  tsg->ovl_tsn = 0;
+  tsg->lst_tsn = 0;
 }
 
 static GstClockTime
-pts_eval(PTSGenerator *tsg, GstClockTime pic_pts, guint pic_tsn)
+pts_eval (PTSGenerator * tsg, GstClockTime pic_pts, guint pic_tsn)
 {
-    GstClockTime pts;
-
-    if (!GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
-        tsg->gop_pts = pts_get_duration(tsg, pic_tsn);
-
-    pts = pic_pts;
-    if (!GST_CLOCK_TIME_IS_VALID (pts))
-       pts = tsg->gop_pts + pts_get_duration(tsg, tsg->ovl_tsn * 1024 + pic_tsn);
-    else if (pts == tsg->gop_pts) {
-        /* The picture following the GOP header shall be an I-frame.
-           So we can compensate for the GOP start time from here */
-        tsg->gop_pts -= pts_get_duration(tsg, pic_tsn);
-    }
-
-    if (!GST_CLOCK_TIME_IS_VALID(tsg->max_pts) || tsg->max_pts < pts)
-        tsg->max_pts = pts;
-
-    if (tsg->max_tsn < pic_tsn)
-        tsg->max_tsn = pic_tsn;
-    else if (tsg->max_tsn == 1023 && pic_tsn < tsg->lst_tsn) { /* TSN wrapped */
-        tsg->max_tsn = pic_tsn;
-        tsg->ovl_tsn++;
-    }
-    tsg->lst_tsn = pic_tsn;
-
-    return pts;
+  GstClockTime pts;
+
+  if (!GST_CLOCK_TIME_IS_VALID (tsg->gop_pts))
+    tsg->gop_pts = pts_get_duration (tsg, pic_tsn);
+
+  pts = pic_pts;
+  if (!GST_CLOCK_TIME_IS_VALID (pts))
+    pts = tsg->gop_pts + pts_get_duration (tsg, tsg->ovl_tsn * 1024 + pic_tsn);
+  else if (pts == tsg->gop_pts) {
+    /* The picture following the GOP header shall be an I-frame.
+       So we can compensate for the GOP start time from here */
+    tsg->gop_pts -= pts_get_duration (tsg, pic_tsn);
+  }
+
+  if (!GST_CLOCK_TIME_IS_VALID (tsg->max_pts) || tsg->max_pts < pts)
+    tsg->max_pts = pts;
+
+  if (tsg->max_tsn < pic_tsn)
+    tsg->max_tsn = pic_tsn;
+  else if (tsg->max_tsn == 1023 && pic_tsn < tsg->lst_tsn) {    /* TSN wrapped */
+    tsg->max_tsn = pic_tsn;
+    tsg->ovl_tsn++;
+  }
+  tsg->lst_tsn = pic_tsn;
+
+  return pts;
 }
 
 /* ------------------------------------------------------------------------- */
@@ -158,50 +157,52 @@ pts_eval(PTSGenerator *tsg, GstClockTime pic_pts, guint pic_tsn)
 /* ------------------------------------------------------------------------- */
 
 typedef struct _GstVaapiParserInfoMpeg2 GstVaapiParserInfoMpeg2;
-struct _GstVaapiParserInfoMpeg2 {
-    GstVaapiMiniObject  parent_instance;
-    GstMpegVideoPacket  packet;
-    guint8              extension_type; /* for Extension packets */
-    union {
-        GstMpegVideoSequenceHdr         seq_hdr;
-        GstMpegVideoSequenceExt         seq_ext;
-        GstMpegVideoSequenceDisplayExt  seq_display_ext;
-        GstMpegVideoSequenceScalableExt seq_scalable_ext;
-        GstMpegVideoGop                 gop;
-        GstMpegVideoQuantMatrixExt      quant_matrix;
-        GstMpegVideoPictureHdr          pic_hdr;
-        GstMpegVideoPictureExt          pic_ext;
-        GstMpegVideoSliceHdr            slice_hdr;
-    }                                   data;
+struct _GstVaapiParserInfoMpeg2
+{
+  GstVaapiMiniObject parent_instance;
+  GstMpegVideoPacket packet;
+  guint8 extension_type;        /* for Extension packets */
+  union
+  {
+    GstMpegVideoSequenceHdr seq_hdr;
+    GstMpegVideoSequenceExt seq_ext;
+    GstMpegVideoSequenceDisplayExt seq_display_ext;
+    GstMpegVideoSequenceScalableExt seq_scalable_ext;
+    GstMpegVideoGop gop;
+    GstMpegVideoQuantMatrixExt quant_matrix;
+    GstMpegVideoPictureHdr pic_hdr;
+    GstMpegVideoPictureExt pic_ext;
+    GstMpegVideoSliceHdr slice_hdr;
+  } data;
 };
 
 static inline const GstVaapiMiniObjectClass *
-gst_vaapi_parser_info_mpeg2_class(void)
+gst_vaapi_parser_info_mpeg2_class (void)
 {
-    static const GstVaapiMiniObjectClass GstVaapiParserInfoMpeg2Class = {
-        sizeof(GstVaapiParserInfoMpeg2),
-        NULL
-    };
-    return &GstVaapiParserInfoMpeg2Class;
+  static const GstVaapiMiniObjectClass GstVaapiParserInfoMpeg2Class = {
+    sizeof (GstVaapiParserInfoMpeg2),
+    NULL
+  };
+  return &GstVaapiParserInfoMpeg2Class;
 }
 
 static inline GstVaapiParserInfoMpeg2 *
-gst_vaapi_parser_info_mpeg2_new(void)
+gst_vaapi_parser_info_mpeg2_new (void)
 {
-    return (GstVaapiParserInfoMpeg2 *)
-        gst_vaapi_mini_object_new(gst_vaapi_parser_info_mpeg2_class());
+  return (GstVaapiParserInfoMpeg2 *)
+      gst_vaapi_mini_object_new (gst_vaapi_parser_info_mpeg2_class ());
 }
 
 static inline GstVaapiParserInfoMpeg2 *
-gst_vaapi_parser_info_mpeg2_ensure(GstVaapiParserInfoMpeg2 **pi_ptr)
+gst_vaapi_parser_info_mpeg2_ensure (GstVaapiParserInfoMpeg2 ** pi_ptr)
 {
-    GstVaapiParserInfoMpeg2 *pi = *pi_ptr;
+  GstVaapiParserInfoMpeg2 *pi = *pi_ptr;
 
-    if (G_LIKELY(pi != NULL))
-        return pi;
-
-    *pi_ptr = pi = gst_vaapi_parser_info_mpeg2_new();
+  if (G_LIKELY (pi != NULL))
     return pi;
+
+  *pi_ptr = pi = gst_vaapi_parser_info_mpeg2_new ();
+  return pi;
 }
 
 #define gst_vaapi_parser_info_mpeg2_ref(pi) \
@@ -221,57 +222,55 @@ gst_vaapi_parser_info_mpeg2_ensure(GstVaapiParserInfoMpeg2 **pi_ptr)
 #define GST_VAAPI_DECODER_MPEG2_CAST(decoder) \
     ((GstVaapiDecoderMpeg2 *)(decoder))
 
-typedef struct _GstVaapiDecoderMpeg2Private     GstVaapiDecoderMpeg2Private;
-typedef struct _GstVaapiDecoderMpeg2Class       GstVaapiDecoderMpeg2Class;
-
-typedef enum {
-    GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR    = 1 << 0,
-    GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT    = 1 << 1,
-    GST_MPEG_VIDEO_STATE_GOT_PIC_HDR    = 1 << 2,
-    GST_MPEG_VIDEO_STATE_GOT_PIC_EXT    = 1 << 3,
-    GST_MPEG_VIDEO_STATE_GOT_SLICE      = 1 << 4,
-
-    GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS = (
-        GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
-        GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT),
-    GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS = (
-        GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
-        GST_MPEG_VIDEO_STATE_GOT_PIC_EXT),
-    GST_MPEG_VIDEO_STATE_VALID_PICTURE = (
-        GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS|
-        GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS|
-        GST_MPEG_VIDEO_STATE_GOT_SLICE)
+typedef struct _GstVaapiDecoderMpeg2Private GstVaapiDecoderMpeg2Private;
+typedef struct _GstVaapiDecoderMpeg2Class GstVaapiDecoderMpeg2Class;
+
+typedef enum
+{
+  GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR = 1 << 0,
+  GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT = 1 << 1,
+  GST_MPEG_VIDEO_STATE_GOT_PIC_HDR = 1 << 2,
+  GST_MPEG_VIDEO_STATE_GOT_PIC_EXT = 1 << 3,
+  GST_MPEG_VIDEO_STATE_GOT_SLICE = 1 << 4,
+
+  GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS = (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
+      GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT),
+  GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS = (GST_MPEG_VIDEO_STATE_GOT_PIC_HDR |
+      GST_MPEG_VIDEO_STATE_GOT_PIC_EXT),
+  GST_MPEG_VIDEO_STATE_VALID_PICTURE = (GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS |
+      GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS | GST_MPEG_VIDEO_STATE_GOT_SLICE)
 } GstMpegVideoState;
 
-struct _GstVaapiDecoderMpeg2Private {
-    GstVaapiProfile             profile;
-    GstVaapiProfile             hw_profile;
-    guint                       width;
-    guint                       height;
-    guint                       fps_n;
-    guint                       fps_d;
-    guint                       state;
-    GstVaapiRectangle           crop_rect;
-    GstVaapiParserInfoMpeg2    *seq_hdr;
-    GstVaapiParserInfoMpeg2    *seq_ext;
-    GstVaapiParserInfoMpeg2    *seq_display_ext;
-    GstVaapiParserInfoMpeg2    *seq_scalable_ext;
-    GstVaapiParserInfoMpeg2    *gop;
-    GstVaapiParserInfoMpeg2    *pic_hdr;
-    GstVaapiParserInfoMpeg2    *pic_ext;
-    GstVaapiParserInfoMpeg2    *pic_display_ext;
-    GstVaapiParserInfoMpeg2    *quant_matrix;
-    GstVaapiParserInfoMpeg2    *slice_hdr;
-    GstVaapiPicture            *current_picture;
-    GstVaapiDpb                *dpb;
-    PTSGenerator                tsg;
-    guint                       is_opened               : 1;
-    guint                       size_changed            : 1;
-    guint                       profile_changed         : 1;
-    guint                       quant_matrix_changed    : 1;
-    guint                       progressive_sequence    : 1;
-    guint                       closed_gop              : 1;
-    guint                       broken_link             : 1;
+struct _GstVaapiDecoderMpeg2Private
+{
+  GstVaapiProfile profile;
+  GstVaapiProfile hw_profile;
+  guint width;
+  guint height;
+  guint fps_n;
+  guint fps_d;
+  guint state;
+  GstVaapiRectangle crop_rect;
+  GstVaapiParserInfoMpeg2 *seq_hdr;
+  GstVaapiParserInfoMpeg2 *seq_ext;
+  GstVaapiParserInfoMpeg2 *seq_display_ext;
+  GstVaapiParserInfoMpeg2 *seq_scalable_ext;
+  GstVaapiParserInfoMpeg2 *gop;
+  GstVaapiParserInfoMpeg2 *pic_hdr;
+  GstVaapiParserInfoMpeg2 *pic_ext;
+  GstVaapiParserInfoMpeg2 *pic_display_ext;
+  GstVaapiParserInfoMpeg2 *quant_matrix;
+  GstVaapiParserInfoMpeg2 *slice_hdr;
+  GstVaapiPicture *current_picture;
+  GstVaapiDpb *dpb;
+  PTSGenerator tsg;
+  guint is_opened:1;
+  guint size_changed:1;
+  guint profile_changed:1;
+  guint quant_matrix_changed:1;
+  guint progressive_sequence:1;
+  guint closed_gop:1;
+  guint broken_link:1;
 };
 
 /**
@@ -279,10 +278,11 @@ struct _GstVaapiDecoderMpeg2Private {
  *
  * A decoder based on Mpeg2.
  */
-struct _GstVaapiDecoderMpeg2 {
-    /*< private >*/
-    GstVaapiDecoder             parent_instance;
-    GstVaapiDecoderMpeg2Private priv;
+struct _GstVaapiDecoderMpeg2
+{
+  /*< private > */
+  GstVaapiDecoder parent_instance;
+  GstVaapiDecoderMpeg2Private priv;
 };
 
 /**
@@ -290,1299 +290,1303 @@ struct _GstVaapiDecoderMpeg2 {
  *
  * A decoder class based on Mpeg2.
  */
-struct _GstVaapiDecoderMpeg2Class {
-    /*< private >*/
-    GstVaapiDecoderClass parent_class;
+struct _GstVaapiDecoderMpeg2Class
+{
+  /*< private > */
+  GstVaapiDecoderClass parent_class;
 };
 
 static void
-gst_vaapi_decoder_mpeg2_close(GstVaapiDecoderMpeg2 *decoder)
+gst_vaapi_decoder_mpeg2_close (GstVaapiDecoderMpeg2 * decoder)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
 
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_hdr, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_scalable_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->gop, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_hdr, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_display_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->slice_hdr, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_hdr, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_display_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_scalable_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->gop, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->pic_hdr, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->pic_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->pic_display_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->quant_matrix, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->slice_hdr, NULL);
 
-    priv->state = 0;
+  priv->state = 0;
 
-    gst_vaapi_dpb_replace(&priv->dpb, NULL);
+  gst_vaapi_dpb_replace (&priv->dpb, NULL);
 }
 
 static gboolean
-gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder)
+gst_vaapi_decoder_mpeg2_open (GstVaapiDecoderMpeg2 * decoder)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    gst_vaapi_decoder_mpeg2_close(decoder);
+  gst_vaapi_decoder_mpeg2_close (decoder);
 
-    priv->dpb = gst_vaapi_dpb_new(2);
-    if (!priv->dpb)
-        return FALSE;
+  priv->dpb = gst_vaapi_dpb_new (2);
+  if (!priv->dpb)
+    return FALSE;
 
-    pts_init(&priv->tsg);
-    return TRUE;
+  pts_init (&priv->tsg);
+  return TRUE;
 }
 
 static void
-gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_mpeg2_destroy (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
 
-    gst_vaapi_decoder_mpeg2_close(decoder);
+  gst_vaapi_decoder_mpeg2_close (decoder);
 }
 
 static gboolean
-gst_vaapi_decoder_mpeg2_create(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_mpeg2_create (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-
-    priv->hw_profile            = GST_VAAPI_PROFILE_UNKNOWN;
-    priv->profile               = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
-    priv->profile_changed       = TRUE; /* Allow fallbacks to work */
-    return TRUE;
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+
+  priv->hw_profile = GST_VAAPI_PROFILE_UNKNOWN;
+  priv->profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
+  priv->profile_changed = TRUE; /* Allow fallbacks to work */
+  return TRUE;
 }
 
 static inline void
-copy_quant_matrix(guint8 dst[64], const guint8 src[64])
+copy_quant_matrix (guint8 dst[64], const guint8 src[64])
 {
-    memcpy(dst, src, 64);
+  memcpy (dst, src, 64);
 }
 
 static const char *
-get_profile_str(GstVaapiProfile profile)
+get_profile_str (GstVaapiProfile profile)
 {
-    char *str;
-
-    switch (profile) {
-    case GST_VAAPI_PROFILE_MPEG2_SIMPLE:    str = "simple";     break;
-    case GST_VAAPI_PROFILE_MPEG2_MAIN:      str = "main";       break;
-    case GST_VAAPI_PROFILE_MPEG2_HIGH:      str = "high";       break;
-    default:                                str = "<unknown>";  break;
-    }
-    return str;
+  char *str;
+
+  switch (profile) {
+    case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
+      str = "simple";
+      break;
+    case GST_VAAPI_PROFILE_MPEG2_MAIN:
+      str = "main";
+      break;
+    case GST_VAAPI_PROFILE_MPEG2_HIGH:
+      str = "high";
+      break;
+    default:
+      str = "<unknown>";
+      break;
+  }
+  return str;
 }
 
 static GstVaapiProfile
-get_profile(GstVaapiDecoderMpeg2 *decoder, GstVaapiEntrypoint entrypoint)
+get_profile (GstVaapiDecoderMpeg2 * decoder, GstVaapiEntrypoint entrypoint)
 {
-    GstVaapiDisplay * const va_display = GST_VAAPI_DECODER_DISPLAY(decoder);
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstVaapiProfile profile = priv->profile;
+  GstVaapiDisplay *const va_display = GST_VAAPI_DECODER_DISPLAY (decoder);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstVaapiProfile profile = priv->profile;
 
-    do {
-        /* Return immediately if the exact same profile was found */
-        if (gst_vaapi_display_has_decoder(va_display, profile, entrypoint))
-            break;
+  do {
+    /* Return immediately if the exact same profile was found */
+    if (gst_vaapi_display_has_decoder (va_display, profile, entrypoint))
+      break;
 
-        /* Otherwise, try to map to a higher profile */
-        switch (profile) {
-        case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
-            profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
-            break;
-        case GST_VAAPI_PROFILE_MPEG2_MAIN:
-            profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
-            break;
-        case GST_VAAPI_PROFILE_MPEG2_HIGH:
-            // Try to map to main profile if no high profile specific bits used
-            if (priv->profile == profile &&
-                !priv->seq_scalable_ext &&
-                (priv->seq_ext &&
-                 priv->seq_ext->data.seq_ext.chroma_format == 1)) {
-                profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
-                break;
-            }
-            // fall-through
-        default:
-            profile = GST_VAAPI_PROFILE_UNKNOWN;
-            break;
+    /* Otherwise, try to map to a higher profile */
+    switch (profile) {
+      case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
+        profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
+        break;
+      case GST_VAAPI_PROFILE_MPEG2_MAIN:
+        profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
+        break;
+      case GST_VAAPI_PROFILE_MPEG2_HIGH:
+        // Try to map to main profile if no high profile specific bits used
+        if (priv->profile == profile &&
+            !priv->seq_scalable_ext &&
+            (priv->seq_ext && priv->seq_ext->data.seq_ext.chroma_format == 1)) {
+          profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
+          break;
         }
-    } while (profile != GST_VAAPI_PROFILE_UNKNOWN);
+        // fall-through
+      default:
+        profile = GST_VAAPI_PROFILE_UNKNOWN;
+        break;
+    }
+  } while (profile != GST_VAAPI_PROFILE_UNKNOWN);
 
-    if (profile != priv->profile)
-        GST_INFO("forced %s profile to %s profile",
-                 get_profile_str(priv->profile), get_profile_str(profile));
-    return profile;
+  if (profile != priv->profile)
+    GST_INFO ("forced %s profile to %s profile",
+        get_profile_str (priv->profile), get_profile_str (profile));
+  return profile;
 }
 
 static GstVaapiDecoderStatus
-ensure_context(GstVaapiDecoderMpeg2 *decoder)
+ensure_context (GstVaapiDecoderMpeg2 * decoder)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
-    gboolean reset_context = FALSE;
-
-    if (priv->profile_changed) {
-        GST_DEBUG("profile changed");
-        priv->profile_changed = FALSE;
-        reset_context         = TRUE;
-
-        priv->hw_profile = get_profile(decoder, entrypoint);
-        if (priv->hw_profile == GST_VAAPI_PROFILE_UNKNOWN)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    }
-
-    if (priv->size_changed) {
-        GST_DEBUG("size changed");
-        priv->size_changed = FALSE;
-        reset_context      = TRUE;
-    }
-
-    if (reset_context) {
-        GstVaapiContextInfo info;
-
-        info.profile    = priv->hw_profile;
-        info.entrypoint = entrypoint;
-        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
-        info.width      = priv->width;
-        info.height     = priv->height;
-        info.ref_frames = 2;
-        reset_context   = gst_vaapi_decoder_ensure_context(
-            GST_VAAPI_DECODER_CAST(decoder),
-            &info
-        );
-        if (!reset_context)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+  gboolean reset_context = FALSE;
+
+  if (priv->profile_changed) {
+    GST_DEBUG ("profile changed");
+    priv->profile_changed = FALSE;
+    reset_context = TRUE;
+
+    priv->hw_profile = get_profile (decoder, entrypoint);
+    if (priv->hw_profile == GST_VAAPI_PROFILE_UNKNOWN)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  }
+
+  if (priv->size_changed) {
+    GST_DEBUG ("size changed");
+    priv->size_changed = FALSE;
+    reset_context = TRUE;
+  }
+
+  if (reset_context) {
+    GstVaapiContextInfo info;
+
+    info.profile = priv->hw_profile;
+    info.entrypoint = entrypoint;
+    info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+    info.width = priv->width;
+    info.height = priv->height;
+    info.ref_frames = 2;
+    reset_context =
+        gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER_CAST (decoder),
+        &info);
+    if (!reset_context)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
+ensure_quant_matrix (GstVaapiDecoderMpeg2 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr->data.seq_hdr;
-    VAIQMatrixBufferMPEG2 *iq_matrix;
-    guint8 *intra_quant_matrix = NULL;
-    guint8 *non_intra_quant_matrix = NULL;
-    guint8 *chroma_intra_quant_matrix = NULL;
-    guint8 *chroma_non_intra_quant_matrix = NULL;
-
-    if (!priv->quant_matrix_changed)
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    priv->quant_matrix_changed = FALSE;
-
-    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(MPEG2, decoder);
-    if (!picture->iq_matrix) {
-        GST_ERROR("failed to allocate IQ matrix");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    iq_matrix = picture->iq_matrix->param;
-
-    intra_quant_matrix     = seq_hdr->intra_quantizer_matrix;
-    non_intra_quant_matrix = seq_hdr->non_intra_quantizer_matrix;
-
-    if (priv->quant_matrix) {
-        GstMpegVideoQuantMatrixExt * const quant_matrix =
-            &priv->quant_matrix->data.quant_matrix;
-        if (quant_matrix->load_intra_quantiser_matrix)
-            intra_quant_matrix = quant_matrix->intra_quantiser_matrix;
-        if (quant_matrix->load_non_intra_quantiser_matrix)
-            non_intra_quant_matrix = quant_matrix->non_intra_quantiser_matrix;
-        if (quant_matrix->load_chroma_intra_quantiser_matrix)
-            chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
-        if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
-            chroma_non_intra_quant_matrix = quant_matrix->chroma_non_intra_quantiser_matrix;
-    }
-
-    iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
-    if (intra_quant_matrix)
-        copy_quant_matrix(iq_matrix->intra_quantiser_matrix,
-                          intra_quant_matrix);
-
-    iq_matrix->load_non_intra_quantiser_matrix = non_intra_quant_matrix != NULL;
-    if (non_intra_quant_matrix)
-        copy_quant_matrix(iq_matrix->non_intra_quantiser_matrix,
-                          non_intra_quant_matrix);
-
-    iq_matrix->load_chroma_intra_quantiser_matrix = chroma_intra_quant_matrix != NULL;
-    if (chroma_intra_quant_matrix)
-        copy_quant_matrix(iq_matrix->chroma_intra_quantiser_matrix,
-                          chroma_intra_quant_matrix);
-
-    iq_matrix->load_chroma_non_intra_quantiser_matrix = chroma_non_intra_quant_matrix != NULL;
-    if (chroma_non_intra_quant_matrix)
-        copy_quant_matrix(iq_matrix->chroma_non_intra_quantiser_matrix,
-                          chroma_non_intra_quant_matrix);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceHdr *const seq_hdr = &priv->seq_hdr->data.seq_hdr;
+  VAIQMatrixBufferMPEG2 *iq_matrix;
+  guint8 *intra_quant_matrix = NULL;
+  guint8 *non_intra_quant_matrix = NULL;
+  guint8 *chroma_intra_quant_matrix = NULL;
+  guint8 *chroma_non_intra_quant_matrix = NULL;
+
+  if (!priv->quant_matrix_changed)
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+  priv->quant_matrix_changed = FALSE;
+
+  picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (MPEG2, decoder);
+  if (!picture->iq_matrix) {
+    GST_ERROR ("failed to allocate IQ matrix");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  iq_matrix = picture->iq_matrix->param;
+
+  intra_quant_matrix = seq_hdr->intra_quantizer_matrix;
+  non_intra_quant_matrix = seq_hdr->non_intra_quantizer_matrix;
+
+  if (priv->quant_matrix) {
+    GstMpegVideoQuantMatrixExt *const quant_matrix =
+        &priv->quant_matrix->data.quant_matrix;
+    if (quant_matrix->load_intra_quantiser_matrix)
+      intra_quant_matrix = quant_matrix->intra_quantiser_matrix;
+    if (quant_matrix->load_non_intra_quantiser_matrix)
+      non_intra_quant_matrix = quant_matrix->non_intra_quantiser_matrix;
+    if (quant_matrix->load_chroma_intra_quantiser_matrix)
+      chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
+    if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
+      chroma_non_intra_quant_matrix =
+          quant_matrix->chroma_non_intra_quantiser_matrix;
+  }
+
+  iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
+  if (intra_quant_matrix)
+    copy_quant_matrix (iq_matrix->intra_quantiser_matrix, intra_quant_matrix);
+
+  iq_matrix->load_non_intra_quantiser_matrix = non_intra_quant_matrix != NULL;
+  if (non_intra_quant_matrix)
+    copy_quant_matrix (iq_matrix->non_intra_quantiser_matrix,
+        non_intra_quant_matrix);
+
+  iq_matrix->load_chroma_intra_quantiser_matrix =
+      chroma_intra_quant_matrix != NULL;
+  if (chroma_intra_quant_matrix)
+    copy_quant_matrix (iq_matrix->chroma_intra_quantiser_matrix,
+        chroma_intra_quant_matrix);
+
+  iq_matrix->load_chroma_non_intra_quantiser_matrix =
+      chroma_non_intra_quant_matrix != NULL;
+  if (chroma_non_intra_quant_matrix)
+    copy_quant_matrix (iq_matrix->chroma_non_intra_quantiser_matrix,
+        chroma_non_intra_quant_matrix);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline gboolean
-is_valid_state(GstVaapiDecoderMpeg2 *decoder, guint state)
+is_valid_state (GstVaapiDecoderMpeg2 * decoder, guint state)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    return (priv->state & state) == state;
+  return (priv->state & state) == state;
 }
 
 static GstVaapiDecoderStatus
-decode_current_picture(GstVaapiDecoderMpeg2 *decoder)
+decode_current_picture (GstVaapiDecoderMpeg2 * decoder)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->current_picture;
-
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PICTURE))
-        goto drop_frame;
-    priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
-
-    if (!picture)
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    if (!gst_vaapi_picture_decode(picture))
-        goto error;
-    if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
-        if (!gst_vaapi_dpb_add(priv->dpb, picture))
-            goto error;
-        gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    }
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->current_picture;
+
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_VALID_PICTURE))
+    goto drop_frame;
+  priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
+
+  if (!picture)
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
+  if (!gst_vaapi_picture_decode (picture))
+    goto error;
+  if (GST_VAAPI_PICTURE_IS_COMPLETE (picture)) {
+    if (!gst_vaapi_dpb_add (priv->dpb, picture))
+      goto error;
+    gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
 error:
-    /* XXX: fix for cases where first field failed to be decoded */
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  /* XXX: fix for cases where first field failed to be decoded */
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 
 drop_frame:
-    priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
-    return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+  priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
+  return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
 }
 
 static GstVaapiDecoderStatus
-parse_sequence(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_sequence (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceHdr *seq_hdr;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceHdr *seq_hdr;
 
-    priv->state = 0;
+  priv->state = 0;
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_hdr)) {
-        GST_ERROR("failed to allocate parser info for sequence header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->seq_hdr)) {
+    GST_ERROR ("failed to allocate parser info for sequence header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    seq_hdr = &priv->seq_hdr->data.seq_hdr;
+  seq_hdr = &priv->seq_hdr->data.seq_hdr;
 
-    if (!gst_mpeg_video_packet_parse_sequence_header(packet, seq_hdr)) {
-        GST_ERROR("failed to parse sequence header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_sequence_header (packet, seq_hdr)) {
+    GST_ERROR ("failed to parse sequence header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, seq_hdr, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, seq_hdr, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+decode_sequence (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceHdr * const seq_hdr = unit->parsed_info;
-
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_scalable_ext, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, NULL);
-    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_display_ext, NULL);
-
-    priv->fps_n = seq_hdr->fps_n;
-    priv->fps_d = seq_hdr->fps_d;
-    pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
-    gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
-
-    priv->width                 = seq_hdr->width;
-    priv->height                = seq_hdr->height;
-    priv->size_changed          = TRUE;
-    priv->quant_matrix_changed  = TRUE;
-    priv->progressive_sequence  = TRUE;
-
-    priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER_CAST (decoder);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceHdr *const seq_hdr = unit->parsed_info;
+
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_display_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->seq_scalable_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->quant_matrix, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->pic_display_ext, NULL);
+
+  priv->fps_n = seq_hdr->fps_n;
+  priv->fps_d = seq_hdr->fps_d;
+  pts_set_framerate (&priv->tsg, priv->fps_n, priv->fps_d);
+  gst_vaapi_decoder_set_framerate (base_decoder, priv->fps_n, priv->fps_d);
+
+  priv->width = seq_hdr->width;
+  priv->height = seq_hdr->height;
+  priv->size_changed = TRUE;
+  priv->quant_matrix_changed = TRUE;
+  priv->progressive_sequence = TRUE;
+
+  priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_sequence_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_sequence_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceExt *seq_ext;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceExt *seq_ext;
 
-    priv->state &= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
+  priv->state &= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_ext)) {
-        GST_ERROR("failed to allocate parser info for sequence extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->seq_ext)) {
+    GST_ERROR ("failed to allocate parser info for sequence extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    seq_ext = &priv->seq_ext->data.seq_ext;
+  seq_ext = &priv->seq_ext->data.seq_ext;
 
-    if (!gst_mpeg_video_packet_parse_sequence_extension(packet, seq_ext)) {
-        GST_ERROR("failed to parse sequence-extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_sequence_extension (packet, seq_ext)) {
+    GST_ERROR ("failed to parse sequence-extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, seq_ext, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, seq_ext, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+decode_sequence_ext (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceExt * const seq_ext = unit->parsed_info;
-    GstVaapiProfile profile;
-    guint width, height;
-
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    priv->progressive_sequence = seq_ext->progressive;
-    gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
-
-    width  = (priv->width  & 0x0fff) | ((guint32)seq_ext->horiz_size_ext << 12);
-    height = (priv->height & 0x0fff) | ((guint32)seq_ext->vert_size_ext  << 12);
-    GST_DEBUG("video resolution %ux%u", width, height);
-
-    if (seq_ext->fps_n_ext && seq_ext->fps_d_ext) {
-        priv->fps_n *= seq_ext->fps_n_ext + 1;
-        priv->fps_d *= seq_ext->fps_d_ext + 1;
-        pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
-        gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
-    }
+  GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER_CAST (decoder);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceExt *const seq_ext = unit->parsed_info;
+  GstVaapiProfile profile;
+  guint width, height;
 
-    if (priv->width != width) {
-        priv->width = width;
-        priv->size_changed = TRUE;
-    }
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    if (priv->height != height) {
-        priv->height = height;
-        priv->size_changed = TRUE;
-    }
+  priv->progressive_sequence = seq_ext->progressive;
+  gst_vaapi_decoder_set_interlaced (base_decoder, !priv->progressive_sequence);
+
+  width = (priv->width & 0x0fff) | ((guint32) seq_ext->horiz_size_ext << 12);
+  height = (priv->height & 0x0fff) | ((guint32) seq_ext->vert_size_ext << 12);
+  GST_DEBUG ("video resolution %ux%u", width, height);
+
+  if (seq_ext->fps_n_ext && seq_ext->fps_d_ext) {
+    priv->fps_n *= seq_ext->fps_n_ext + 1;
+    priv->fps_d *= seq_ext->fps_d_ext + 1;
+    pts_set_framerate (&priv->tsg, priv->fps_n, priv->fps_d);
+    gst_vaapi_decoder_set_framerate (base_decoder, priv->fps_n, priv->fps_d);
+  }
+
+  if (priv->width != width) {
+    priv->width = width;
+    priv->size_changed = TRUE;
+  }
 
-    switch (seq_ext->profile) {
+  if (priv->height != height) {
+    priv->height = height;
+    priv->size_changed = TRUE;
+  }
+
+  switch (seq_ext->profile) {
     case GST_MPEG_VIDEO_PROFILE_SIMPLE:
-        profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
-        break;
+      profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
+      break;
     case GST_MPEG_VIDEO_PROFILE_MAIN:
-        profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
-        break;
+      profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
+      break;
     case GST_MPEG_VIDEO_PROFILE_HIGH:
-        profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
-        break;
+      profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
+      break;
     default:
-        GST_ERROR("unsupported profile %d", seq_ext->profile);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    }
-    if (priv->profile != profile) {
-        priv->profile = profile;
-        priv->profile_changed = TRUE;
-    }
-
-    priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      GST_ERROR ("unsupported profile %d", seq_ext->profile);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  }
+  if (priv->profile != profile) {
+    priv->profile = profile;
+    priv->profile_changed = TRUE;
+  }
+
+  priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_sequence_display_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceDisplayExt *seq_display_ext;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceDisplayExt *seq_display_ext;
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_display_ext)) {
-        GST_ERROR("failed to allocate parser info for sequence display extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->seq_display_ext)) {
+    GST_ERROR ("failed to allocate parser info for sequence display extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    seq_display_ext = &priv->seq_display_ext->data.seq_display_ext;
+  seq_display_ext = &priv->seq_display_ext->data.seq_display_ext;
 
-    if (!gst_mpeg_video_packet_parse_sequence_display_extension(packet,
-            seq_display_ext)) {
-        GST_ERROR("failed to parse sequence-display-extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_sequence_display_extension (packet,
+          seq_display_ext)) {
+    GST_ERROR ("failed to parse sequence-display-extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, seq_display_ext, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, seq_display_ext, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit)
+decode_sequence_display_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceDisplayExt *seq_display_ext;
-
-    seq_display_ext = priv->seq_display_ext ?
-        &priv->seq_display_ext->data.seq_display_ext : NULL;
-
-    /* Update cropping rectangle */
-    if (seq_display_ext) {
-        GstVaapiRectangle * const crop_rect = &priv->crop_rect;
-        crop_rect->x = 0;
-        crop_rect->y = 0;
-        crop_rect->width = seq_display_ext->display_horizontal_size;
-        crop_rect->height = seq_display_ext->display_vertical_size;
-    }
-
-    /* XXX: handle color primaries */
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceDisplayExt *seq_display_ext;
+
+  seq_display_ext = priv->seq_display_ext ?
+      &priv->seq_display_ext->data.seq_display_ext : NULL;
+
+  /* Update cropping rectangle */
+  if (seq_display_ext) {
+    GstVaapiRectangle *const crop_rect = &priv->crop_rect;
+    crop_rect->x = 0;
+    crop_rect->y = 0;
+    crop_rect->width = seq_display_ext->display_horizontal_size;
+    crop_rect->height = seq_display_ext->display_vertical_size;
+  }
+
+  /* XXX: handle color primaries */
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_sequence_scalable_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceScalableExt *seq_scalable_ext;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceScalableExt *seq_scalable_ext;
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_scalable_ext)) {
-        GST_ERROR("failed to allocate parser info for sequence scalable extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->seq_scalable_ext)) {
+    GST_ERROR
+        ("failed to allocate parser info for sequence scalable extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    seq_scalable_ext = &priv->seq_scalable_ext->data.seq_scalable_ext;
+  seq_scalable_ext = &priv->seq_scalable_ext->data.seq_scalable_ext;
 
-    if (!gst_mpeg_video_packet_parse_sequence_scalable_extension(packet,
-            seq_scalable_ext)) {
-        GST_ERROR("failed to parse sequence-scalable-extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_sequence_scalable_extension (packet,
+          seq_scalable_ext)) {
+    GST_ERROR ("failed to parse sequence-scalable-extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, seq_scalable_ext, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, seq_scalable_ext, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit)
+decode_sequence_scalable_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    /* XXX: unsupported header -- ignore */
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  /* XXX: unsupported header -- ignore */
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
+decode_sequence_end (GstVaapiDecoderMpeg2 * decoder)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    if (priv->dpb)
-        gst_vaapi_dpb_flush(priv->dpb);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (priv->dpb)
+    gst_vaapi_dpb_flush (priv->dpb);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_quant_matrix_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoQuantMatrixExt *quant_matrix;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoQuantMatrixExt *quant_matrix;
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->quant_matrix)) {
-        GST_ERROR("failed to allocate parser info for quantization matrix");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->quant_matrix)) {
+    GST_ERROR ("failed to allocate parser info for quantization matrix");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    quant_matrix = &priv->quant_matrix->data.quant_matrix;
+  quant_matrix = &priv->quant_matrix->data.quant_matrix;
 
-    if (!gst_mpeg_video_packet_parse_quant_matrix_extension(packet,
-            quant_matrix)) {
-        GST_ERROR("failed to parse quant-matrix-extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_quant_matrix_extension (packet,
+          quant_matrix)) {
+    GST_ERROR ("failed to parse quant-matrix-extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, quant_matrix, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, quant_matrix, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit)
+decode_quant_matrix_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    priv->quant_matrix_changed = TRUE;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->quant_matrix_changed = TRUE;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_gop(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_gop (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoGop *gop;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoGop *gop;
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->gop)) {
-        GST_ERROR("failed to allocate parser info for GOP");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->gop)) {
+    GST_ERROR ("failed to allocate parser info for GOP");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    gop = &priv->gop->data.gop;
+  gop = &priv->gop->data.gop;
 
-    if (!gst_mpeg_video_packet_parse_gop(packet, gop)) {
-        GST_ERROR("failed to parse GOP");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_gop (packet, gop)) {
+    GST_ERROR ("failed to parse GOP");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, gop, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, gop, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_gop(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+decode_gop (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoGop * const gop = unit->parsed_info;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoGop *const gop = unit->parsed_info;
 
-    priv->closed_gop  = gop->closed_gop;
-    priv->broken_link = gop->broken_link;
+  priv->closed_gop = gop->closed_gop;
+  priv->broken_link = gop->broken_link;
 
-    GST_DEBUG("GOP %02u:%02u:%02u:%02u (closed_gop %d, broken_link %d)",
-              gop->hour, gop->minute, gop->second, gop->frame,
-              priv->closed_gop, priv->broken_link);
+  GST_DEBUG ("GOP %02u:%02u:%02u:%02u (closed_gop %d, broken_link %d)",
+      gop->hour, gop->minute, gop->second, gop->frame,
+      priv->closed_gop, priv->broken_link);
 
-    pts_sync(&priv->tsg, GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  pts_sync (&priv->tsg, GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_picture(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_picture (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoPictureHdr *pic_hdr;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoPictureHdr *pic_hdr;
 
-    priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
-                    GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT);
+  priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
+      GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT);
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->pic_hdr)) {
-        GST_ERROR("failed to allocate parser info for picture header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->pic_hdr)) {
+    GST_ERROR ("failed to allocate parser info for picture header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    pic_hdr = &priv->pic_hdr->data.pic_hdr;
+  pic_hdr = &priv->pic_hdr->data.pic_hdr;
 
-    if (!gst_mpeg_video_packet_parse_picture_header(packet, pic_hdr)) {
-        GST_ERROR("failed to parse picture header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_picture_header (packet, pic_hdr)) {
+    GST_ERROR ("failed to parse picture header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, pic_hdr, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, pic_hdr, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+decode_picture (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
-    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
+  gst_vaapi_parser_info_mpeg2_replace (&priv->pic_ext, NULL);
 
-    priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_HDR;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_HDR;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-parse_picture_ext(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_picture_ext (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoPictureExt *pic_ext;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoPictureExt *pic_ext;
 
-    priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
-                    GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
-                    GST_MPEG_VIDEO_STATE_GOT_PIC_HDR);
+  priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
+      GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT | GST_MPEG_VIDEO_STATE_GOT_PIC_HDR);
 
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->pic_ext)) {
-        GST_ERROR("failed to allocate parser info for picture extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->pic_ext)) {
+    GST_ERROR ("failed to allocate parser info for picture extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    pic_ext = &priv->pic_ext->data.pic_ext;
+  pic_ext = &priv->pic_ext->data.pic_ext;
 
-    if (!gst_mpeg_video_packet_parse_picture_extension(packet, pic_ext)) {
-        GST_ERROR("failed to parse picture-extension");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  if (!gst_mpeg_video_packet_parse_picture_extension (packet, pic_ext)) {
+    GST_ERROR ("failed to parse picture-extension");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    gst_vaapi_decoder_unit_set_parsed_info(unit, pic_ext, NULL);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, pic_ext, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_picture_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+decode_picture_ext (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoPictureExt * const pic_ext = unit->parsed_info;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoPictureExt *const pic_ext = unit->parsed_info;
 
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_GOT_PIC_HDR))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    if (priv->progressive_sequence && !pic_ext->progressive_frame) {
-        GST_WARNING("invalid interlaced frame in progressive sequence, fixing");
-        pic_ext->progressive_frame = 1;
-    }
-
-    if (pic_ext->picture_structure == 0 ||
-        (pic_ext->progressive_frame &&
-         pic_ext->picture_structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
-        GST_WARNING("invalid picture_structure %d, replacing with \"frame\"",
-                    pic_ext->picture_structure);
-        pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
-    }
-
-    priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_EXT;
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_GOT_PIC_HDR))
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+  if (priv->progressive_sequence && !pic_ext->progressive_frame) {
+    GST_WARNING ("invalid interlaced frame in progressive sequence, fixing");
+    pic_ext->progressive_frame = 1;
+  }
+
+  if (pic_ext->picture_structure == 0 ||
+      (pic_ext->progressive_frame &&
+          pic_ext->picture_structure !=
+          GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
+    GST_WARNING ("invalid picture_structure %d, replacing with \"frame\"",
+        pic_ext->picture_structure);
+    pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
+  }
+
+  priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_EXT;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline guint32
-pack_f_code(guint8 f_code[2][2])
+pack_f_code (guint8 f_code[2][2])
 {
-    return (((guint32)f_code[0][0] << 12) |
-            ((guint32)f_code[0][1] <<  8) |
-            ((guint32)f_code[1][0] <<  4) |
-            (         f_code[1][1]      ));
+  return (((guint32) f_code[0][0] << 12) |
+      ((guint32) f_code[0][1] << 8) |
+      ((guint32) f_code[1][0] << 4) | (f_code[1][1]));
 }
 
 static GstVaapiDecoderStatus
-init_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
+init_picture (GstVaapiDecoderMpeg2 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
-    GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoPictureHdr *const pic_hdr = &priv->pic_hdr->data.pic_hdr;
+  GstMpegVideoPictureExt *const pic_ext = &priv->pic_ext->data.pic_ext;
 
-    switch (pic_hdr->pic_type) {
+  switch (pic_hdr->pic_type) {
     case GST_MPEG_VIDEO_PICTURE_TYPE_I:
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        picture->type = GST_VAAPI_PICTURE_TYPE_I;
-        break;
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      picture->type = GST_VAAPI_PICTURE_TYPE_I;
+      break;
     case GST_MPEG_VIDEO_PICTURE_TYPE_P:
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        picture->type = GST_VAAPI_PICTURE_TYPE_P;
-        break;
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      picture->type = GST_VAAPI_PICTURE_TYPE_P;
+      break;
     case GST_MPEG_VIDEO_PICTURE_TYPE_B:
-        picture->type = GST_VAAPI_PICTURE_TYPE_B;
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_B;
+      break;
     default:
-        GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+      GST_ERROR ("unsupported picture type %d", pic_hdr->pic_type);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    if (!priv->progressive_sequence && !pic_ext->progressive_frame) {
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
-        if (pic_ext->top_field_first)
-            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_TFF);
-    }
+  if (!priv->progressive_sequence && !pic_ext->progressive_frame) {
+    GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
+    if (pic_ext->top_field_first)
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_TFF);
+  }
 
-    switch (pic_ext->picture_structure) {
+  switch (pic_ext->picture_structure) {
     case GST_MPEG_VIDEO_PICTURE_STRUCTURE_TOP_FIELD:
-        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
-        break;
+      picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
+      break;
     case GST_MPEG_VIDEO_PICTURE_STRUCTURE_BOTTOM_FIELD:
-        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
-        break;
+      picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
+      break;
     case GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME:
-        picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
-        break;
+      picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+      break;
+  }
+
+  /* Allocate dummy picture for first field based I-frame */
+  if (picture->type == GST_VAAPI_PICTURE_TYPE_I &&
+      !GST_VAAPI_PICTURE_IS_FRAME (picture) &&
+      gst_vaapi_dpb_size (priv->dpb) == 0) {
+    GstVaapiPicture *dummy_picture;
+    gboolean success;
+
+    dummy_picture = GST_VAAPI_PICTURE_NEW (MPEG2, decoder);
+    if (!dummy_picture) {
+      GST_ERROR ("failed to allocate dummy picture");
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
     }
 
-    /* Allocate dummy picture for first field based I-frame */
-    if (picture->type == GST_VAAPI_PICTURE_TYPE_I &&
-        !GST_VAAPI_PICTURE_IS_FRAME(picture) &&
-        gst_vaapi_dpb_size(priv->dpb) == 0) {
-        GstVaapiPicture *dummy_picture;
-        gboolean success;
-
-        dummy_picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
-        if (!dummy_picture) {
-            GST_ERROR("failed to allocate dummy picture");
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        }
-
-        dummy_picture->type      = GST_VAAPI_PICTURE_TYPE_I;
-        dummy_picture->pts       = GST_CLOCK_TIME_NONE;
-        dummy_picture->poc       = -1;
-        dummy_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+    dummy_picture->type = GST_VAAPI_PICTURE_TYPE_I;
+    dummy_picture->pts = GST_CLOCK_TIME_NONE;
+    dummy_picture->poc = -1;
+    dummy_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
 
-        GST_VAAPI_PICTURE_FLAG_SET(
-            dummy_picture,
-            (GST_VAAPI_PICTURE_FLAG_SKIPPED |
-             GST_VAAPI_PICTURE_FLAG_OUTPUT  |
-             GST_VAAPI_PICTURE_FLAG_REFERENCE)
+    GST_VAAPI_PICTURE_FLAG_SET (dummy_picture,
+        (GST_VAAPI_PICTURE_FLAG_SKIPPED |
+            GST_VAAPI_PICTURE_FLAG_OUTPUT | GST_VAAPI_PICTURE_FLAG_REFERENCE)
         );
 
-        success = gst_vaapi_dpb_add(priv->dpb, dummy_picture);
-        gst_vaapi_picture_unref(dummy_picture);
-        if (!success) {
-            GST_ERROR("failed to add dummy picture into DPB");
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-        }
-        GST_INFO("allocated dummy picture for first field based I-frame");
+    success = gst_vaapi_dpb_add (priv->dpb, dummy_picture);
+    gst_vaapi_picture_unref (dummy_picture);
+    if (!success) {
+      GST_ERROR ("failed to add dummy picture into DPB");
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
     }
-
-    /* Update presentation time */
-    picture->pts = pts_eval(&priv->tsg,
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts, pic_hdr->tsn);
-    picture->poc = pts_get_poc(&priv->tsg);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    GST_INFO ("allocated dummy picture for first field based I-frame");
+  }
+
+  /* Update presentation time */
+  picture->pts = pts_eval (&priv->tsg,
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts, pic_hdr->tsn);
+  picture->poc = pts_get_poc (&priv->tsg);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static void
-fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
+fill_picture (GstVaapiDecoderMpeg2 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    VAPictureParameterBufferMPEG2 * const pic_param = picture->param;
-    GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
-    GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
-    GstVaapiPicture *prev_picture, *next_picture;
-
-    /* Fill in VAPictureParameterBufferMPEG2 */
-    pic_param->horizontal_size            = priv->width;
-    pic_param->vertical_size              = priv->height;
-    pic_param->forward_reference_picture  = VA_INVALID_ID;
-    pic_param->backward_reference_picture = VA_INVALID_ID;
-    pic_param->picture_coding_type        = pic_hdr->pic_type;
-    pic_param->f_code                     = pack_f_code(pic_ext->f_code);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  VAPictureParameterBufferMPEG2 *const pic_param = picture->param;
+  GstMpegVideoPictureHdr *const pic_hdr = &priv->pic_hdr->data.pic_hdr;
+  GstMpegVideoPictureExt *const pic_ext = &priv->pic_ext->data.pic_ext;
+  GstVaapiPicture *prev_picture, *next_picture;
+
+  /* Fill in VAPictureParameterBufferMPEG2 */
+  pic_param->horizontal_size = priv->width;
+  pic_param->vertical_size = priv->height;
+  pic_param->forward_reference_picture = VA_INVALID_ID;
+  pic_param->backward_reference_picture = VA_INVALID_ID;
+  pic_param->picture_coding_type = pic_hdr->pic_type;
+  pic_param->f_code = pack_f_code (pic_ext->f_code);
 
 #define COPY_FIELD(a, b, f) \
     pic_param->a.b.f = pic_ext->f
-    pic_param->picture_coding_extension.value = 0;
-    pic_param->picture_coding_extension.bits.is_first_field =
-        GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
-    COPY_FIELD(picture_coding_extension, bits, intra_dc_precision);
-    COPY_FIELD(picture_coding_extension, bits, picture_structure);
-    COPY_FIELD(picture_coding_extension, bits, top_field_first);
-    COPY_FIELD(picture_coding_extension, bits, frame_pred_frame_dct);
-    COPY_FIELD(picture_coding_extension, bits, concealment_motion_vectors);
-    COPY_FIELD(picture_coding_extension, bits, q_scale_type);
-    COPY_FIELD(picture_coding_extension, bits, intra_vlc_format);
-    COPY_FIELD(picture_coding_extension, bits, alternate_scan);
-    COPY_FIELD(picture_coding_extension, bits, repeat_first_field);
-    COPY_FIELD(picture_coding_extension, bits, progressive_frame);
-
-    gst_vaapi_dpb_get_neighbours(priv->dpb, picture,
-        &prev_picture, &next_picture);
-
-    switch (pic_hdr->pic_type) {
+  pic_param->picture_coding_extension.value = 0;
+  pic_param->picture_coding_extension.bits.is_first_field =
+      GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture);
+  COPY_FIELD (picture_coding_extension, bits, intra_dc_precision);
+  COPY_FIELD (picture_coding_extension, bits, picture_structure);
+  COPY_FIELD (picture_coding_extension, bits, top_field_first);
+  COPY_FIELD (picture_coding_extension, bits, frame_pred_frame_dct);
+  COPY_FIELD (picture_coding_extension, bits, concealment_motion_vectors);
+  COPY_FIELD (picture_coding_extension, bits, q_scale_type);
+  COPY_FIELD (picture_coding_extension, bits, intra_vlc_format);
+  COPY_FIELD (picture_coding_extension, bits, alternate_scan);
+  COPY_FIELD (picture_coding_extension, bits, repeat_first_field);
+  COPY_FIELD (picture_coding_extension, bits, progressive_frame);
+
+  gst_vaapi_dpb_get_neighbours (priv->dpb, picture,
+      &prev_picture, &next_picture);
+
+  switch (pic_hdr->pic_type) {
     case GST_MPEG_VIDEO_PICTURE_TYPE_B:
-        if (next_picture)
-            pic_param->backward_reference_picture = next_picture->surface_id;
-        if (prev_picture)
-            pic_param->forward_reference_picture = prev_picture->surface_id;
-        else if (!priv->closed_gop)
-            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
-        break;
+      if (next_picture)
+        pic_param->backward_reference_picture = next_picture->surface_id;
+      if (prev_picture)
+        pic_param->forward_reference_picture = prev_picture->surface_id;
+      else if (!priv->closed_gop)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
+      break;
     case GST_MPEG_VIDEO_PICTURE_TYPE_P:
-        if (prev_picture)
-            pic_param->forward_reference_picture = prev_picture->surface_id;
-        break;
-    }
+      if (prev_picture)
+        pic_param->forward_reference_picture = prev_picture->surface_id;
+      break;
+  }
 }
 
 static GstVaapiDecoderStatus
-parse_slice(GstVaapiDecoderMpeg2 *decoder,
-    GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+parse_slice (GstVaapiDecoderMpeg2 * decoder,
+    GstVaapiDecoderUnit * unit, const GstMpegVideoPacket * packet)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSliceHdr *slice_hdr;
-    GstMpegVideoSequenceHdr *seq_hdr;
-    GstMpegVideoSequenceScalableExt *seq_scalable_ext;
-
-    priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
-                    GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
-                    GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
-                    GST_MPEG_VIDEO_STATE_GOT_PIC_EXT);
-
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->slice_hdr)) {
-        GST_ERROR("failed to allocate parser info for slice header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSliceHdr *slice_hdr;
+  GstMpegVideoSequenceHdr *seq_hdr;
+  GstMpegVideoSequenceScalableExt *seq_scalable_ext;
 
-    slice_hdr = &priv->slice_hdr->data.slice_hdr;
-    seq_hdr = &priv->seq_hdr->data.seq_hdr;
-    seq_scalable_ext = priv->seq_scalable_ext ?
-        &priv->seq_scalable_ext->data.seq_scalable_ext : NULL;
+  priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR |
+      GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT |
+      GST_MPEG_VIDEO_STATE_GOT_PIC_HDR | GST_MPEG_VIDEO_STATE_GOT_PIC_EXT);
 
-    if (!gst_mpeg_video_packet_parse_slice_header(packet, slice_hdr,
-            seq_hdr, seq_scalable_ext)) {
-        GST_ERROR("failed to parse slice header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-
-    gst_vaapi_decoder_unit_set_parsed_info(unit, slice_hdr, NULL);
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
-}
 
-static GstVaapiDecoderStatus
-decode_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
-{
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->current_picture;
-    GstVaapiSlice *slice;
-    VASliceParameterBufferMPEG2 *slice_param;
-    GstMpegVideoSliceHdr * const slice_hdr = unit->parsed_info;
-    GstBuffer * const buffer =
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
-    GstMapInfo map_info;
-
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
-        GST_ERROR("failed to map buffer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  if (!gst_vaapi_parser_info_mpeg2_ensure (&priv->slice_hdr)) {
+    GST_ERROR ("failed to allocate parser info for slice header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
 
-    GST_DEBUG("slice %d (%u bytes)", slice_hdr->mb_row, unit->size);
+  slice_hdr = &priv->slice_hdr->data.slice_hdr;
+  seq_hdr = &priv->seq_hdr->data.seq_hdr;
+  seq_scalable_ext = priv->seq_scalable_ext ?
+      &priv->seq_scalable_ext->data.seq_scalable_ext : NULL;
 
-    slice = GST_VAAPI_SLICE_NEW(MPEG2, decoder,
-        (map_info.data + unit->offset), unit->size);
-    gst_buffer_unmap(buffer, &map_info);
-    if (!slice) {
-        GST_ERROR("failed to allocate slice");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    gst_vaapi_picture_add_slice(picture, slice);
+  if (!gst_mpeg_video_packet_parse_slice_header (packet, slice_hdr,
+          seq_hdr, seq_scalable_ext)) {
+    GST_ERROR ("failed to parse slice header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    /* Fill in VASliceParameterBufferMPEG2 */
-    slice_param                            = slice->param;
-    slice_param->macroblock_offset         = slice_hdr->header_size + 32;
-    slice_param->slice_horizontal_position = slice_hdr->mb_column;
-    slice_param->slice_vertical_position   = slice_hdr->mb_row;
-    slice_param->quantiser_scale_code      = slice_hdr->quantiser_scale_code;
-    slice_param->intra_slice_flag          = slice_hdr->intra_slice;
+  gst_vaapi_decoder_unit_set_parsed_info (unit, slice_hdr, NULL);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
 
-    priv->state |= GST_MPEG_VIDEO_STATE_GOT_SLICE;
+static GstVaapiDecoderStatus
+decode_slice (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit)
+{
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->current_picture;
+  GstVaapiSlice *slice;
+  VASliceParameterBufferMPEG2 *slice_param;
+  GstMpegVideoSliceHdr *const slice_hdr = unit->parsed_info;
+  GstBuffer *const buffer =
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
+  GstMapInfo map_info;
+
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
+    GST_ERROR ("failed to map buffer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+
+  GST_DEBUG ("slice %d (%u bytes)", slice_hdr->mb_row, unit->size);
+
+  slice = GST_VAAPI_SLICE_NEW (MPEG2, decoder,
+      (map_info.data + unit->offset), unit->size);
+  gst_buffer_unmap (buffer, &map_info);
+  if (!slice) {
+    GST_ERROR ("failed to allocate slice");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  gst_vaapi_picture_add_slice (picture, slice);
+
+  /* Fill in VASliceParameterBufferMPEG2 */
+  slice_param = slice->param;
+  slice_param->macroblock_offset = slice_hdr->header_size + 32;
+  slice_param->slice_horizontal_position = slice_hdr->mb_column;
+  slice_param->slice_vertical_position = slice_hdr->mb_row;
+  slice_param->quantiser_scale_code = slice_hdr->quantiser_scale_code;
+  slice_param->intra_slice_flag = slice_hdr->intra_slice;
+
+  priv->state |= GST_MPEG_VIDEO_STATE_GOT_SLICE;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline gint
-scan_for_start_code(const guchar *buf, guint buf_size,
-    GstMpegVideoPacketTypeCode *type_ptr)
+scan_for_start_code (const guchar * buf, guint buf_size,
+    GstMpegVideoPacketTypeCode * type_ptr)
 {
-    guint i = 0;
-
-    while (i <= (buf_size - 4)) {
-        if (buf[i + 2] > 1)
-            i += 3;
-        else if (buf[i + 1])
-            i += 2;
-        else if (buf[i] || buf[i + 2] != 1)
-            i++;
-        else
-            break;
-    }
-
-    if (i <= (buf_size - 4)) {
-        if (type_ptr)
-            *type_ptr = buf[i + 3];
-        return i;
-    }
-    return -1;
+  guint i = 0;
+
+  while (i <= (buf_size - 4)) {
+    if (buf[i + 2] > 1)
+      i += 3;
+    else if (buf[i + 1])
+      i += 2;
+    else if (buf[i] || buf[i + 2] != 1)
+      i++;
+    else
+      break;
+  }
+
+  if (i <= (buf_size - 4)) {
+    if (type_ptr)
+      *type_ptr = buf[i + 3];
+    return i;
+  }
+  return -1;
 }
 
 static GstVaapiDecoderStatus
-parse_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit,
-    GstMpegVideoPacket *packet)
+parse_unit (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit,
+    GstMpegVideoPacket * packet)
 {
-    GstMpegVideoPacketTypeCode type;
-    GstMpegVideoPacketExtensionCode ext_type;
-    GstVaapiDecoderStatus status;
+  GstMpegVideoPacketTypeCode type;
+  GstMpegVideoPacketExtensionCode ext_type;
+  GstVaapiDecoderStatus status;
 
-    type = packet->type;
-    switch (type) {
+  type = packet->type;
+  switch (type) {
     case GST_MPEG_VIDEO_PACKET_PICTURE:
-        status = parse_picture(decoder, unit, packet);
-        break;
+      status = parse_picture (decoder, unit, packet);
+      break;
     case GST_MPEG_VIDEO_PACKET_SEQUENCE:
-        status = parse_sequence(decoder, unit, packet);
-        break;
+      status = parse_sequence (decoder, unit, packet);
+      break;
     case GST_MPEG_VIDEO_PACKET_EXTENSION:
-        ext_type = packet->data[4] >> 4;
-        switch (ext_type) {
+      ext_type = packet->data[4] >> 4;
+      switch (ext_type) {
         case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
-            status = parse_sequence_ext(decoder, unit, packet);
-            break;
+          status = parse_sequence_ext (decoder, unit, packet);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
-            status = parse_sequence_display_ext(decoder, unit, packet);
-            break;
+          status = parse_sequence_display_ext (decoder, unit, packet);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_SCALABLE:
-            status = parse_sequence_scalable_ext(decoder, unit, packet);
-            break;
+          status = parse_sequence_scalable_ext (decoder, unit, packet);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
-            status = parse_quant_matrix_ext(decoder, unit, packet);
-            break;
+          status = parse_quant_matrix_ext (decoder, unit, packet);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
-            status = parse_picture_ext(decoder, unit, packet);
-            break;
+          status = parse_picture_ext (decoder, unit, packet);
+          break;
         default:
-            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-            break;
-        }
-        break;
+          status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+          break;
+      }
+      break;
     case GST_MPEG_VIDEO_PACKET_GOP:
-        status = parse_gop(decoder, unit, packet);
-        break;
+      status = parse_gop (decoder, unit, packet);
+      break;
     default:
-        if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
-            type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
-            status = parse_slice(decoder, unit, packet);
-            break;
-        }
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+      if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+          type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+        status = parse_slice (decoder, unit, packet);
         break;
-    }
-    return status;
+      }
+      status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+      break;
+  }
+  return status;
 }
 
 static GstVaapiDecoderStatus
-decode_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit,
-    GstMpegVideoPacket *packet)
+decode_unit (GstVaapiDecoderMpeg2 * decoder, GstVaapiDecoderUnit * unit,
+    GstMpegVideoPacket * packet)
 {
-    GstMpegVideoPacketTypeCode type;
-    GstMpegVideoPacketExtensionCode ext_type;
-    GstVaapiDecoderStatus status;
+  GstMpegVideoPacketTypeCode type;
+  GstMpegVideoPacketExtensionCode ext_type;
+  GstVaapiDecoderStatus status;
 
-    type = packet->type;
-    switch (type) {
+  type = packet->type;
+  switch (type) {
     case GST_MPEG_VIDEO_PACKET_PICTURE:
-        status = decode_picture(decoder, unit);
-        break;
+      status = decode_picture (decoder, unit);
+      break;
     case GST_MPEG_VIDEO_PACKET_SEQUENCE:
-        status = decode_sequence(decoder, unit);
-        break;
+      status = decode_sequence (decoder, unit);
+      break;
     case GST_MPEG_VIDEO_PACKET_EXTENSION:
-        ext_type = packet->data[4] >> 4;
-        switch (ext_type) {
+      ext_type = packet->data[4] >> 4;
+      switch (ext_type) {
         case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
-            status = decode_sequence_ext(decoder, unit);
-            break;
+          status = decode_sequence_ext (decoder, unit);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
-            status = decode_sequence_display_ext(decoder, unit);
-            break;
+          status = decode_sequence_display_ext (decoder, unit);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_SCALABLE:
-            status = decode_sequence_scalable_ext(decoder, unit);
-            break;
+          status = decode_sequence_scalable_ext (decoder, unit);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
-            status = decode_quant_matrix_ext(decoder, unit);
-            break;
+          status = decode_quant_matrix_ext (decoder, unit);
+          break;
         case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
-            status = decode_picture_ext(decoder, unit);
-            break;
+          status = decode_picture_ext (decoder, unit);
+          break;
         default:
-            // Ignore unknown start-code extensions
-            GST_WARNING("unsupported packet extension type 0x%02x", ext_type);
-            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-            break;
-        }
-        break;
+          // Ignore unknown start-code extensions
+          GST_WARNING ("unsupported packet extension type 0x%02x", ext_type);
+          status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+          break;
+      }
+      break;
     case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
-        status = decode_sequence_end(decoder);
-        break;
+      status = decode_sequence_end (decoder);
+      break;
     case GST_MPEG_VIDEO_PACKET_GOP:
-        status = decode_gop(decoder, unit);
-        break;
+      status = decode_gop (decoder, unit);
+      break;
     default:
-        if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
-            type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
-            status = decode_slice(decoder, unit);
-            break;
-        }
-        GST_WARNING("unsupported packet type 0x%02x", type);
-        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+          type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+        status = decode_slice (decoder, unit);
         break;
-    }
-    return status;
+      }
+      GST_WARNING ("unsupported packet type 0x%02x", type);
+      status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      break;
+  }
+  return status;
 }
 
 static GstVaapiDecoderStatus
-ensure_decoder(GstVaapiDecoderMpeg2 *decoder)
+ensure_decoder (GstVaapiDecoderMpeg2 * decoder)
 {
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-
-    if (!priv->is_opened) {
-        priv->is_opened = gst_vaapi_decoder_mpeg2_open(decoder);
-        if (!priv->is_opened)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+
+  if (!priv->is_opened) {
+    priv->is_opened = gst_vaapi_decoder_mpeg2_open (decoder);
+    if (!priv->is_opened)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg2_parse(GstVaapiDecoder *base_decoder,
-    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_mpeg2_parse (GstVaapiDecoder * base_decoder,
+    GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
-    GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
-    GstVaapiDecoderStatus status;
-    GstMpegVideoPacketTypeCode type, type2 = GST_MPEG_VIDEO_PACKET_NONE;
-    const guchar *buf;
-    guint buf_size, flags;
-    gint ofs, ofs1, ofs2;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-
-    buf_size = gst_adapter_available(adapter);
-    if (buf_size < 4)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-    buf = gst_adapter_map(adapter, buf_size);
-    if (!buf)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-    ofs = scan_for_start_code(buf, buf_size, &type);
-    if (ofs < 0)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-    ofs1 = ofs;
-
-    ofs2 = ps->input_offset2 - 4;
-    if (ofs2 < ofs1 + 4)
-        ofs2 = ofs1 + 4;
-
-    ofs = G_UNLIKELY(buf_size < ofs2 + 4) ? -1 :
-        scan_for_start_code(&buf[ofs2], buf_size - ofs2, &type2);
-    if (ofs < 0) {
-        // Assume the whole packet is present if end-of-stream
-        if (!at_eos) {
-            ps->input_offset2 = buf_size;
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        }
-        ofs = buf_size - ofs2;
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
+  GstVaapiParserState *const ps = GST_VAAPI_PARSER_STATE (base_decoder);
+  GstVaapiDecoderStatus status;
+  GstMpegVideoPacketTypeCode type, type2 = GST_MPEG_VIDEO_PACKET_NONE;
+  const guchar *buf;
+  guint buf_size, flags;
+  gint ofs, ofs1, ofs2;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+
+  buf_size = gst_adapter_available (adapter);
+  if (buf_size < 4)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  buf = gst_adapter_map (adapter, buf_size);
+  if (!buf)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  ofs = scan_for_start_code (buf, buf_size, &type);
+  if (ofs < 0)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+  ofs1 = ofs;
+
+  ofs2 = ps->input_offset2 - 4;
+  if (ofs2 < ofs1 + 4)
+    ofs2 = ofs1 + 4;
+
+  ofs = G_UNLIKELY (buf_size < ofs2 + 4) ? -1 :
+      scan_for_start_code (&buf[ofs2], buf_size - ofs2, &type2);
+  if (ofs < 0) {
+    // Assume the whole packet is present if end-of-stream
+    if (!at_eos) {
+      ps->input_offset2 = buf_size;
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
     }
-    ofs2 += ofs;
+    ofs = buf_size - ofs2;
+  }
+  ofs2 += ofs;
 
-    unit->size = ofs2 - ofs1;
-    gst_adapter_flush(adapter, ofs1);
-    ps->input_offset2 = 4;
+  unit->size = ofs2 - ofs1;
+  gst_adapter_flush (adapter, ofs1);
+  ps->input_offset2 = 4;
 
-    /* Check for start of new picture */
-    flags = 0;
-    switch (type) {
+  /* Check for start of new picture */
+  flags = 0;
+  switch (type) {
     case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
+      break;
     case GST_MPEG_VIDEO_PACKET_USER_DATA:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        /* fall-through */
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      /* fall-through */
     case GST_MPEG_VIDEO_PACKET_SEQUENCE:
     case GST_MPEG_VIDEO_PACKET_GOP:
     case GST_MPEG_VIDEO_PACKET_PICTURE:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      break;
     case GST_MPEG_VIDEO_PACKET_EXTENSION:
-        if (G_UNLIKELY(unit->size < 5))
-            return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-        break;
+      if (G_UNLIKELY (unit->size < 5))
+        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      break;
     default:
-        if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
-            type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-            switch (type2) {
-            case GST_MPEG_VIDEO_PACKET_USER_DATA:
-            case GST_MPEG_VIDEO_PACKET_SEQUENCE:
-            case GST_MPEG_VIDEO_PACKET_GOP:
-            case GST_MPEG_VIDEO_PACKET_PICTURE:
-                flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-                break;
-            default:
-                break;
-            }
+      if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+          type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+        switch (type2) {
+          case GST_MPEG_VIDEO_PACKET_USER_DATA:
+          case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+          case GST_MPEG_VIDEO_PACKET_GOP:
+          case GST_MPEG_VIDEO_PACKET_PICTURE:
+            flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+            break;
+          default:
+            break;
         }
-
-        // Ignore system start codes (PES headers)
-        else if (type >= 0xb9 && type <= 0xff)
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        break;
-    }
-    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      }
+      // Ignore system start codes (PES headers)
+      else if (type >= 0xb9 && type <= 0xff)
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      break;
+  }
+  GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, flags);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg2_decode(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_mpeg2_decode (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
-    GstVaapiDecoderStatus status;
-    GstMpegVideoPacket packet;
-    GstBuffer * const buffer =
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
-    GstMapInfo map_info;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-
-    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
-        GST_ERROR("failed to map buffer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
+  GstVaapiDecoderStatus status;
+  GstMpegVideoPacket packet;
+  GstBuffer *const buffer =
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
+  GstMapInfo map_info;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+
+  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
+    GST_ERROR ("failed to map buffer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    packet.data = map_info.data + unit->offset;
-    packet.size = unit->size;
-    packet.type = packet.data[3];
-    packet.offset = 4;
+  packet.data = map_info.data + unit->offset;
+  packet.size = unit->size;
+  packet.type = packet.data[3];
+  packet.offset = 4;
 
-    status = parse_unit(decoder, unit, &packet);
-    gst_buffer_unmap(buffer, &map_info);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-    return decode_unit(decoder, unit, &packet);
+  status = parse_unit (decoder, unit, &packet);
+  gst_buffer_unmap (buffer, &map_info);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+  return decode_unit (decoder, unit, &packet);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg2_start_frame(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *base_unit)
+gst_vaapi_decoder_mpeg2_start_frame (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * base_unit)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    GstMpegVideoSequenceHdr *seq_hdr;
-    GstMpegVideoSequenceExt *seq_ext;
-    GstMpegVideoSequenceDisplayExt *seq_display_ext;
-    GstVaapiPicture *picture;
-    GstVaapiDecoderStatus status;
-
-    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-    priv->state &= ~GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
-
-    seq_hdr = &priv->seq_hdr->data.seq_hdr;
-    seq_ext = priv->seq_ext ? &priv->seq_ext->data.seq_ext : NULL;
-    seq_display_ext = priv->seq_display_ext ?
-        &priv->seq_display_ext->data.seq_display_ext : NULL;
-    if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, seq_ext,
-            seq_display_ext))
-        gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
-            seq_hdr->par_w, seq_hdr->par_h);
-
-    status = ensure_context(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_ERROR("failed to reset context");
-        return status;
-    }
-
-    if (priv->current_picture) {
-        /* Re-use current picture where the first field was decoded */
-        picture = gst_vaapi_picture_new_field(priv->current_picture);
-        if (!picture) {
-            GST_ERROR("failed to allocate field picture");
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        }
-    }
-    else {
-        /* Create new picture */
-        picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
-        if (!picture) {
-            GST_ERROR("failed to allocate picture");
-            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-        }
-    }
-    gst_vaapi_picture_replace(&priv->current_picture, picture);
-    gst_vaapi_picture_unref(picture);
-
-    /* Update cropping rectangle */
-    /* XXX: handle picture_display_extension() */
-    if (seq_display_ext && priv->pic_display_ext) {
-        GstVaapiRectangle * const crop_rect = &priv->crop_rect;
-        if (crop_rect->x + crop_rect->width <= priv->width &&
-            crop_rect->y + crop_rect->height <= priv->height)
-            gst_vaapi_picture_set_crop_rect(picture, crop_rect);
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
+  GstMpegVideoSequenceHdr *seq_hdr;
+  GstMpegVideoSequenceExt *seq_ext;
+  GstMpegVideoSequenceDisplayExt *seq_display_ext;
+  GstVaapiPicture *picture;
+  GstVaapiDecoderStatus status;
+
+  if (!is_valid_state (decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->state &= ~GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
+
+  seq_hdr = &priv->seq_hdr->data.seq_hdr;
+  seq_ext = priv->seq_ext ? &priv->seq_ext->data.seq_ext : NULL;
+  seq_display_ext = priv->seq_display_ext ?
+      &priv->seq_display_ext->data.seq_display_ext : NULL;
+  if (gst_mpeg_video_finalise_mpeg2_sequence_header (seq_hdr, seq_ext,
+          seq_display_ext))
+    gst_vaapi_decoder_set_pixel_aspect_ratio (base_decoder,
+        seq_hdr->par_w, seq_hdr->par_h);
+
+  status = ensure_context (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_ERROR ("failed to reset context");
+    return status;
+  }
+
+  if (priv->current_picture) {
+    /* Re-use current picture where the first field was decoded */
+    picture = gst_vaapi_picture_new_field (priv->current_picture);
+    if (!picture) {
+      GST_ERROR ("failed to allocate field picture");
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
     }
-
-    status = ensure_quant_matrix(decoder, picture);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_ERROR("failed to reset quantizer matrix");
-        return status;
+  } else {
+    /* Create new picture */
+    picture = GST_VAAPI_PICTURE_NEW (MPEG2, decoder);
+    if (!picture) {
+      GST_ERROR ("failed to allocate picture");
+      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
     }
+  }
+  gst_vaapi_picture_replace (&priv->current_picture, picture);
+  gst_vaapi_picture_unref (picture);
+
+  /* Update cropping rectangle */
+  /* XXX: handle picture_display_extension() */
+  if (seq_display_ext && priv->pic_display_ext) {
+    GstVaapiRectangle *const crop_rect = &priv->crop_rect;
+    if (crop_rect->x + crop_rect->width <= priv->width &&
+        crop_rect->y + crop_rect->height <= priv->height)
+      gst_vaapi_picture_set_crop_rect (picture, crop_rect);
+  }
+
+  status = ensure_quant_matrix (decoder, picture);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_ERROR ("failed to reset quantizer matrix");
+    return status;
+  }
 
-    status = init_picture(decoder, picture);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+  status = init_picture (decoder, picture);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    fill_picture(decoder, picture);
+  fill_picture (decoder, picture);
 
-    priv->state |= GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  priv->state |= GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg2_end_frame(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_mpeg2_end_frame (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
 
-    return decode_current_picture(decoder);
+  return decode_current_picture (decoder);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg2_flush(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_mpeg2_flush (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderMpeg2 * const decoder =
-        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
-    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg2 *const decoder =
+      GST_VAAPI_DECODER_MPEG2_CAST (base_decoder);
+  GstVaapiDecoderMpeg2Private *const priv = &decoder->priv;
 
-    if (priv->dpb)
-      gst_vaapi_dpb_flush(priv->dpb);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (priv->dpb)
+    gst_vaapi_dpb_flush (priv->dpb);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static void
-gst_vaapi_decoder_mpeg2_class_init(GstVaapiDecoderMpeg2Class *klass)
+gst_vaapi_decoder_mpeg2_class_init (GstVaapiDecoderMpeg2Class * klass)
 {
-    GstVaapiMiniObjectClass * const object_class =
-        GST_VAAPI_MINI_OBJECT_CLASS(klass);
-    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
-
-    object_class->size          = sizeof(GstVaapiDecoderMpeg2);
-    object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
-
-    decoder_class->create       = gst_vaapi_decoder_mpeg2_create;
-    decoder_class->destroy      = gst_vaapi_decoder_mpeg2_destroy;
-    decoder_class->parse        = gst_vaapi_decoder_mpeg2_parse;
-    decoder_class->decode       = gst_vaapi_decoder_mpeg2_decode;
-    decoder_class->start_frame  = gst_vaapi_decoder_mpeg2_start_frame;
-    decoder_class->end_frame    = gst_vaapi_decoder_mpeg2_end_frame;
-    decoder_class->flush        = gst_vaapi_decoder_mpeg2_flush;
+  GstVaapiMiniObjectClass *const object_class =
+      GST_VAAPI_MINI_OBJECT_CLASS (klass);
+  GstVaapiDecoderClass *const decoder_class = GST_VAAPI_DECODER_CLASS (klass);
+
+  object_class->size = sizeof (GstVaapiDecoderMpeg2);
+  object_class->finalize = (GDestroyNotify) gst_vaapi_decoder_finalize;
+
+  decoder_class->create = gst_vaapi_decoder_mpeg2_create;
+  decoder_class->destroy = gst_vaapi_decoder_mpeg2_destroy;
+  decoder_class->parse = gst_vaapi_decoder_mpeg2_parse;
+  decoder_class->decode = gst_vaapi_decoder_mpeg2_decode;
+  decoder_class->start_frame = gst_vaapi_decoder_mpeg2_start_frame;
+  decoder_class->end_frame = gst_vaapi_decoder_mpeg2_end_frame;
+  decoder_class->flush = gst_vaapi_decoder_mpeg2_flush;
 }
 
 static inline const GstVaapiDecoderClass *
-gst_vaapi_decoder_mpeg2_class(void)
+gst_vaapi_decoder_mpeg2_class (void)
 {
-    static GstVaapiDecoderMpeg2Class g_class;
-    static gsize g_class_init = FALSE;
-
-    if (g_once_init_enter(&g_class_init)) {
-        gst_vaapi_decoder_mpeg2_class_init(&g_class);
-        g_once_init_leave(&g_class_init, TRUE);
-    }
-    return GST_VAAPI_DECODER_CLASS(&g_class);
+  static GstVaapiDecoderMpeg2Class g_class;
+  static gsize g_class_init = FALSE;
+
+  if (g_once_init_enter (&g_class_init)) {
+    gst_vaapi_decoder_mpeg2_class_init (&g_class);
+    g_once_init_leave (&g_class_init, TRUE);
+  }
+  return GST_VAAPI_DECODER_CLASS (&g_class);
 }
 
 /**
@@ -1596,8 +1600,8 @@ gst_vaapi_decoder_mpeg2_class(void)
  * Return value: the newly allocated #GstVaapiDecoder object
  */
 GstVaapiDecoder *
-gst_vaapi_decoder_mpeg2_new(GstVaapiDisplay *display, GstCaps *caps)
+gst_vaapi_decoder_mpeg2_new (GstVaapiDisplay * display, GstCaps * caps)
 {
-    return gst_vaapi_decoder_new(gst_vaapi_decoder_mpeg2_class(),
-        display, caps);
+  return gst_vaapi_decoder_new (gst_vaapi_decoder_mpeg2_class (),
+      display, caps);
 }
index 3afb765..9ab340d 100644 (file)
 #define GST_VAAPI_DECODER_MPEG4_CAST(decoder) \
     ((GstVaapiDecoderMpeg4 *)(decoder))
 
-typedef struct _GstVaapiDecoderMpeg4Private     GstVaapiDecoderMpeg4Private;
-typedef struct _GstVaapiDecoderMpeg4Class       GstVaapiDecoderMpeg4Class;
-
-struct _GstVaapiDecoderMpeg4Private {
-    GstVaapiProfile                 profile;
-    guint                           level; 
-    guint                           width; 
-    guint                           height;
-    guint                           fps_n;
-    guint                           fps_d; 
-    guint                           coding_type;
-    GstMpeg4VisualObjectSequence    vos_hdr;
-    GstMpeg4VisualObject            vo_hdr;
-    GstMpeg4VideoSignalType         signal_type;
-    GstMpeg4VideoObjectLayer        vol_hdr;
-    GstMpeg4VideoObjectPlane        vop_hdr;
-    GstMpeg4VideoPlaneShortHdr      svh_hdr;
-    GstMpeg4VideoPacketHdr          packet_hdr;
-    GstMpeg4SpriteTrajectory        sprite_trajectory;
-    VAIQMatrixBufferMPEG4           iq_matrix;
-    GstVaapiPicture                *curr_picture;
-    // forward reference pic
-    GstVaapiPicture                *next_picture;
-    // backward reference pic
-    GstVaapiPicture                *prev_picture;
-    GstClockTime                    seq_pts;
-    GstClockTime                    gop_pts;
-    GstClockTime                    pts_diff;
-    GstClockTime                    max_pts;
-    // anchor sync time base for any picture type, 
-    // it is time base of backward reference frame
-    GstClockTime                    last_sync_time; 
-    // time base for recent I/P/S frame, 
-    // it is time base of forward reference frame for B frame
-    GstClockTime                    sync_time; 
-
-    /* last non-b-frame time by resolution */
-    GstClockTime                    last_non_b_scale_time;
-    GstClockTime                    non_b_scale_time;
-    GstClockTime                    trb;
-    GstClockTime                    trd;
-    // temporal_reference of previous frame of svh
-    guint8                          prev_t_ref;
-    guint                           is_opened               : 1;
-    guint                           is_first_field          : 1;
-    guint                           size_changed            : 1;
-    guint                           profile_changed         : 1;
-    guint                           progressive_sequence    : 1;
-    guint                           closed_gop              : 1;
-    guint                           broken_link             : 1;
-    guint                           calculate_pts_diff      : 1;
-    guint                           is_svh                  : 1;
+typedef struct _GstVaapiDecoderMpeg4Private GstVaapiDecoderMpeg4Private;
+typedef struct _GstVaapiDecoderMpeg4Class GstVaapiDecoderMpeg4Class;
+
+struct _GstVaapiDecoderMpeg4Private
+{
+  GstVaapiProfile profile;
+  guint level;
+  guint width;
+  guint height;
+  guint fps_n;
+  guint fps_d;
+  guint coding_type;
+  GstMpeg4VisualObjectSequence vos_hdr;
+  GstMpeg4VisualObject vo_hdr;
+  GstMpeg4VideoSignalType signal_type;
+  GstMpeg4VideoObjectLayer vol_hdr;
+  GstMpeg4VideoObjectPlane vop_hdr;
+  GstMpeg4VideoPlaneShortHdr svh_hdr;
+  GstMpeg4VideoPacketHdr packet_hdr;
+  GstMpeg4SpriteTrajectory sprite_trajectory;
+  VAIQMatrixBufferMPEG4 iq_matrix;
+  GstVaapiPicture *curr_picture;
+  // forward reference pic
+  GstVaapiPicture *next_picture;
+  // backward reference pic
+  GstVaapiPicture *prev_picture;
+  GstClockTime seq_pts;
+  GstClockTime gop_pts;
+  GstClockTime pts_diff;
+  GstClockTime max_pts;
+  // anchor sync time base for any picture type,
+  // it is time base of backward reference frame
+  GstClockTime last_sync_time;
+  // time base for recent I/P/S frame,
+  // it is time base of forward reference frame for B frame
+  GstClockTime sync_time;
+
+  /* last non-b-frame time by resolution */
+  GstClockTime last_non_b_scale_time;
+  GstClockTime non_b_scale_time;
+  GstClockTime trb;
+  GstClockTime trd;
+  // temporal_reference of previous frame of svh
+  guint8 prev_t_ref;
+  guint is_opened:1;
+  guint is_first_field:1;
+  guint size_changed:1;
+  guint profile_changed:1;
+  guint progressive_sequence:1;
+  guint closed_gop:1;
+  guint broken_link:1;
+  guint calculate_pts_diff:1;
+  guint is_svh:1;
 };
 
 /**
@@ -100,10 +101,11 @@ struct _GstVaapiDecoderMpeg4Private {
  *
  * A decoder based on Mpeg4.
  */
-struct _GstVaapiDecoderMpeg4 {
-    /*< private >*/
-    GstVaapiDecoder             parent_instance;
-    GstVaapiDecoderMpeg4Private priv;
+struct _GstVaapiDecoderMpeg4
+{
+  /*< private > */
+  GstVaapiDecoder parent_instance;
+  GstVaapiDecoderMpeg4Private priv;
 };
 
 /**
@@ -111,951 +113,966 @@ struct _GstVaapiDecoderMpeg4 {
  *
  * A decoder class based on Mpeg4.
  */
-struct _GstVaapiDecoderMpeg4Class {
-    /*< private >*/
-    GstVaapiDecoderClass parent_class;
+struct _GstVaapiDecoderMpeg4Class
+{
+  /*< private > */
+  GstVaapiDecoderClass parent_class;
 };
 
 static void
-gst_vaapi_decoder_mpeg4_close(GstVaapiDecoderMpeg4 *decoder)
+gst_vaapi_decoder_mpeg4_close (GstVaapiDecoderMpeg4 * decoder)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
 
-    gst_vaapi_picture_replace(&priv->curr_picture, NULL);
-    gst_vaapi_picture_replace(&priv->next_picture, NULL);
-    gst_vaapi_picture_replace(&priv->prev_picture, NULL);
+  gst_vaapi_picture_replace (&priv->curr_picture, NULL);
+  gst_vaapi_picture_replace (&priv->next_picture, NULL);
+  gst_vaapi_picture_replace (&priv->prev_picture, NULL);
 }
 
 static gboolean
-gst_vaapi_decoder_mpeg4_open(GstVaapiDecoderMpeg4 *decoder)
+gst_vaapi_decoder_mpeg4_open (GstVaapiDecoderMpeg4 * decoder)
 {
-    GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER(decoder);
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstCaps *caps = NULL;
-    GstStructure *structure = NULL;
-
-    gst_vaapi_decoder_mpeg4_close(decoder);
-
-    priv->is_svh = 0;
-    caps = gst_vaapi_decoder_get_caps(base_decoder);
-    if (caps) {
-        structure = gst_caps_get_structure(caps, 0);
-        if (structure) {
-            if (gst_structure_has_name(structure, "video/x-h263")) {
-                priv->is_svh = 1;
-                priv->profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
-                priv->prev_t_ref = -1;
-            }
-        }
+  GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (decoder);
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstCaps *caps = NULL;
+  GstStructure *structure = NULL;
+
+  gst_vaapi_decoder_mpeg4_close (decoder);
+
+  priv->is_svh = 0;
+  caps = gst_vaapi_decoder_get_caps (base_decoder);
+  if (caps) {
+    structure = gst_caps_get_structure (caps, 0);
+    if (structure) {
+      if (gst_structure_has_name (structure, "video/x-h263")) {
+        priv->is_svh = 1;
+        priv->profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
+        priv->prev_t_ref = -1;
+      }
     }
-    return TRUE;
+  }
+  return TRUE;
 }
 
 static void
-gst_vaapi_decoder_mpeg4_destroy(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_mpeg4_destroy (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderMpeg4 * const decoder =
-        GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
+  GstVaapiDecoderMpeg4 *const decoder =
+      GST_VAAPI_DECODER_MPEG4_CAST (base_decoder);
 
-    gst_vaapi_decoder_mpeg4_close(decoder);
+  gst_vaapi_decoder_mpeg4_close (decoder);
 }
 
 static gboolean
-gst_vaapi_decoder_mpeg4_create(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_mpeg4_create (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderMpeg4 * const decoder =
-        GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-
-    priv->profile               = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
-    priv->seq_pts               = GST_CLOCK_TIME_NONE;
-    priv->gop_pts               = GST_CLOCK_TIME_NONE;
-    priv->max_pts               = GST_CLOCK_TIME_NONE;
-    priv->calculate_pts_diff    = TRUE;
-    priv->size_changed          = TRUE;
-    priv->profile_changed       = TRUE;
-    return TRUE;
+  GstVaapiDecoderMpeg4 *const decoder =
+      GST_VAAPI_DECODER_MPEG4_CAST (base_decoder);
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+
+  priv->profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
+  priv->seq_pts = GST_CLOCK_TIME_NONE;
+  priv->gop_pts = GST_CLOCK_TIME_NONE;
+  priv->max_pts = GST_CLOCK_TIME_NONE;
+  priv->calculate_pts_diff = TRUE;
+  priv->size_changed = TRUE;
+  priv->profile_changed = TRUE;
+  return TRUE;
 }
 
 static inline void
-copy_quant_matrix(guint8 dst[64], const guint8 src[64])
+copy_quant_matrix (guint8 dst[64], const guint8 src[64])
 {
-    memcpy(dst, src, 64);
+  memcpy (dst, src, 64);
 }
 
 static GstVaapiDecoderStatus
-ensure_context(GstVaapiDecoderMpeg4 *decoder)
+ensure_context (GstVaapiDecoderMpeg4 * decoder)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiProfile profiles[2];
-    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
-    guint i, n_profiles = 0;
-    gboolean reset_context = FALSE;
-
-    if (priv->profile_changed) {
-        GST_DEBUG("profile changed");
-        priv->profile_changed = FALSE;
-        reset_context         = TRUE;
-
-        profiles[n_profiles++] = priv->profile;
-        if (priv->profile == GST_VAAPI_PROFILE_MPEG4_SIMPLE)
-            profiles[n_profiles++] = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
-
-        for (i = 0; i < n_profiles; i++) {
-            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
-                                              profiles[i], entrypoint))
-                break;
-        }
-        if (i == n_profiles)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-        priv->profile = profiles[i];
-    }
-
-    if (priv->size_changed) {
-        GST_DEBUG("size changed");
-        priv->size_changed = FALSE;
-        reset_context      = TRUE;
-    }
-
-    if (reset_context) {
-        GstVaapiContextInfo info;
-
-        info.profile    = priv->profile;
-        info.entrypoint = entrypoint;
-        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
-        info.width      = priv->width;
-        info.height     = priv->height;
-        info.ref_frames = 2;
-        reset_context   = gst_vaapi_decoder_ensure_context(
-            GST_VAAPI_DECODER(decoder),
-            &info
-        );
-        if (!reset_context)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiProfile profiles[2];
+  GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+  guint i, n_profiles = 0;
+  gboolean reset_context = FALSE;
+
+  if (priv->profile_changed) {
+    GST_DEBUG ("profile changed");
+    priv->profile_changed = FALSE;
+    reset_context = TRUE;
+
+    profiles[n_profiles++] = priv->profile;
+    if (priv->profile == GST_VAAPI_PROFILE_MPEG4_SIMPLE)
+      profiles[n_profiles++] = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
+
+    for (i = 0; i < n_profiles; i++) {
+      if (gst_vaapi_display_has_decoder (GST_VAAPI_DECODER_DISPLAY (decoder),
+              profiles[i], entrypoint))
+        break;
     }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    if (i == n_profiles)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+    priv->profile = profiles[i];
+  }
+
+  if (priv->size_changed) {
+    GST_DEBUG ("size changed");
+    priv->size_changed = FALSE;
+    reset_context = TRUE;
+  }
+
+  if (reset_context) {
+    GstVaapiContextInfo info;
+
+    info.profile = priv->profile;
+    info.entrypoint = entrypoint;
+    info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+    info.width = priv->width;
+    info.height = priv->height;
+    info.ref_frames = 2;
+    reset_context =
+        gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);
+    if (!reset_context)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-ensure_quant_matrix(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
+ensure_quant_matrix (GstVaapiDecoderMpeg4 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    VAIQMatrixBufferMPEG4 *iq_matrix;
-
-    if (!priv->vol_hdr.load_intra_quant_mat && !priv->vol_hdr.load_non_intra_quant_mat) {
-            return GST_VAAPI_DECODER_STATUS_SUCCESS;
-    }
-
-    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(MPEG4, decoder);
-    if (!picture->iq_matrix) {
-        GST_DEBUG("failed to allocate IQ matrix");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    iq_matrix = picture->iq_matrix->param;
-
-    if (priv->vol_hdr.load_intra_quant_mat) {
-        iq_matrix->load_intra_quant_mat = 1;
-        copy_quant_matrix(iq_matrix->intra_quant_mat,
-                          priv->vol_hdr.intra_quant_mat);
-    }
-    else
-        iq_matrix->load_intra_quant_mat = 0;
-
-    if (priv->vol_hdr.load_non_intra_quant_mat) {
-        iq_matrix->load_non_intra_quant_mat = 1;
-        copy_quant_matrix(iq_matrix->non_intra_quant_mat,
-                      priv->vol_hdr.non_intra_quant_mat);
-    }
-    else
-        iq_matrix->load_non_intra_quant_mat = 0;
-    
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  VAIQMatrixBufferMPEG4 *iq_matrix;
 
+  if (!priv->vol_hdr.load_intra_quant_mat
+      && !priv->vol_hdr.load_non_intra_quant_mat) {
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  }
+
+  picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (MPEG4, decoder);
+  if (!picture->iq_matrix) {
+    GST_DEBUG ("failed to allocate IQ matrix");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  iq_matrix = picture->iq_matrix->param;
+
+  if (priv->vol_hdr.load_intra_quant_mat) {
+    iq_matrix->load_intra_quant_mat = 1;
+    copy_quant_matrix (iq_matrix->intra_quant_mat,
+        priv->vol_hdr.intra_quant_mat);
+  } else
+    iq_matrix->load_intra_quant_mat = 0;
+
+  if (priv->vol_hdr.load_non_intra_quant_mat) {
+    iq_matrix->load_non_intra_quant_mat = 1;
+    copy_quant_matrix (iq_matrix->non_intra_quant_mat,
+        priv->vol_hdr.non_intra_quant_mat);
+  } else
+    iq_matrix->load_non_intra_quant_mat = 0;
+
+
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline GstVaapiDecoderStatus
-render_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
+render_picture (GstVaapiDecoderMpeg4 * decoder, GstVaapiPicture * picture)
 {
-    if (!gst_vaapi_picture_output(picture))
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  if (!gst_vaapi_picture_output (picture))
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 /* decode_picture() start to decode a frame/picture
- * decode_current_picture() finishe decoding a frame/picture 
+ * decode_current_picture() finishe decoding a frame/picture
  * (commit buffer to driver for decoding)
  */
 static GstVaapiDecoderStatus
-decode_current_picture(GstVaapiDecoderMpeg4 *decoder)
+decode_current_picture (GstVaapiDecoderMpeg4 * decoder)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->curr_picture;
-    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    if (picture) {
-        if (!gst_vaapi_picture_decode(picture))
-            status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-        if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
-            if ((priv->prev_picture && priv->next_picture) ||
-                (priv->closed_gop && priv->next_picture))
-                status = render_picture(decoder, picture);
-        }
-        gst_vaapi_picture_replace(&priv->curr_picture, NULL);
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->curr_picture;
+  GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+  if (picture) {
+    if (!gst_vaapi_picture_decode (picture))
+      status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+    if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
+      if ((priv->prev_picture && priv->next_picture) ||
+          (priv->closed_gop && priv->next_picture))
+        status = render_picture (decoder, picture);
     }
-    return status;
+    gst_vaapi_picture_replace (&priv->curr_picture, NULL);
+  }
+  return status;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
+decode_sequence (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
+    guint buf_size)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstMpeg4VisualObjectSequence * const vos_hdr = &priv->vos_hdr;
-    GstVaapiProfile profile;
-
-    if (gst_mpeg4_parse_visual_object_sequence(vos_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
-        GST_DEBUG("failed to parse sequence header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-
-    priv->level = vos_hdr->level;
-    switch (vos_hdr->profile) {
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstMpeg4VisualObjectSequence *const vos_hdr = &priv->vos_hdr;
+  GstVaapiProfile profile;
+
+  if (gst_mpeg4_parse_visual_object_sequence (vos_hdr, buf,
+          buf_size) != GST_MPEG4_PARSER_OK) {
+    GST_DEBUG ("failed to parse sequence header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+
+  priv->level = vos_hdr->level;
+  switch (vos_hdr->profile) {
     case GST_MPEG4_PROFILE_SIMPLE:
-        profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
-        break;
+      profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
+      break;
     case GST_MPEG4_PROFILE_ADVANCED_SIMPLE:
-    case GST_MPEG4_PROFILE_SIMPLE_SCALABLE: /* shared profile with ADVANCED_SIMPLE */
-        profile = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
-        break;
+    case GST_MPEG4_PROFILE_SIMPLE_SCALABLE:    /* shared profile with ADVANCED_SIMPLE */
+      profile = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
+      break;
     default:
-        GST_DEBUG("unsupported profile %d", vos_hdr->profile);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    }
-    if (priv->profile != profile) {
-        priv->profile = profile;
-        priv->profile_changed = TRUE;
-    }
-    priv->seq_pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
-    priv->size_changed          = TRUE;
-
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      GST_DEBUG ("unsupported profile %d", vos_hdr->profile);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  }
+  if (priv->profile != profile) {
+    priv->profile = profile;
+    priv->profile_changed = TRUE;
+  }
+  priv->seq_pts = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
+  priv->size_changed = TRUE;
+
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_end(GstVaapiDecoderMpeg4 *decoder)
+decode_sequence_end (GstVaapiDecoderMpeg4 * decoder)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-
-    if (priv->curr_picture) {
-        status = decode_current_picture(decoder);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-        status = render_picture(decoder, priv->curr_picture);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-    }
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
 
-    if (priv->next_picture) {
-        status = render_picture(decoder, priv->next_picture);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-    }
-    return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
+  if (priv->curr_picture) {
+    status = decode_current_picture (decoder);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+    status = render_picture (decoder, priv->curr_picture);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+
+  if (priv->next_picture) {
+    status = render_picture (decoder, priv->next_picture);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+  return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
 }
 
 static GstVaapiDecoderStatus
-decode_visual_object(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
+decode_visual_object (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
+    guint buf_size)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstMpeg4VisualObject * vo_hdr = &priv->vo_hdr;
-    GstMpeg4VideoSignalType * signal_type = &priv->signal_type;
-
-    if (gst_mpeg4_parse_visual_object (vo_hdr, signal_type, buf, buf_size) != GST_MPEG4_PARSER_OK) {
-        GST_DEBUG("failed to parse visual object");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-
-    /* XXX: video_signal_type isn't used for decoding */
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstMpeg4VisualObject *vo_hdr = &priv->vo_hdr;
+  GstMpeg4VideoSignalType *signal_type = &priv->signal_type;
+
+  if (gst_mpeg4_parse_visual_object (vo_hdr, signal_type, buf,
+          buf_size) != GST_MPEG4_PARSER_OK) {
+    GST_DEBUG ("failed to parse visual object");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+
+  /* XXX: video_signal_type isn't used for decoding */
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_video_object_layer(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
+decode_video_object_layer (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
+    guint buf_size)
 {
-    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstMpeg4VisualObject * vo_hdr = &priv->vo_hdr;
-    GstMpeg4VideoObjectLayer * vol_hdr = &priv->vol_hdr;
-
-    if (gst_mpeg4_parse_video_object_layer (vol_hdr, vo_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
-        GST_DEBUG("failed to parse video object layer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
+  GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (decoder);
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstMpeg4VisualObject *vo_hdr = &priv->vo_hdr;
+  GstMpeg4VideoObjectLayer *vol_hdr = &priv->vol_hdr;
 
-    priv->width  = vol_hdr->width;
-    priv->height = vol_hdr->height;
+  if (gst_mpeg4_parse_video_object_layer (vol_hdr, vo_hdr, buf,
+          buf_size) != GST_MPEG4_PARSER_OK) {
+    GST_DEBUG ("failed to parse video object layer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
 
-    priv->progressive_sequence  = !vol_hdr->interlaced;
+  priv->width = vol_hdr->width;
+  priv->height = vol_hdr->height;
 
-    if (vol_hdr->fixed_vop_rate) {
-        priv->fps_n = vol_hdr->vop_time_increment_resolution;
-        priv->fps_d = vol_hdr->fixed_vop_time_increment;
-        gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);    
-    }
+  priv->progressive_sequence = !vol_hdr->interlaced;
 
-    gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder, priv->vol_hdr.par_width, priv->vol_hdr.par_height);
-    gst_vaapi_decoder_set_picture_size(base_decoder, priv->width, priv->height);
+  if (vol_hdr->fixed_vop_rate) {
+    priv->fps_n = vol_hdr->vop_time_increment_resolution;
+    priv->fps_d = vol_hdr->fixed_vop_time_increment;
+    gst_vaapi_decoder_set_framerate (base_decoder, priv->fps_n, priv->fps_d);
+  }
 
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_decoder_set_pixel_aspect_ratio (base_decoder,
+      priv->vol_hdr.par_width, priv->vol_hdr.par_height);
+  gst_vaapi_decoder_set_picture_size (base_decoder, priv->width, priv->height);
+
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_gop(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
+decode_gop (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf, guint buf_size)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstMpeg4GroupOfVOP gop;
-    GstClockTime gop_time;
-
-    if (buf_size >4) {
-        if (gst_mpeg4_parse_group_of_vop(&gop, buf, buf_size) != GST_MPEG4_PARSER_OK) {
-        GST_DEBUG("failed to parse GOP");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-        }
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstMpeg4GroupOfVOP gop;
+  GstClockTime gop_time;
+
+  if (buf_size > 4) {
+    if (gst_mpeg4_parse_group_of_vop (&gop, buf,
+            buf_size) != GST_MPEG4_PARSER_OK) {
+      GST_DEBUG ("failed to parse GOP");
+      return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
     }
-    else {
-        gop.closed          = 1;
-        gop.broken_link     = 0;
-        gop.hours           = 0;
-        gop.minutes         = 0;
-        gop.seconds         = 0;
-    }
-
-    priv->closed_gop  = gop.closed;
-    priv->broken_link = gop.broken_link;
-
-    GST_DEBUG("GOP %02u:%02u:%02u (closed_gop %d, broken_link %d)",
-              gop.hours, gop.minutes, gop.seconds,
-              priv->closed_gop, priv->broken_link);
-
-    gop_time             = gop.hours * 3600 + gop.minutes * 60 + gop.seconds;
-    priv->last_sync_time = gop_time;
-    priv->sync_time      = gop_time;
-    
-    if (priv->gop_pts != GST_CLOCK_TIME_NONE)
-        priv->pts_diff += gop_time * GST_SECOND - priv->gop_pts;
-    priv->gop_pts = gop_time * GST_SECOND;
-    priv->calculate_pts_diff = TRUE;
-    priv->is_first_field = TRUE;
-
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  } else {
+    gop.closed = 1;
+    gop.broken_link = 0;
+    gop.hours = 0;
+    gop.minutes = 0;
+    gop.seconds = 0;
+  }
+
+  priv->closed_gop = gop.closed;
+  priv->broken_link = gop.broken_link;
+
+  GST_DEBUG ("GOP %02u:%02u:%02u (closed_gop %d, broken_link %d)",
+      gop.hours, gop.minutes, gop.seconds, priv->closed_gop, priv->broken_link);
+
+  gop_time = gop.hours * 3600 + gop.minutes * 60 + gop.seconds;
+  priv->last_sync_time = gop_time;
+  priv->sync_time = gop_time;
+
+  if (priv->gop_pts != GST_CLOCK_TIME_NONE)
+    priv->pts_diff += gop_time * GST_SECOND - priv->gop_pts;
+  priv->gop_pts = gop_time * GST_SECOND;
+  priv->calculate_pts_diff = TRUE;
+  priv->is_first_field = TRUE;
+
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 void
-calculate_pts_diff(GstVaapiDecoderMpeg4 *decoder,
-                      GstMpeg4VideoObjectLayer *vol_hdr,
-                      GstMpeg4VideoObjectPlane *vop_hdr)
-{
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstClockTime frame_timestamp;
-
-    frame_timestamp = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
-    if (frame_timestamp && frame_timestamp != GST_CLOCK_TIME_NONE) {
-        /* Buffer with timestamp */
-        if (priv->max_pts != GST_CLOCK_TIME_NONE &&
-            frame_timestamp < priv->max_pts) {
-            frame_timestamp = priv->max_pts +
-                gst_util_uint64_scale((vol_hdr->fixed_vop_rate ?
-                                       vol_hdr->fixed_vop_time_increment : 1),
-                                      GST_SECOND,
-                                      vol_hdr->vop_time_increment_resolution);
-        }
-    } else {
-        /* Buffer without timestamp set */
-        if (priv->max_pts == GST_CLOCK_TIME_NONE) /* first buffer */
-            frame_timestamp = 0;
-        else {
-            GstClockTime tmp_pts;
-            tmp_pts = priv->pts_diff + priv->gop_pts +
-                vop_hdr->modulo_time_base * GST_SECOND +
-                gst_util_uint64_scale(vop_hdr->time_increment,
-                                      GST_SECOND,
-                                      vol_hdr->vop_time_increment_resolution);
-            if (tmp_pts > priv->max_pts)
-                frame_timestamp = tmp_pts;
-            else
-                frame_timestamp = priv->max_pts +
-                    gst_util_uint64_scale((vol_hdr->fixed_vop_rate ?
-                                           vol_hdr->fixed_vop_time_increment : 1),
-                                           GST_SECOND,
-                                          vol_hdr->vop_time_increment_resolution);
-        }
-    }
-
-    priv->pts_diff = frame_timestamp -
-        (priv->gop_pts + vop_hdr->modulo_time_base * GST_SECOND +
-         gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND,
-                               vol_hdr->vop_time_increment_resolution));
-}
-static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
+calculate_pts_diff (GstVaapiDecoderMpeg4 * decoder,
+    GstMpeg4VideoObjectLayer * vol_hdr, GstMpeg4VideoObjectPlane * vop_hdr)
 {
-    GstMpeg4ParseResult parser_result = GST_MPEG4_PARSER_OK;
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstMpeg4VideoObjectPlane * const vop_hdr = &priv->vop_hdr;
-    GstMpeg4VideoObjectLayer * const vol_hdr = &priv->vol_hdr;
-    GstMpeg4SpriteTrajectory * const sprite_trajectory = &priv->sprite_trajectory;
-    GstVaapiPicture *picture;
-    GstVaapiDecoderStatus status;
-    GstClockTime pts;
-
-    // context depends on priv->width and priv->height, so we move parse_vop a little earlier
-    if (priv->is_svh) {
-        parser_result = gst_mpeg4_parse_video_plane_short_header(&priv->svh_hdr, buf, buf_size);
-
-    }
-    else {
-        parser_result = gst_mpeg4_parse_video_object_plane(vop_hdr, sprite_trajectory, vol_hdr, buf, buf_size);
-        /* Need to skip this frame if VOP was not coded */
-        if (GST_MPEG4_PARSER_OK == parser_result && !vop_hdr->coded)
-            return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
-    }
-
-    if (parser_result != GST_MPEG4_PARSER_OK) {
-        GST_DEBUG("failed to parse picture header");
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-
-    if (priv->is_svh) {
-        priv->width = priv->svh_hdr.vop_width;
-        priv->height = priv->svh_hdr.vop_height;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstClockTime frame_timestamp;
+
+  frame_timestamp = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
+  if (frame_timestamp && frame_timestamp != GST_CLOCK_TIME_NONE) {
+    /* Buffer with timestamp */
+    if (priv->max_pts != GST_CLOCK_TIME_NONE && frame_timestamp < priv->max_pts) {
+      frame_timestamp = priv->max_pts +
+          gst_util_uint64_scale ((vol_hdr->fixed_vop_rate ?
+              vol_hdr->fixed_vop_time_increment : 1),
+          GST_SECOND, vol_hdr->vop_time_increment_resolution);
     }
+  } else {
+    /* Buffer without timestamp set */
+    if (priv->max_pts == GST_CLOCK_TIME_NONE)   /* first buffer */
+      frame_timestamp = 0;
     else {
-        if (!vop_hdr->width && !vop_hdr->height) {
-            vop_hdr->width = vol_hdr->width;
-            vop_hdr->height = vol_hdr->height;
-        }
-        priv->width = vop_hdr->width;
-        priv->height = vop_hdr->height;
+      GstClockTime tmp_pts;
+      tmp_pts = priv->pts_diff + priv->gop_pts +
+          vop_hdr->modulo_time_base * GST_SECOND +
+          gst_util_uint64_scale (vop_hdr->time_increment,
+          GST_SECOND, vol_hdr->vop_time_increment_resolution);
+      if (tmp_pts > priv->max_pts)
+        frame_timestamp = tmp_pts;
+      else
+        frame_timestamp = priv->max_pts +
+            gst_util_uint64_scale ((vol_hdr->fixed_vop_rate ?
+                vol_hdr->fixed_vop_time_increment : 1),
+            GST_SECOND, vol_hdr->vop_time_increment_resolution);
     }
+  }
 
-    status = ensure_context(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_DEBUG("failed to reset context");
-        return status;
-    }
-
-    if (priv->curr_picture) {
-        status = decode_current_picture(decoder);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-    }
+  priv->pts_diff = frame_timestamp -
+      (priv->gop_pts + vop_hdr->modulo_time_base * GST_SECOND +
+      gst_util_uint64_scale (vop_hdr->time_increment, GST_SECOND,
+          vol_hdr->vop_time_increment_resolution));
+}
 
-    priv->curr_picture = GST_VAAPI_PICTURE_NEW(MPEG4, decoder);
-    if (!priv->curr_picture) {
-        GST_DEBUG("failed to allocate picture");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+static GstVaapiDecoderStatus
+decode_picture (GstVaapiDecoderMpeg4 * decoder, const guint8 * buf,
+    guint buf_size)
+{
+  GstMpeg4ParseResult parser_result = GST_MPEG4_PARSER_OK;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstMpeg4VideoObjectPlane *const vop_hdr = &priv->vop_hdr;
+  GstMpeg4VideoObjectLayer *const vol_hdr = &priv->vol_hdr;
+  GstMpeg4SpriteTrajectory *const sprite_trajectory = &priv->sprite_trajectory;
+  GstVaapiPicture *picture;
+  GstVaapiDecoderStatus status;
+  GstClockTime pts;
+
+  // context depends on priv->width and priv->height, so we move parse_vop a little earlier
+  if (priv->is_svh) {
+    parser_result =
+        gst_mpeg4_parse_video_plane_short_header (&priv->svh_hdr, buf,
+        buf_size);
+
+  } else {
+    parser_result =
+        gst_mpeg4_parse_video_object_plane (vop_hdr, sprite_trajectory, vol_hdr,
+        buf, buf_size);
+    /* Need to skip this frame if VOP was not coded */
+    if (GST_MPEG4_PARSER_OK == parser_result && !vop_hdr->coded)
+      return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+  }
+
+  if (parser_result != GST_MPEG4_PARSER_OK) {
+    GST_DEBUG ("failed to parse picture header");
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+
+  if (priv->is_svh) {
+    priv->width = priv->svh_hdr.vop_width;
+    priv->height = priv->svh_hdr.vop_height;
+  } else {
+    if (!vop_hdr->width && !vop_hdr->height) {
+      vop_hdr->width = vol_hdr->width;
+      vop_hdr->height = vol_hdr->height;
     }
-    picture = priv->curr_picture;
+    priv->width = vop_hdr->width;
+    priv->height = vop_hdr->height;
+  }
 
-    status = ensure_quant_matrix(decoder, picture);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_DEBUG("failed to reset quantizer matrix");
-        return status;
-    }
+  status = ensure_context (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_DEBUG ("failed to reset context");
+    return status;
+  }
 
-    /* 7.6.7 Temporal prediction structure
-     * forward reference frame     B B B B B B      backward reference frame
-     *            |                                              |
-     *  nearest I/P/S in the past with vop_coded ==1             |
-     *                         nearest I/P/S in the future with any vop_coded
-     * fixme, it said that B frame shouldn't use backward reference frame 
-     *        when backward reference frame coded is 0
-     */
-    if (priv->is_svh) {
-        priv->coding_type = priv->svh_hdr.picture_coding_type;
-    }
-    else {
-        priv->coding_type = priv->vop_hdr.coding_type;
-    }
-    switch (priv->coding_type) {
+  if (priv->curr_picture) {
+    status = decode_current_picture (decoder);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+
+  priv->curr_picture = GST_VAAPI_PICTURE_NEW (MPEG4, decoder);
+  if (!priv->curr_picture) {
+    GST_DEBUG ("failed to allocate picture");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  picture = priv->curr_picture;
+
+  status = ensure_quant_matrix (decoder, picture);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_DEBUG ("failed to reset quantizer matrix");
+    return status;
+  }
+
+  /* 7.6.7 Temporal prediction structure
+   * forward reference frame     B B B B B B      backward reference frame
+   *            |                                              |
+   *  nearest I/P/S in the past with vop_coded ==1             |
+   *                         nearest I/P/S in the future with any vop_coded
+   * fixme, it said that B frame shouldn't use backward reference frame
+   *        when backward reference frame coded is 0
+   */
+  if (priv->is_svh) {
+    priv->coding_type = priv->svh_hdr.picture_coding_type;
+  } else {
+    priv->coding_type = priv->vop_hdr.coding_type;
+  }
+  switch (priv->coding_type) {
     case GST_MPEG4_I_VOP:
-        picture->type = GST_VAAPI_PICTURE_TYPE_I;
-        if (priv->is_svh || vop_hdr->coded) 
-            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_I;
+      if (priv->is_svh || vop_hdr->coded)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      break;
     case GST_MPEG4_P_VOP:
-        picture->type = GST_VAAPI_PICTURE_TYPE_P;
-        if (priv->is_svh || vop_hdr->coded) 
-            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_P;
+      if (priv->is_svh || vop_hdr->coded)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      break;
     case GST_MPEG4_B_VOP:
-        picture->type = GST_VAAPI_PICTURE_TYPE_B;
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_B;
+      break;
     case GST_MPEG4_S_VOP:
-        picture->type = GST_VAAPI_PICTURE_TYPE_S;
-        // see 3.175 reference VOP
-        if (vop_hdr->coded) 
-            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_S;
+      // see 3.175 reference VOP
+      if (vop_hdr->coded)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      break;
     default:
-        GST_DEBUG("unsupported picture type %d", priv->coding_type);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
-
-    if (!priv->is_svh && !vop_hdr->coded) {
-        status = render_picture(decoder, priv->prev_picture);
-        return status;
-    }
+      GST_DEBUG ("unsupported picture type %d", priv->coding_type);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    if (priv->is_svh) {
-        guint temp_ref = priv->svh_hdr.temporal_reference;
-        guint delta_ref;
+  if (!priv->is_svh && !vop_hdr->coded) {
+    status = render_picture (decoder, priv->prev_picture);
+    return status;
+  }
 
-        if (temp_ref < priv->prev_t_ref) {
-            temp_ref += 256;
-        }
-        delta_ref = temp_ref - priv->prev_t_ref;
+  if (priv->is_svh) {
+    guint temp_ref = priv->svh_hdr.temporal_reference;
+    guint delta_ref;
 
-        pts = priv->sync_time;
-        // see temporal_reference definition in spec, 30000/1001Hz
-        pts += gst_util_uint64_scale(delta_ref, GST_SECOND*1001, 30000);
-        priv->sync_time = pts;
-        priv->prev_t_ref = priv->svh_hdr.temporal_reference;
+    if (temp_ref < priv->prev_t_ref) {
+      temp_ref += 256;
     }
-    else {
-        /* Update priv->pts_diff */
-        if (priv->calculate_pts_diff) {
-            calculate_pts_diff(decoder, vol_hdr, vop_hdr);
-            priv->calculate_pts_diff = FALSE;
-        }
-
-        /* Update presentation time, 6.3.5 */
-        if(vop_hdr->coding_type != GST_MPEG4_B_VOP) {
-            // increment basing on decoding order
-            priv->last_sync_time = priv->sync_time;
-            priv->sync_time = priv->last_sync_time + vop_hdr->modulo_time_base;
-            pts = priv->sync_time * GST_SECOND;
-            pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
-            priv->last_non_b_scale_time = priv->non_b_scale_time;
-            priv->non_b_scale_time = priv->sync_time * vol_hdr->vop_time_increment_resolution + vop_hdr->time_increment;
-            priv->trd  = priv->non_b_scale_time - priv->last_non_b_scale_time;
-        }
-        else {
-            // increment basing on display oder
-            pts = (priv->last_sync_time + vop_hdr->modulo_time_base) * GST_SECOND;
-            pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
-            priv->trb = (priv->last_sync_time + vop_hdr->modulo_time_base) * vol_hdr->vop_time_increment_resolution +
-                vop_hdr->time_increment - priv->last_non_b_scale_time;
-        }
+    delta_ref = temp_ref - priv->prev_t_ref;
+
+    pts = priv->sync_time;
+    // see temporal_reference definition in spec, 30000/1001Hz
+    pts += gst_util_uint64_scale (delta_ref, GST_SECOND * 1001, 30000);
+    priv->sync_time = pts;
+    priv->prev_t_ref = priv->svh_hdr.temporal_reference;
+  } else {
+    /* Update priv->pts_diff */
+    if (priv->calculate_pts_diff) {
+      calculate_pts_diff (decoder, vol_hdr, vop_hdr);
+      priv->calculate_pts_diff = FALSE;
     }
-    picture->pts = pts + priv->pts_diff;
-    if (priv->max_pts == GST_CLOCK_TIME_NONE || priv->max_pts < picture->pts)
-        priv->max_pts = picture->pts;
-
-    /* Update reference pictures */
-    /* XXX: consider priv->vol_hdr.low_delay, consider packed video frames for DivX/XviD */
-    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
-        if (priv->next_picture)
-            status = render_picture(decoder, priv->next_picture);
-        gst_vaapi_picture_replace(&priv->prev_picture, priv->next_picture);
-        gst_vaapi_picture_replace(&priv->next_picture, picture);
+
+    /* Update presentation time, 6.3.5 */
+    if (vop_hdr->coding_type != GST_MPEG4_B_VOP) {
+      // increment based on decoding order
+      priv->last_sync_time = priv->sync_time;
+      priv->sync_time = priv->last_sync_time + vop_hdr->modulo_time_base;
+      pts = priv->sync_time * GST_SECOND;
+      pts +=
+          gst_util_uint64_scale (vop_hdr->time_increment, GST_SECOND,
+          vol_hdr->vop_time_increment_resolution);
+      priv->last_non_b_scale_time = priv->non_b_scale_time;
+      priv->non_b_scale_time =
+          priv->sync_time * vol_hdr->vop_time_increment_resolution +
+          vop_hdr->time_increment;
+      priv->trd = priv->non_b_scale_time - priv->last_non_b_scale_time;
+    } else {
+      // increment based on display order
+      pts = (priv->last_sync_time + vop_hdr->modulo_time_base) * GST_SECOND;
+      pts +=
+          gst_util_uint64_scale (vop_hdr->time_increment, GST_SECOND,
+          vol_hdr->vop_time_increment_resolution);
+      priv->trb =
+          (priv->last_sync_time +
+          vop_hdr->modulo_time_base) * vol_hdr->vop_time_increment_resolution +
+          vop_hdr->time_increment - priv->last_non_b_scale_time;
     }
-    return status;
+  }
+  picture->pts = pts + priv->pts_diff;
+  if (priv->max_pts == GST_CLOCK_TIME_NONE || priv->max_pts < picture->pts)
+    priv->max_pts = picture->pts;
+
+  /* Update reference pictures */
+  /* XXX: consider priv->vol_hdr.low_delay, consider packed video frames for DivX/XviD */
+  if (GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
+    if (priv->next_picture)
+      status = render_picture (decoder, priv->next_picture);
+    gst_vaapi_picture_replace (&priv->prev_picture, priv->next_picture);
+    gst_vaapi_picture_replace (&priv->next_picture, picture);
+  }
+  return status;
 }
 
 static inline guint
-get_vop_coding_type(GstVaapiPicture *picture)
+get_vop_coding_type (GstVaapiPicture * picture)
 {
-    return picture->type - GST_VAAPI_PICTURE_TYPE_I;
+  return picture->type - GST_VAAPI_PICTURE_TYPE_I;
 }
 
 static gboolean
-fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
+fill_picture (GstVaapiDecoderMpeg4 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    VAPictureParameterBufferMPEG4 * const pic_param = picture->param;
-    GstMpeg4VideoObjectPlane * const vop_hdr = &priv->vop_hdr;
-
-    /* Fill in VAPictureParameterBufferMPEG4 */
-    pic_param->forward_reference_picture                        = VA_INVALID_ID;
-    pic_param->backward_reference_picture                       = VA_INVALID_ID;
-
-    pic_param->vol_fields.value                                 = 0;
-    pic_param->vop_fields.value                                 = 0;
-    if(priv->is_svh) {
-        // vol_hdr Parameters
-        pic_param->vol_fields.bits.short_video_header           = 1; 
-        // does the following vol_hdr parameters matter for short video header?
-        pic_param->vol_fields.bits.chroma_format                = 1; // I420, see table 6-15. 
-        pic_param->vol_fields.bits.interlaced                   = 0; 
-        pic_param->vol_fields.bits.obmc_disable                 = 1;
-        pic_param->vol_fields.bits.sprite_enable                = 0;
-        pic_param->vol_fields.bits.sprite_warping_accuracy      = 0;
-        pic_param->vol_fields.bits.quant_type                   = 0; //method 1; $7.4.4
-        pic_param->vol_fields.bits.quarter_sample               = 0; 
-        pic_param->vol_fields.bits.data_partitioned             = 0; 
-        pic_param->vol_fields.bits.reversible_vlc               = 0; 
-        pic_param->vol_fields.bits.resync_marker_disable        = 1; 
-        pic_param->no_of_sprite_warping_points                  = 0; 
-        pic_param->quant_precision                              = 5;
-        // VOP parameters    
-        pic_param->vop_width                                    = priv->svh_hdr.vop_width;
-        pic_param->vop_height                                   = priv->svh_hdr.vop_height;
-        pic_param->vop_fields.bits.vop_coding_type              = priv->svh_hdr.picture_coding_type;
-        pic_param->vop_time_increment_resolution                = priv->vol_hdr.vop_time_increment_resolution; 
-        
-        pic_param->num_gobs_in_vop                              = priv->svh_hdr.num_gobs_in_vop;
-        pic_param->num_macroblocks_in_gob                       = priv->svh_hdr.num_macroblocks_in_gob;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  VAPictureParameterBufferMPEG4 *const pic_param = picture->param;
+  GstMpeg4VideoObjectPlane *const vop_hdr = &priv->vop_hdr;
+
+  /* Fill in VAPictureParameterBufferMPEG4 */
+  pic_param->forward_reference_picture = VA_INVALID_ID;
+  pic_param->backward_reference_picture = VA_INVALID_ID;
+
+  pic_param->vol_fields.value = 0;
+  pic_param->vop_fields.value = 0;
+  if (priv->is_svh) {
+    // vol_hdr Parameters
+    pic_param->vol_fields.bits.short_video_header = 1;
+    // do the following vol_hdr parameters matter for short video header?
+    pic_param->vol_fields.bits.chroma_format = 1;       // I420, see table 6-15.
+    pic_param->vol_fields.bits.interlaced = 0;
+    pic_param->vol_fields.bits.obmc_disable = 1;
+    pic_param->vol_fields.bits.sprite_enable = 0;
+    pic_param->vol_fields.bits.sprite_warping_accuracy = 0;
+    pic_param->vol_fields.bits.quant_type = 0;  //method 1; $7.4.4
+    pic_param->vol_fields.bits.quarter_sample = 0;
+    pic_param->vol_fields.bits.data_partitioned = 0;
+    pic_param->vol_fields.bits.reversible_vlc = 0;
+    pic_param->vol_fields.bits.resync_marker_disable = 1;
+    pic_param->no_of_sprite_warping_points = 0;
+    pic_param->quant_precision = 5;
+    // VOP parameters
+    pic_param->vop_width = priv->svh_hdr.vop_width;
+    pic_param->vop_height = priv->svh_hdr.vop_height;
+    pic_param->vop_fields.bits.vop_coding_type =
+        priv->svh_hdr.picture_coding_type;
+    pic_param->vop_time_increment_resolution =
+        priv->vol_hdr.vop_time_increment_resolution;
+
+    pic_param->num_gobs_in_vop = priv->svh_hdr.num_gobs_in_vop;
+    pic_param->num_macroblocks_in_gob = priv->svh_hdr.num_macroblocks_in_gob;
+  } else {
+    int i;
+
+    // VOL parameters
+    pic_param->vol_fields.bits.short_video_header = 0;
+    pic_param->vol_fields.bits.chroma_format = priv->vol_hdr.chroma_format;
+    pic_param->vol_fields.bits.interlaced = priv->vol_hdr.interlaced;
+    pic_param->vol_fields.bits.obmc_disable = priv->vol_hdr.obmc_disable;
+    pic_param->vol_fields.bits.sprite_enable = priv->vol_hdr.sprite_enable;
+    pic_param->vol_fields.bits.sprite_warping_accuracy =
+        priv->vol_hdr.sprite_warping_accuracy;
+    pic_param->vol_fields.bits.quant_type = priv->vol_hdr.quant_type;
+    pic_param->vol_fields.bits.quarter_sample = priv->vol_hdr.quarter_sample;
+    pic_param->vol_fields.bits.data_partitioned =
+        priv->vol_hdr.data_partitioned;
+    pic_param->vol_fields.bits.reversible_vlc = priv->vol_hdr.reversible_vlc;
+    pic_param->vol_fields.bits.resync_marker_disable =
+        priv->vol_hdr.resync_marker_disable;
+    pic_param->no_of_sprite_warping_points =
+        priv->vol_hdr.no_of_sprite_warping_points;
+
+    for (i = 0; i < 3 && i < priv->vol_hdr.no_of_sprite_warping_points; i++) {
+      pic_param->sprite_trajectory_du[i] =
+          priv->sprite_trajectory.vop_ref_points[i];
+      pic_param->sprite_trajectory_dv[i] =
+          priv->sprite_trajectory.sprite_ref_points[i];
     }
-    else {
-        int i;
-
-        // VOL parameters
-        pic_param->vol_fields.bits.short_video_header           = 0; 
-        pic_param->vol_fields.bits.chroma_format                = priv->vol_hdr.chroma_format;
-        pic_param->vol_fields.bits.interlaced                   = priv->vol_hdr.interlaced;
-        pic_param->vol_fields.bits.obmc_disable                 = priv->vol_hdr.obmc_disable;
-        pic_param->vol_fields.bits.sprite_enable                = priv->vol_hdr.sprite_enable;
-        pic_param->vol_fields.bits.sprite_warping_accuracy      = priv->vol_hdr.sprite_warping_accuracy; 
-        pic_param->vol_fields.bits.quant_type                   = priv->vol_hdr.quant_type;
-        pic_param->vol_fields.bits.quarter_sample               = priv->vol_hdr.quarter_sample;
-        pic_param->vol_fields.bits.data_partitioned             = priv->vol_hdr.data_partitioned;
-        pic_param->vol_fields.bits.reversible_vlc               = priv->vol_hdr.reversible_vlc;
-        pic_param->vol_fields.bits.resync_marker_disable        = priv->vol_hdr.resync_marker_disable;
-        pic_param->no_of_sprite_warping_points                  = priv->vol_hdr.no_of_sprite_warping_points;
-
-        for (i=0; i<3 && i<priv->vol_hdr.no_of_sprite_warping_points ; i++) {
-            pic_param->sprite_trajectory_du[i]                  = priv->sprite_trajectory.vop_ref_points[i];
-            pic_param->sprite_trajectory_dv[i]                  = priv->sprite_trajectory.sprite_ref_points[i];
-        }
-        pic_param->quant_precision                              = priv->vol_hdr.quant_precision;
-        
-        // VOP parameters    
-        pic_param->vop_width                                    = vop_hdr->width;
-        pic_param->vop_height                                   = vop_hdr->height;
-        pic_param->vop_fields.bits.vop_coding_type              = vop_hdr->coding_type;
-        pic_param->vop_fields.bits.vop_rounding_type            = vop_hdr->rounding_type;
-        pic_param->vop_fields.bits.intra_dc_vlc_thr             = vop_hdr->intra_dc_vlc_thr;
-        pic_param->vop_fields.bits.top_field_first              = vop_hdr->top_field_first;
-        pic_param->vop_fields.bits.alternate_vertical_scan_flag = vop_hdr->alternate_vertical_scan_flag;
-
-        pic_param->vop_fcode_forward                            = vop_hdr->fcode_forward;
-        pic_param->vop_fcode_backward                           = vop_hdr->fcode_backward;
-        pic_param->vop_time_increment_resolution                = priv->vol_hdr.vop_time_increment_resolution;
-    }    
-
-    pic_param->TRB = 0;
-    pic_param->TRD = 0;
-    switch (priv->coding_type) {
+    pic_param->quant_precision = priv->vol_hdr.quant_precision;
+
+    // VOP parameters
+    pic_param->vop_width = vop_hdr->width;
+    pic_param->vop_height = vop_hdr->height;
+    pic_param->vop_fields.bits.vop_coding_type = vop_hdr->coding_type;
+    pic_param->vop_fields.bits.vop_rounding_type = vop_hdr->rounding_type;
+    pic_param->vop_fields.bits.intra_dc_vlc_thr = vop_hdr->intra_dc_vlc_thr;
+    pic_param->vop_fields.bits.top_field_first = vop_hdr->top_field_first;
+    pic_param->vop_fields.bits.alternate_vertical_scan_flag =
+        vop_hdr->alternate_vertical_scan_flag;
+
+    pic_param->vop_fcode_forward = vop_hdr->fcode_forward;
+    pic_param->vop_fcode_backward = vop_hdr->fcode_backward;
+    pic_param->vop_time_increment_resolution =
+        priv->vol_hdr.vop_time_increment_resolution;
+  }
+
+  pic_param->TRB = 0;
+  pic_param->TRD = 0;
+  switch (priv->coding_type) {
     case GST_MPEG4_B_VOP:
-        pic_param->TRB                                          = priv->trb;
-        pic_param->backward_reference_picture                   = priv->next_picture->surface_id;
-        pic_param->vop_fields.bits.backward_reference_vop_coding_type = get_vop_coding_type(priv->next_picture);
-        // fall-through
+      pic_param->TRB = priv->trb;
+      pic_param->backward_reference_picture = priv->next_picture->surface_id;
+      pic_param->vop_fields.bits.backward_reference_vop_coding_type =
+          get_vop_coding_type (priv->next_picture);
+      // fall-through
     case GST_MPEG4_P_VOP:
-        pic_param->TRD                                          = priv->trd;
-        if (priv->prev_picture)
-            pic_param->forward_reference_picture                = priv->prev_picture->surface_id;
-        break;
-    }
-
-    if (priv->vol_hdr.interlaced) {
-        priv->is_first_field ^= 1;
-    }
-    return TRUE;
+      pic_param->TRD = priv->trd;
+      if (priv->prev_picture)
+        pic_param->forward_reference_picture = priv->prev_picture->surface_id;
+      break;
+  }
+
+  if (priv->vol_hdr.interlaced) {
+    priv->is_first_field ^= 1;
+  }
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-decode_slice(
-    GstVaapiDecoderMpeg4 *decoder,
-    const guint8          *buf,
-    guint                 buf_size,
-    gboolean              has_packet_header
-)
+decode_slice (GstVaapiDecoderMpeg4 * decoder,
+    const guint8 * buf, guint buf_size, gboolean has_packet_header)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->curr_picture;
-    GstVaapiSlice *slice;
-    VASliceParameterBufferMPEG4 *slice_param;
-
-    GST_DEBUG("decoder silce: %p, %u bytes)", buf, buf_size);
-
-    // has_packet_header is ture for the 2+ slice
-    if (!has_packet_header && !fill_picture(decoder, picture))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-
-    slice = GST_VAAPI_SLICE_NEW(MPEG4, decoder, buf, buf_size);
-    if (!slice) {
-        GST_DEBUG("failed to allocate slice");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    gst_vaapi_picture_add_slice(picture, slice);
-
-    /* Fill in VASliceParameterBufferMPEG4 */
-    slice_param = slice->param;
-    if (priv->is_svh) {
-        slice_param->macroblock_offset         = (priv->svh_hdr.size)%8;
-        slice_param->macroblock_number         = 0; 
-        // the header of first gob_layer is empty (gob_header_empty=1), use vop_quant
-        slice_param->quant_scale               = priv->svh_hdr.vop_quant; 
-    }
-    else {
-        if (has_packet_header) {
-            slice_param->macroblock_offset     = priv->packet_hdr.size % 8;
-            slice_param->macroblock_number     = priv->packet_hdr.macroblock_number;
-            slice_param->quant_scale           = priv->packet_hdr.quant_scale;
-       }    
-        else {
-            slice_param->macroblock_offset     = priv->vop_hdr.size % 8;
-            slice_param->macroblock_number     = 0;
-            slice_param->quant_scale           = priv->vop_hdr.quant;
-        }
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->curr_picture;
+  GstVaapiSlice *slice;
+  VASliceParameterBufferMPEG4 *slice_param;
+
+  GST_DEBUG ("decoder silce: %p, %u bytes)", buf, buf_size);
+
+  // has_packet_header is true for the 2+ slice
+  if (!has_packet_header && !fill_picture (decoder, picture))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+  slice = GST_VAAPI_SLICE_NEW (MPEG4, decoder, buf, buf_size);
+  if (!slice) {
+    GST_DEBUG ("failed to allocate slice");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  gst_vaapi_picture_add_slice (picture, slice);
+
+  /* Fill in VASliceParameterBufferMPEG4 */
+  slice_param = slice->param;
+  if (priv->is_svh) {
+    slice_param->macroblock_offset = (priv->svh_hdr.size) % 8;
+    slice_param->macroblock_number = 0;
+    // the header of first gob_layer is empty (gob_header_empty=1), use vop_quant
+    slice_param->quant_scale = priv->svh_hdr.vop_quant;
+  } else {
+    if (has_packet_header) {
+      slice_param->macroblock_offset = priv->packet_hdr.size % 8;
+      slice_param->macroblock_number = priv->packet_hdr.macroblock_number;
+      slice_param->quant_scale = priv->packet_hdr.quant_scale;
+    } else {
+      slice_param->macroblock_offset = priv->vop_hdr.size % 8;
+      slice_param->macroblock_number = 0;
+      slice_param->quant_scale = priv->vop_hdr.quant;
     }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
+decode_packet (GstVaapiDecoderMpeg4 * decoder, GstMpeg4Packet packet)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstMpeg4Packet *tos = &packet;
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstMpeg4Packet *tos = &packet;
+  GstVaapiDecoderStatus status;
+
+  // packet.size is the size from current marker to the next.
+  if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_START) {
+    status =
+        decode_sequence (decoder, packet.data + packet.offset, packet.size);
+  } else if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_END) {
+    status = decode_sequence_end (decoder);
+  } else if (tos->type == GST_MPEG4_VISUAL_OBJ) {
+    status =
+        decode_visual_object (decoder, packet.data + packet.offset,
+        packet.size);
+  } else if (tos->type >= GST_MPEG4_VIDEO_OBJ_FIRST
+      && tos->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
+    GST_WARNING
+        ("unexpected marker: (GST_MPEG4_VIDEO_OBJ_FIRST, GST_MPEG4_VIDEO_OBJ_LAST)");
+    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+  } else if (tos->type >= GST_MPEG4_VIDEO_LAYER_FIRST
+      && tos->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
+    status =
+        decode_video_object_layer (decoder, packet.data + packet.offset,
+        packet.size);
+  } else if (tos->type == GST_MPEG4_GROUP_OF_VOP) {
+    status = decode_gop (decoder, packet.data + packet.offset, packet.size);
+  } else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
+    GstMpeg4Packet video_packet;
+    const guint8 *_data;
+    gint _data_size;
+
+    status = decode_picture (decoder, packet.data + packet.offset, packet.size);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+
+    /* decode slice
+     * A resync marker shall only be located immediately before a macroblock
+     * (or video packet header if it exists) and aligned with a byte.
+     * Either start_code or resync_marker is scanned/measured in bytes,
+     * while the header itself is parsed/measured in bits.
+     * It means: resync_marker (video_packet_header) starts from a byte
+     * boundary, while a MB doesn't start from a byte boundary -- which is
+     * what 'macroblock_offset' in the slice refers to.
+     */
+    _data = packet.data + packet.offset + priv->vop_hdr.size / 8;
+    _data_size = packet.size - (priv->vop_hdr.size / 8);
 
-    // packet.size is the size from current marker to the next.
-    if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_START) {
-        status = decode_sequence(decoder, packet.data + packet.offset, packet.size);
-    }
-    else if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_END) {
-        status = decode_sequence_end(decoder);
-    }
-    else if (tos->type == GST_MPEG4_VISUAL_OBJ) {
-        status = decode_visual_object(decoder, packet.data + packet.offset, packet.size);
-    }
-    else if (tos->type >= GST_MPEG4_VIDEO_OBJ_FIRST && tos->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
-        GST_WARNING("unexpected marker: (GST_MPEG4_VIDEO_OBJ_FIRST, GST_MPEG4_VIDEO_OBJ_LAST)");
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-    }
-    else if (tos->type >= GST_MPEG4_VIDEO_LAYER_FIRST && tos->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
-        status = decode_video_object_layer(decoder, packet.data + packet.offset, packet.size);
-    }
-    else if (tos->type == GST_MPEG4_GROUP_OF_VOP) {
-        status = decode_gop(decoder, packet.data + packet.offset, packet.size);
-    }
-    else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
-        GstMpeg4Packet video_packet;
-        const guint8 *_data;
-        gint  _data_size;
-
-        status = decode_picture(decoder, packet.data + packet.offset, packet.size);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-
-        /* decode slice
-         * A resync marker shall only be located immediately before a macroblock 
-         * (or video packet header if exists) and aligned with a byte
-         * either start_code or resync_marker are scaned/measured by byte, 
-         * while the header itself are parsed/measured in bit
-         * it means: resync_marker(video_packet_header) start from byte boundary, 
-         * while MB doesn't start from byte boundary -- it is what 'macroblock_offset' 
-         * in slice refer to
-         */
-        _data = packet.data + packet.offset + priv->vop_hdr.size/8;
-        _data_size = packet.size - (priv->vop_hdr.size/8);
-        
-        if (priv->vol_hdr.resync_marker_disable) {
-            status = decode_slice(decoder, _data, _data_size, FALSE);
-            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-                return status;
+    if (priv->vol_hdr.resync_marker_disable) {
+      status = decode_slice (decoder, _data, _data_size, FALSE);
+      if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+        return status;
+    } else {
+      GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
+      gboolean first_slice = TRUE;
+
+      // next start_code is required to determine the end of last slice
+      _data_size += 4;
+
+      while (_data_size > 0) {
+        // we can skip user data here
+        ret =
+            gst_mpeg4_parse (&video_packet, TRUE, &priv->vop_hdr, _data, 0,
+            _data_size);
+        if (ret != GST_MPEG4_PARSER_OK) {
+          break;
         }
-        else {
-            GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
-            gboolean first_slice = TRUE;
-
-            // next start_code is required to determine the end of last slice
-            _data_size += 4;
-
-            while (_data_size > 0) {
-                // we can skip user data here
-                ret = gst_mpeg4_parse(&video_packet, TRUE, &priv->vop_hdr, _data, 0,  _data_size);
-                if(ret != GST_MPEG4_PARSER_OK) {
-                    break;
-                }
-
-                if (first_slice) {
-                    status = decode_slice(decoder, _data, video_packet.size, FALSE);
-                    first_slice = FALSE;
-                }
-                else {
-                    _data += video_packet.offset;
-                    _data_size -= video_packet.offset;
-
-                    ret = gst_mpeg4_parse_video_packet_header (&priv->packet_hdr, &priv->vol_hdr, &priv->vop_hdr, &priv->sprite_trajectory, _data, _data_size);
-                    status = decode_slice(decoder,_data + priv->packet_hdr.size/8, video_packet.size - priv->packet_hdr.size/8, TRUE); 
-                }
-
-                _data += video_packet.size;
-                _data_size -= video_packet.size;
-            }
+
+        if (first_slice) {
+          status = decode_slice (decoder, _data, video_packet.size, FALSE);
+          first_slice = FALSE;
+        } else {
+          _data += video_packet.offset;
+          _data_size -= video_packet.offset;
+
+          ret =
+              gst_mpeg4_parse_video_packet_header (&priv->packet_hdr,
+              &priv->vol_hdr, &priv->vop_hdr, &priv->sprite_trajectory, _data,
+              _data_size);
+          status =
+              decode_slice (decoder, _data + priv->packet_hdr.size / 8,
+              video_packet.size - priv->packet_hdr.size / 8, TRUE);
         }
-        status = decode_current_picture(decoder);
-    }
-    else if (tos->type == GST_MPEG4_USER_DATA
-          || tos->type == GST_MPEG4_VIDEO_SESSION_ERR 
-          || tos->type == GST_MPEG4_FBA 
-          || tos->type == GST_MPEG4_FBA_PLAN 
-          || tos->type == GST_MPEG4_MESH 
-          || tos->type == GST_MPEG4_MESH_PLAN 
-          || tos->type == GST_MPEG4_STILL_TEXTURE_OBJ 
-          || tos->type == GST_MPEG4_TEXTURE_SPATIAL 
-          || tos->type == GST_MPEG4_TEXTURE_SNR_LAYER 
-          || tos->type == GST_MPEG4_TEXTURE_TILE 
-          || tos->type == GST_MPEG4_SHAPE_LAYER 
-          || tos->type == GST_MPEG4_STUFFING 
-          || tos->type == GST_MPEG4_SYSTEM_FIRST 
-          || tos->type == GST_MPEG4_SYSTEM_LAST) {
-        GST_WARNING("Ignore marker: %x\n", tos->type);
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-    }
-    else {
-        GST_ERROR("unsupported start code %x\n", tos->type);
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-    }
-    
-    return status;
-}
 
-static GstVaapiDecoderStatus
-decode_buffer(GstVaapiDecoderMpeg4 *decoder, const guchar *buf, guint buf_size)
-{
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-    GstMpeg4Packet packet;
-    guint ofs;
-
-    if (priv->is_svh) {
-        status = decode_picture(decoder, buf, buf_size);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-
-        ofs = priv->svh_hdr.size / 8;
-        status = decode_slice(decoder, buf + ofs, buf_size - ofs, FALSE);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
+        _data += video_packet.size;
+        _data_size -= video_packet.size;
+      }
     }
-    else {
-        packet.data   = buf;
-        packet.offset = 0;
-        packet.size   = buf_size;
-        packet.type   = (GstMpeg4StartCode)packet.data[0];
-
-        status = decode_packet(decoder, packet);
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    status = decode_current_picture (decoder);
+  } else if (tos->type == GST_MPEG4_USER_DATA
+      || tos->type == GST_MPEG4_VIDEO_SESSION_ERR
+      || tos->type == GST_MPEG4_FBA
+      || tos->type == GST_MPEG4_FBA_PLAN
+      || tos->type == GST_MPEG4_MESH
+      || tos->type == GST_MPEG4_MESH_PLAN
+      || tos->type == GST_MPEG4_STILL_TEXTURE_OBJ
+      || tos->type == GST_MPEG4_TEXTURE_SPATIAL
+      || tos->type == GST_MPEG4_TEXTURE_SNR_LAYER
+      || tos->type == GST_MPEG4_TEXTURE_TILE
+      || tos->type == GST_MPEG4_SHAPE_LAYER
+      || tos->type == GST_MPEG4_STUFFING
+      || tos->type == GST_MPEG4_SYSTEM_FIRST
+      || tos->type == GST_MPEG4_SYSTEM_LAST) {
+    GST_WARNING ("Ignore marker: %x\n", tos->type);
+    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+  } else {
+    GST_ERROR ("unsupported start code %x\n", tos->type);
+    status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+  }
+
+  return status;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg4_decode_codec_data(GstVaapiDecoder *base_decoder,
-    const guchar *_buf, guint _buf_size)
+decode_buffer (GstVaapiDecoderMpeg4 * decoder, const guchar * buf,
+    guint buf_size)
 {
-    GstVaapiDecoderMpeg4 * const decoder =
-        GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
-    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-    GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
-    GstMpeg4Packet packet;
-    guchar *buf;
-    guint pos, buf_size;
-
-    // add additional 0x000001b2 to enclose the last header
-    buf_size = _buf_size + 4;
-    buf = malloc(buf_size);
-    memcpy(buf, _buf, buf_size);
-    buf[buf_size-4] = 0;
-    buf[buf_size-3] = 0;
-    buf[buf_size-2] = 1;
-    buf[buf_size-1] = 0xb2;
-
-    pos = 0;
-
-    while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
-        result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);
-        if (result != GST_MPEG4_PARSER_OK) {
-            break;
-        }
-        status = decode_packet(decoder, packet);
-        if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
-            pos = packet.offset + packet.size; 
-        }
-        else {
-            GST_WARNING("decode mp4 packet failed when decoding codec data\n");
-            break;
-        }
-    }
-    free(buf);
-    return status;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
+  GstMpeg4Packet packet;
+  guint ofs;
+
+  if (priv->is_svh) {
+    status = decode_picture (decoder, buf, buf_size);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+
+    ofs = priv->svh_hdr.size / 8;
+    status = decode_slice (decoder, buf + ofs, buf_size - ofs, FALSE);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  } else {
+    packet.data = buf;
+    packet.offset = 0;
+    packet.size = buf_size;
+    packet.type = (GstMpeg4StartCode) packet.data[0];
+
+    status = decode_packet (decoder, packet);
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-ensure_decoder(GstVaapiDecoderMpeg4 *decoder)
+gst_vaapi_decoder_mpeg4_decode_codec_data (GstVaapiDecoder * base_decoder,
+    const guchar * _buf, guint _buf_size)
 {
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-
-    if (!priv->is_opened) {
-        priv->is_opened = gst_vaapi_decoder_mpeg4_open(decoder);
-        if (!priv->is_opened)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
-
-        status = gst_vaapi_decoder_decode_codec_data(
-            GST_VAAPI_DECODER_CAST(decoder));
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
+  GstVaapiDecoderMpeg4 *const decoder =
+      GST_VAAPI_DECODER_MPEG4_CAST (base_decoder);
+  GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
+  GstMpeg4Packet packet;
+  guchar *buf;
+  guint pos, buf_size;
+
+  // add additional 0x000001b2 to enclose the last header
+  buf_size = _buf_size + 4;
+  buf = malloc (buf_size);
+  memcpy (buf, _buf, buf_size);
+  buf[buf_size - 4] = 0;
+  buf[buf_size - 3] = 0;
+  buf[buf_size - 2] = 1;
+  buf[buf_size - 1] = 0xb2;
+
+  pos = 0;
+
+  while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
+    result = gst_mpeg4_parse (&packet, FALSE, NULL, buf, pos, buf_size);
+    if (result != GST_MPEG4_PARSER_OK) {
+      break;
     }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    status = decode_packet (decoder, packet);
+    if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
+      pos = packet.offset + packet.size;
+    } else {
+      GST_WARNING ("decode mp4 packet failed when decoding codec data\n");
+      break;
+    }
+  }
+  free (buf);
+  return status;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg4_parse(GstVaapiDecoder *base_decoder,
-    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
+ensure_decoder (GstVaapiDecoderMpeg4 * decoder)
 {
-    GstVaapiDecoderMpeg4 * const decoder =
-        GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
-    GstVaapiDecoderMpeg4Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-    GstMpeg4Packet packet;
-    GstMpeg4ParseResult result;
-    const guchar *buf;
-    guint size, buf_size, flags = 0;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
 
-    size = gst_adapter_available(adapter);
-    buf = gst_adapter_map(adapter, size);
-    if (!buf)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+  if (!priv->is_opened) {
+    priv->is_opened = gst_vaapi_decoder_mpeg4_open (decoder);
+    if (!priv->is_opened)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
 
-    packet.type = GST_MPEG4_USER_DATA;
-    if (priv->is_svh)
-        result = gst_h263_parse(&packet, buf, 0, size);
-    else
-        result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, 0, size);
-    if (result == GST_MPEG4_PARSER_NO_PACKET_END && at_eos)
-        packet.size = size - packet.offset;
-    else if (result == GST_MPEG4_PARSER_ERROR)
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    else if (result != GST_MPEG4_PARSER_OK)
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+    status =
+        gst_vaapi_decoder_decode_codec_data (GST_VAAPI_DECODER_CAST (decoder));
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
 
-    buf_size = packet.size;
-    gst_adapter_flush(adapter, packet.offset);
-    unit->size = buf_size;
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg4_parse (GstVaapiDecoder * base_decoder,
+    GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
+{
+  GstVaapiDecoderMpeg4 *const decoder =
+      GST_VAAPI_DECODER_MPEG4_CAST (base_decoder);
+  GstVaapiDecoderMpeg4Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
+  GstMpeg4Packet packet;
+  GstMpeg4ParseResult result;
+  const guchar *buf;
+  guint size, buf_size, flags = 0;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    /* Check for start of new picture */
-    switch (packet.type) {
+  size = gst_adapter_available (adapter);
+  buf = gst_adapter_map (adapter, size);
+  if (!buf)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  packet.type = GST_MPEG4_USER_DATA;
+  if (priv->is_svh)
+    result = gst_h263_parse (&packet, buf, 0, size);
+  else
+    result = gst_mpeg4_parse (&packet, FALSE, NULL, buf, 0, size);
+  if (result == GST_MPEG4_PARSER_NO_PACKET_END && at_eos)
+    packet.size = size - packet.offset;
+  else if (result == GST_MPEG4_PARSER_ERROR)
+    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  else if (result != GST_MPEG4_PARSER_OK)
+    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+  buf_size = packet.size;
+  gst_adapter_flush (adapter, packet.offset);
+  unit->size = buf_size;
+
+  /* Check for start of new picture */
+  switch (packet.type) {
     case GST_MPEG4_VIDEO_SESSION_ERR:
     case GST_MPEG4_FBA:
     case GST_MPEG4_FBA_PLAN:
@@ -1067,103 +1084,102 @@ gst_vaapi_decoder_mpeg4_parse(GstVaapiDecoder *base_decoder,
     case GST_MPEG4_TEXTURE_TILE:
     case GST_MPEG4_SHAPE_LAYER:
     case GST_MPEG4_STUFFING:
-        gst_adapter_flush(adapter, packet.size);
-        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+      gst_adapter_flush (adapter, packet.size);
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
     case GST_MPEG4_USER_DATA:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+      break;
     case GST_MPEG4_VISUAL_OBJ_SEQ_END:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
+      break;
     case GST_MPEG4_VIDEO_OBJ_PLANE:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-        /* fall-through */
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+      /* fall-through */
     case GST_MPEG4_VISUAL_OBJ_SEQ_START:
     case GST_MPEG4_VISUAL_OBJ:
     case GST_MPEG4_GROUP_OF_VOP:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      break;
     default:
-        if (packet.type >= GST_MPEG4_VIDEO_OBJ_FIRST &&
-            packet.type <= GST_MPEG4_VIDEO_OBJ_LAST) {
-            gst_adapter_flush(adapter, packet.size);
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        }
-        if (packet.type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
-            packet.type <= GST_MPEG4_VIDEO_LAYER_LAST) {
-            break;
-        }
-        if (packet.type >= GST_MPEG4_SYSTEM_FIRST &&
-            packet.type <= GST_MPEG4_SYSTEM_LAST) {
-            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
-            break;
-        }
-        GST_WARNING("unsupported start code (0x%02x)", packet.type);
-        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-    }
-    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      if (packet.type >= GST_MPEG4_VIDEO_OBJ_FIRST &&
+          packet.type <= GST_MPEG4_VIDEO_OBJ_LAST) {
+        gst_adapter_flush (adapter, packet.size);
+        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+      }
+      if (packet.type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
+          packet.type <= GST_MPEG4_VIDEO_LAYER_LAST) {
+        break;
+      }
+      if (packet.type >= GST_MPEG4_SYSTEM_FIRST &&
+          packet.type <= GST_MPEG4_SYSTEM_LAST) {
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+        break;
+      }
+      GST_WARNING ("unsupported start code (0x%02x)", packet.type);
+      return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+  }
+  GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, flags);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg4_decode(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_mpeg4_decode (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderMpeg4 * const decoder =
-        GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
-    GstVaapiDecoderStatus status;
-    GstBuffer * const buffer =
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
-    GstMapInfo map_info;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+  GstVaapiDecoderMpeg4 *const decoder =
+      GST_VAAPI_DECODER_MPEG4_CAST (base_decoder);
+  GstVaapiDecoderStatus status;
+  GstBuffer *const buffer =
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
+  GstMapInfo map_info;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
-        GST_ERROR("failed to map buffer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
+    GST_ERROR ("failed to map buffer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    status = decode_buffer(decoder, map_info.data + unit->offset, unit->size);
-    gst_buffer_unmap(buffer, &map_info);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  status = decode_buffer (decoder, map_info.data + unit->offset, unit->size);
+  gst_buffer_unmap (buffer, &map_info);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static void
-gst_vaapi_decoder_mpeg4_class_init(GstVaapiDecoderMpeg4Class *klass)
+gst_vaapi_decoder_mpeg4_class_init (GstVaapiDecoderMpeg4Class * klass)
 {
-    GstVaapiMiniObjectClass * const object_class =
-        GST_VAAPI_MINI_OBJECT_CLASS(klass);
-    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
+  GstVaapiMiniObjectClass *const object_class =
+      GST_VAAPI_MINI_OBJECT_CLASS (klass);
+  GstVaapiDecoderClass *const decoder_class = GST_VAAPI_DECODER_CLASS (klass);
 
-    object_class->size          = sizeof(GstVaapiDecoderMpeg4);
-    object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
+  object_class->size = sizeof (GstVaapiDecoderMpeg4);
+  object_class->finalize = (GDestroyNotify) gst_vaapi_decoder_finalize;
 
-    decoder_class->create       = gst_vaapi_decoder_mpeg4_create;
-    decoder_class->destroy      = gst_vaapi_decoder_mpeg4_destroy;
-    decoder_class->parse        = gst_vaapi_decoder_mpeg4_parse;
-    decoder_class->decode       = gst_vaapi_decoder_mpeg4_decode;
+  decoder_class->create = gst_vaapi_decoder_mpeg4_create;
+  decoder_class->destroy = gst_vaapi_decoder_mpeg4_destroy;
+  decoder_class->parse = gst_vaapi_decoder_mpeg4_parse;
+  decoder_class->decode = gst_vaapi_decoder_mpeg4_decode;
 
-    decoder_class->decode_codec_data =
-        gst_vaapi_decoder_mpeg4_decode_codec_data;
+  decoder_class->decode_codec_data = gst_vaapi_decoder_mpeg4_decode_codec_data;
 }
 
 static inline const GstVaapiDecoderClass *
-gst_vaapi_decoder_mpeg4_class(void)
+gst_vaapi_decoder_mpeg4_class (void)
 {
-    static GstVaapiDecoderMpeg4Class g_class;
-    static gsize g_class_init = FALSE;
-
-    if (g_once_init_enter(&g_class_init)) {
-        gst_vaapi_decoder_mpeg4_class_init(&g_class);
-        g_once_init_leave(&g_class_init, TRUE);
-    }
-    return GST_VAAPI_DECODER_CLASS(&g_class);
+  static GstVaapiDecoderMpeg4Class g_class;
+  static gsize g_class_init = FALSE;
+
+  if (g_once_init_enter (&g_class_init)) {
+    gst_vaapi_decoder_mpeg4_class_init (&g_class);
+    g_once_init_leave (&g_class_init, TRUE);
+  }
+  return GST_VAAPI_DECODER_CLASS (&g_class);
 }
 
 /**
@@ -1177,8 +1193,8 @@ gst_vaapi_decoder_mpeg4_class(void)
  * Return value: the newly allocated #GstVaapiDecoder object
  */
 GstVaapiDecoder *
-gst_vaapi_decoder_mpeg4_new(GstVaapiDisplay *display, GstCaps *caps)
+gst_vaapi_decoder_mpeg4_new (GstVaapiDisplay * display, GstCaps * caps)
 {
-    return gst_vaapi_decoder_new(gst_vaapi_decoder_mpeg4_class(),
-        display, caps);
+  return gst_vaapi_decoder_new (gst_vaapi_decoder_mpeg4_class (),
+      display, caps);
 }
index 8ffbf07..fd08a83 100644 (file)
  * sub-classes.
  */
 void
-gst_vaapi_decoder_unit_init(GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_unit_init (GstVaapiDecoderUnit * unit)
 {
-    unit->flags = 0;
-    unit->size = 0;
-    unit->offset = 0;
+  unit->flags = 0;
+  unit->size = 0;
+  unit->offset = 0;
 
-    unit->parsed_info = NULL;
-    unit->parsed_info_destroy_notify = NULL;
+  unit->parsed_info = NULL;
+  unit->parsed_info_destroy_notify = NULL;
 }
 
 /**
@@ -59,9 +59,9 @@ gst_vaapi_decoder_unit_init(GstVaapiDecoderUnit *unit)
  * sub-classes.
  */
 void
-gst_vaapi_decoder_unit_clear(GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_unit_clear (GstVaapiDecoderUnit * unit)
 {
-    gst_vaapi_decoder_unit_set_parsed_info(unit, NULL, NULL);
+  gst_vaapi_decoder_unit_set_parsed_info (unit, NULL, NULL);
 }
 
 /**
@@ -77,13 +77,13 @@ gst_vaapi_decoder_unit_clear(GstVaapiDecoderUnit *unit)
  * function will be called before the @parsed_info is replaced.
  */
 void
-gst_vaapi_decoder_unit_set_parsed_info(GstVaapiDecoderUnit *unit,
+gst_vaapi_decoder_unit_set_parsed_info (GstVaapiDecoderUnit * unit,
     gpointer parsed_info, GDestroyNotify destroy_notify)
 {
-    g_return_if_fail(GST_VAAPI_IS_DECODER_UNIT(unit));
+  g_return_if_fail (GST_VAAPI_IS_DECODER_UNIT (unit));
 
-    if (unit->parsed_info && unit->parsed_info_destroy_notify)
-        unit->parsed_info_destroy_notify(unit->parsed_info);
-    unit->parsed_info = parsed_info;
-    unit->parsed_info_destroy_notify = destroy_notify;
+  if (unit->parsed_info && unit->parsed_info_destroy_notify)
+    unit->parsed_info_destroy_notify (unit->parsed_info);
+  unit->parsed_info = parsed_info;
+  unit->parsed_info_destroy_notify = destroy_notify;
 }
index 1aabe13..9289eac 100644 (file)
 #define GST_VAAPI_DECODER_VC1_CAST(decoder) \
     ((GstVaapiDecoderVC1 *)(decoder))
 
-typedef struct _GstVaapiDecoderVC1Private       GstVaapiDecoderVC1Private;
-typedef struct _GstVaapiDecoderVC1Class         GstVaapiDecoderVC1Class;
+typedef struct _GstVaapiDecoderVC1Private GstVaapiDecoderVC1Private;
+typedef struct _GstVaapiDecoderVC1Class GstVaapiDecoderVC1Class;
 
 /**
  * GstVaapiDecoderVC1:
  *
  * A decoder based on VC1.
  */
-struct _GstVaapiDecoderVC1Private {
-    GstVaapiProfile             profile;
-    guint                       width;
-    guint                       height;
-    GstVC1SeqHdr                seq_hdr;
-    GstVC1EntryPointHdr         entrypoint_hdr;
-    GstVC1FrameHdr              frame_hdr;
-    GstVC1BitPlanes            *bitplanes;
-    GstVaapiPicture            *current_picture;
-    GstVaapiPicture            *last_non_b_picture;
-    GstVaapiDpb                *dpb;
-    gint32                      next_poc;
-    guint8                     *rbdu_buffer;
-    guint8                      rndctrl;
-    guint                       rbdu_buffer_size;
-    guint                       is_opened               : 1;
-    guint                       is_first_field          : 1;
-    guint                       has_codec_data          : 1;
-    guint                       has_entrypoint          : 1;
-    guint                       size_changed            : 1;
-    guint                       profile_changed         : 1;
-    guint                       closed_entry            : 1;
-    guint                       broken_link             : 1;
+struct _GstVaapiDecoderVC1Private
+{
+  GstVaapiProfile profile;
+  guint width;
+  guint height;
+  GstVC1SeqHdr seq_hdr;
+  GstVC1EntryPointHdr entrypoint_hdr;
+  GstVC1FrameHdr frame_hdr;
+  GstVC1BitPlanes *bitplanes;
+  GstVaapiPicture *current_picture;
+  GstVaapiPicture *last_non_b_picture;
+  GstVaapiDpb *dpb;
+  gint32 next_poc;
+  guint8 *rbdu_buffer;
+  guint8 rndctrl;
+  guint rbdu_buffer_size;
+  guint is_opened:1;
+  guint is_first_field:1;
+  guint has_codec_data:1;
+  guint has_entrypoint:1;
+  guint size_changed:1;
+  guint profile_changed:1;
+  guint closed_entry:1;
+  guint broken_link:1;
 };
 
 /**
@@ -80,10 +81,11 @@ struct _GstVaapiDecoderVC1Private {
  *
  * A decoder based on VC1.
  */
-struct _GstVaapiDecoderVC1 {
-    /*< private >*/
-    GstVaapiDecoder             parent_instance;
-    GstVaapiDecoderVC1Private   priv;
+struct _GstVaapiDecoderVC1
+{
+  /*< private > */
+  GstVaapiDecoder parent_instance;
+  GstVaapiDecoderVC1Private priv;
 };
 
 /**
@@ -91,1339 +93,1339 @@ struct _GstVaapiDecoderVC1 {
  *
  * A decoder class based on VC1.
  */
-struct _GstVaapiDecoderVC1Class {
-    /*< private >*/
-    GstVaapiDecoderClass parent_class;
+struct _GstVaapiDecoderVC1Class
+{
+  /*< private > */
+  GstVaapiDecoderClass parent_class;
 };
 
 static GstVaapiDecoderStatus
-get_status(GstVC1ParserResult result)
+get_status (GstVC1ParserResult result)
 {
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderStatus status;
 
-    switch (result) {
+  switch (result) {
     case GST_VC1_PARSER_OK:
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-        break;
+      status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+      break;
     case GST_VC1_PARSER_NO_BDU_END:
-        status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        break;
+      status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+      break;
     case GST_VC1_PARSER_ERROR:
-        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-        break;
+      status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      break;
     default:
-        status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-        break;
-    }
-    return status;
+      status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+      break;
+  }
+  return status;
 }
 
 static void
-gst_vaapi_decoder_vc1_close(GstVaapiDecoderVC1 *decoder)
+gst_vaapi_decoder_vc1_close (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
 
-    gst_vaapi_picture_replace(&priv->last_non_b_picture, NULL);
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    gst_vaapi_dpb_replace(&priv->dpb, NULL);
+  gst_vaapi_picture_replace (&priv->last_non_b_picture, NULL);
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  gst_vaapi_dpb_replace (&priv->dpb, NULL);
 
-    if (priv->bitplanes) {
-        gst_vc1_bitplanes_free(priv->bitplanes);
-        priv->bitplanes = NULL;
-    }
+  if (priv->bitplanes) {
+    gst_vc1_bitplanes_free (priv->bitplanes);
+    priv->bitplanes = NULL;
+  }
 }
 
 static gboolean
-gst_vaapi_decoder_vc1_open(GstVaapiDecoderVC1 *decoder)
+gst_vaapi_decoder_vc1_open (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
 
-    gst_vaapi_decoder_vc1_close(decoder);
+  gst_vaapi_decoder_vc1_close (decoder);
 
-    priv->dpb = gst_vaapi_dpb_new(2);
-    if (!priv->dpb)
-        return FALSE;
+  priv->dpb = gst_vaapi_dpb_new (2);
+  if (!priv->dpb)
+    return FALSE;
 
-    priv->bitplanes = gst_vc1_bitplanes_new();
-    if (!priv->bitplanes)
-        return FALSE;
-    return TRUE;
+  priv->bitplanes = gst_vc1_bitplanes_new ();
+  if (!priv->bitplanes)
+    return FALSE;
+  return TRUE;
 }
 
 static void
-gst_vaapi_decoder_vc1_destroy(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_vc1_destroy (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
 
-    gst_vaapi_decoder_vc1_close(decoder);
+  gst_vaapi_decoder_vc1_close (decoder);
 
-    if (priv->rbdu_buffer) {
-        g_free(priv->rbdu_buffer);
-        priv->rbdu_buffer = NULL;
-        priv->rbdu_buffer_size = 0;
-    }
+  if (priv->rbdu_buffer) {
+    g_free (priv->rbdu_buffer);
+    priv->rbdu_buffer = NULL;
+    priv->rbdu_buffer_size = 0;
+  }
 }
 
 static gboolean
-gst_vaapi_decoder_vc1_create(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_vc1_create (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
 
-    priv->profile = (GstVaapiProfile)0;
-    priv->rndctrl = 0;
-    return TRUE;
+  priv->profile = (GstVaapiProfile) 0;
+  priv->rndctrl = 0;
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-ensure_context(GstVaapiDecoderVC1 *decoder)
+ensure_context (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiProfile profiles[2];
-    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
-    guint i, n_profiles = 0;
-    gboolean reset_context = FALSE;
-
-    if (priv->profile_changed) {
-        GST_DEBUG("profile changed");
-        priv->profile_changed = FALSE;
-        reset_context         = TRUE;
-
-        profiles[n_profiles++] = priv->profile;
-        if (priv->profile == GST_VAAPI_PROFILE_VC1_SIMPLE)
-            profiles[n_profiles++] = GST_VAAPI_PROFILE_VC1_MAIN;
-
-        for (i = 0; i < n_profiles; i++) {
-            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
-                                              profiles[i], entrypoint))
-                break;
-        }
-        if (i == n_profiles)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-        priv->profile = profiles[i];
-    }
-
-    if (priv->size_changed) {
-        GST_DEBUG("size changed");
-        priv->size_changed = FALSE;
-        reset_context      = TRUE;
-    }
-
-    if (reset_context) {
-        GstVaapiContextInfo info;
-
-        info.profile    = priv->profile;
-        info.entrypoint = entrypoint;
-        info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
-        info.width      = priv->width;
-        info.height     = priv->height;
-        info.ref_frames = 2;
-        reset_context   = gst_vaapi_decoder_ensure_context(
-            GST_VAAPI_DECODER(decoder),
-            &info
-        );
-        if (!reset_context)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiProfile profiles[2];
+  GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+  guint i, n_profiles = 0;
+  gboolean reset_context = FALSE;
+
+  if (priv->profile_changed) {
+    GST_DEBUG ("profile changed");
+    priv->profile_changed = FALSE;
+    reset_context = TRUE;
+
+    profiles[n_profiles++] = priv->profile;
+    if (priv->profile == GST_VAAPI_PROFILE_VC1_SIMPLE)
+      profiles[n_profiles++] = GST_VAAPI_PROFILE_VC1_MAIN;
+
+    for (i = 0; i < n_profiles; i++) {
+      if (gst_vaapi_display_has_decoder (GST_VAAPI_DECODER_DISPLAY (decoder),
+              profiles[i], entrypoint))
+        break;
     }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+    if (i == n_profiles)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+    priv->profile = profiles[i];
+  }
+
+  if (priv->size_changed) {
+    GST_DEBUG ("size changed");
+    priv->size_changed = FALSE;
+    reset_context = TRUE;
+  }
+
+  if (reset_context) {
+    GstVaapiContextInfo info;
+
+    info.profile = priv->profile;
+    info.entrypoint = entrypoint;
+    info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+    info.width = priv->width;
+    info.height = priv->height;
+    info.ref_frames = 2;
+    reset_context =
+        gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info);
+    if (!reset_context)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_current_picture(GstVaapiDecoderVC1 *decoder)
+decode_current_picture (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->current_picture;
-
-    if (!picture)
-        return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-    if (!gst_vaapi_picture_decode(picture))
-        goto error;
-    if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
-        if (!gst_vaapi_dpb_add(priv->dpb, picture))
-            goto error;
-        gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    }
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->current_picture;
+
+  if (!picture)
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
+  if (!gst_vaapi_picture_decode (picture))
+    goto error;
+  if (GST_VAAPI_PICTURE_IS_COMPLETE (picture)) {
+    if (!gst_vaapi_dpb_add (priv->dpb, picture))
+      goto error;
+    gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
 error:
-    /* XXX: fix for cases where first field failed to be decoded */
-    gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  /* XXX: fix for cases where first field failed to be decoded */
+  gst_vaapi_picture_replace (&priv->current_picture, NULL);
+  return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
+decode_sequence (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu,
+    GstVC1BDU * ebdu)
 {
-    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1AdvancedSeqHdr * const adv_hdr = &seq_hdr->advanced;
-    GstVC1SeqStructC * const structc = &seq_hdr->struct_c;
-    GstVC1ParserResult result;
-    GstVaapiProfile profile;
-    guint width, height, fps_n, fps_d, par_n, par_d;
-
-    result = gst_vc1_parse_sequence_header(
-        rbdu->data + rbdu->offset,
-        rbdu->size,
-        seq_hdr
-    );
-    if (result != GST_VC1_PARSER_OK) {
-        GST_ERROR("failed to parse sequence layer");
-        return get_status(result);
-    }
-
-    priv->has_entrypoint = FALSE;
-
-    /* Reset POC */
-    if (priv->last_non_b_picture) {
-        if (priv->last_non_b_picture->poc == priv->next_poc)
-            priv->next_poc++;
-        gst_vaapi_picture_replace(&priv->last_non_b_picture, NULL);
-    }
-
-    /* Validate profile */
-    switch (seq_hdr->profile) {
+  GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1AdvancedSeqHdr *const adv_hdr = &seq_hdr->advanced;
+  GstVC1SeqStructC *const structc = &seq_hdr->struct_c;
+  GstVC1ParserResult result;
+  GstVaapiProfile profile;
+  guint width, height, fps_n, fps_d, par_n, par_d;
+
+  result = gst_vc1_parse_sequence_header (rbdu->data + rbdu->offset,
+      rbdu->size, seq_hdr);
+  if (result != GST_VC1_PARSER_OK) {
+    GST_ERROR ("failed to parse sequence layer");
+    return get_status (result);
+  }
+
+  priv->has_entrypoint = FALSE;
+
+  /* Reset POC */
+  if (priv->last_non_b_picture) {
+    if (priv->last_non_b_picture->poc == priv->next_poc)
+      priv->next_poc++;
+    gst_vaapi_picture_replace (&priv->last_non_b_picture, NULL);
+  }
+
+  /* Validate profile */
+  switch (seq_hdr->profile) {
     case GST_VC1_PROFILE_SIMPLE:
     case GST_VC1_PROFILE_MAIN:
     case GST_VC1_PROFILE_ADVANCED:
-        break;
+      break;
     default:
-        GST_ERROR("unsupported profile %d", seq_hdr->profile);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    }
-
-    fps_n = 0;
-    fps_d = 0;
-    par_n = 0;
-    par_d = 0;
-    switch (seq_hdr->profile) {
+      GST_ERROR ("unsupported profile %d", seq_hdr->profile);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  }
+
+  fps_n = 0;
+  fps_d = 0;
+  par_n = 0;
+  par_d = 0;
+  switch (seq_hdr->profile) {
     case GST_VC1_PROFILE_SIMPLE:
     case GST_VC1_PROFILE_MAIN:
-        if (structc->wmvp) {
-            fps_n = structc->framerate;
-            fps_d = 1;
-        }
-        break;
+      if (structc->wmvp) {
+        fps_n = structc->framerate;
+        fps_d = 1;
+      }
+      break;
     case GST_VC1_PROFILE_ADVANCED:
-        fps_n = adv_hdr->fps_n;
-        fps_d = adv_hdr->fps_d;
-        par_n = adv_hdr->par_n;
-        par_d = adv_hdr->par_d;
-        break;
+      fps_n = adv_hdr->fps_n;
+      fps_d = adv_hdr->fps_d;
+      par_n = adv_hdr->par_n;
+      par_d = adv_hdr->par_d;
+      break;
     default:
-        g_assert(0 && "XXX: we already validated the profile above");
-        break;
-    }
+      g_assert (0 && "XXX: we already validated the profile above");
+      break;
+  }
 
-    if (fps_n && fps_d)
-        gst_vaapi_decoder_set_framerate(base_decoder, fps_n, fps_d);
+  if (fps_n && fps_d)
+    gst_vaapi_decoder_set_framerate (base_decoder, fps_n, fps_d);
 
-    if (par_n > 0 && par_d > 0)
-        gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder, par_n, par_d);
+  if (par_n > 0 && par_d > 0)
+    gst_vaapi_decoder_set_pixel_aspect_ratio (base_decoder, par_n, par_d);
 
-    switch (seq_hdr->profile) {
+  switch (seq_hdr->profile) {
     case GST_VC1_PROFILE_SIMPLE:
     case GST_VC1_PROFILE_MAIN:
-        width  = seq_hdr->struct_c.coded_width;
-        height = seq_hdr->struct_c.coded_height;
-        break;
+      width = seq_hdr->struct_c.coded_width;
+      height = seq_hdr->struct_c.coded_height;
+      break;
     case GST_VC1_PROFILE_ADVANCED:
-        width  = seq_hdr->advanced.max_coded_width;
-        height = seq_hdr->advanced.max_coded_height;
-        break;
+      width = seq_hdr->advanced.max_coded_width;
+      height = seq_hdr->advanced.max_coded_height;
+      break;
     default:
-        g_assert(0 && "XXX: we already validated the profile above");
-        break;
-    }
+      g_assert (0 && "XXX: we already validated the profile above");
+      break;
+  }
 
-    if (priv->width != width) {
-        priv->width = width;
-        priv->size_changed = TRUE;
-    }
+  if (priv->width != width) {
+    priv->width = width;
+    priv->size_changed = TRUE;
+  }
 
-    if (priv->height != height) {
-        priv->height = height;
-        priv->size_changed = TRUE;
-    }
+  if (priv->height != height) {
+    priv->height = height;
+    priv->size_changed = TRUE;
+  }
 
-    switch (seq_hdr->profile) {
+  switch (seq_hdr->profile) {
     case GST_VC1_PROFILE_SIMPLE:
-        profile = GST_VAAPI_PROFILE_VC1_SIMPLE;
-        break;
+      profile = GST_VAAPI_PROFILE_VC1_SIMPLE;
+      break;
     case GST_VC1_PROFILE_MAIN:
-        profile = GST_VAAPI_PROFILE_VC1_MAIN;
-        break;
+      profile = GST_VAAPI_PROFILE_VC1_MAIN;
+      break;
     case GST_VC1_PROFILE_ADVANCED:
-        profile = GST_VAAPI_PROFILE_VC1_ADVANCED;
-        break;
+      profile = GST_VAAPI_PROFILE_VC1_ADVANCED;
+      break;
     default:
-        g_assert(0 && "XXX: we already validated the profile above");
-        break;
-    }
-    if (priv->profile != profile) {
-        priv->profile = profile;
-        priv->profile_changed = TRUE;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      g_assert (0 && "XXX: we already validated the profile above");
+      break;
+  }
+  if (priv->profile != profile) {
+    priv->profile = profile;
+    priv->profile_changed = TRUE;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_sequence_end(GstVaapiDecoderVC1 *decoder)
+decode_sequence_end (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
 
-    status = decode_current_picture(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+  status = decode_current_picture (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    gst_vaapi_dpb_flush(priv->dpb);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_dpb_flush (priv->dpb);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_entry_point(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
+decode_entry_point (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu,
+    GstVC1BDU * ebdu)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1EntryPointHdr * const entrypoint_hdr = &priv->entrypoint_hdr;
-    GstVC1ParserResult result;
-
-    result = gst_vc1_parse_entry_point_header(
-        rbdu->data + rbdu->offset,
-        rbdu->size,
-        entrypoint_hdr,
-        seq_hdr
-    );
-    if (result != GST_VC1_PARSER_OK) {
-        GST_ERROR("failed to parse entrypoint layer");
-        return get_status(result);
-    }
-
-    if (entrypoint_hdr->coded_size_flag) {
-        priv->width        = entrypoint_hdr->coded_width;
-        priv->height       = entrypoint_hdr->coded_height;
-        priv->size_changed = TRUE;
-    }
-
-    priv->has_entrypoint = TRUE;
-    priv->closed_entry   = entrypoint_hdr->closed_entry;
-    priv->broken_link    = entrypoint_hdr->broken_link;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1EntryPointHdr *const entrypoint_hdr = &priv->entrypoint_hdr;
+  GstVC1ParserResult result;
+
+  result = gst_vc1_parse_entry_point_header (rbdu->data + rbdu->offset,
+      rbdu->size, entrypoint_hdr, seq_hdr);
+  if (result != GST_VC1_PARSER_OK) {
+    GST_ERROR ("failed to parse entrypoint layer");
+    return get_status (result);
+  }
+
+  if (entrypoint_hdr->coded_size_flag) {
+    priv->width = entrypoint_hdr->coded_width;
+    priv->height = entrypoint_hdr->coded_height;
+    priv->size_changed = TRUE;
+  }
+
+  priv->has_entrypoint = TRUE;
+  priv->closed_entry = entrypoint_hdr->closed_entry;
+  priv->broken_link = entrypoint_hdr->broken_link;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 /* Reconstruct bitstream PTYPE (7.1.1.4, index into Table-35) */
 static guint
-get_PTYPE(guint ptype)
+get_PTYPE (guint ptype)
 {
-    switch (ptype) {
-    case GST_VC1_PICTURE_TYPE_I:  return 0;
-    case GST_VC1_PICTURE_TYPE_P:  return 1;
-    case GST_VC1_PICTURE_TYPE_B:  return 2;
-    case GST_VC1_PICTURE_TYPE_BI: return 3;
-    }
-    return 4; /* skipped P-frame */
+  switch (ptype) {
+    case GST_VC1_PICTURE_TYPE_I:
+      return 0;
+    case GST_VC1_PICTURE_TYPE_P:
+      return 1;
+    case GST_VC1_PICTURE_TYPE_B:
+      return 2;
+    case GST_VC1_PICTURE_TYPE_BI:
+      return 3;
+  }
+  return 4;                     /* skipped P-frame */
 }
 
 /* Reconstruct bitstream BFRACTION (7.1.1.14, index into Table-40) */
 static guint
-get_BFRACTION(guint bfraction)
+get_BFRACTION (guint bfraction)
 {
-    guint i;
+  guint i;
+
+  static const struct
+  {
+    guint16 index;
+    guint16 value;
+  }
+  bfraction_map[] = {
+    {
+    0, GST_VC1_BFRACTION_BASIS / 2}, {
+    1, GST_VC1_BFRACTION_BASIS / 3}, {
+    2, (GST_VC1_BFRACTION_BASIS * 2) / 3}, {
+    3, GST_VC1_BFRACTION_BASIS / 4}, {
+    4, (GST_VC1_BFRACTION_BASIS * 3) / 4}, {
+    5, GST_VC1_BFRACTION_BASIS / 5}, {
+    6, (GST_VC1_BFRACTION_BASIS * 2) / 5}, {
+    7, (GST_VC1_BFRACTION_BASIS * 3) / 5}, {
+    8, (GST_VC1_BFRACTION_BASIS * 4) / 5}, {
+    9, GST_VC1_BFRACTION_BASIS / 6}, {
+    10, (GST_VC1_BFRACTION_BASIS * 5) / 6}, {
+    11, GST_VC1_BFRACTION_BASIS / 7}, {
+    12, (GST_VC1_BFRACTION_BASIS * 2) / 7}, {
+    13, (GST_VC1_BFRACTION_BASIS * 3) / 7}, {
+    14, (GST_VC1_BFRACTION_BASIS * 4) / 7}, {
+    15, (GST_VC1_BFRACTION_BASIS * 5) / 7}, {
+    16, (GST_VC1_BFRACTION_BASIS * 6) / 7}, {
+    17, GST_VC1_BFRACTION_BASIS / 8}, {
+    18, (GST_VC1_BFRACTION_BASIS * 3) / 8}, {
+    19, (GST_VC1_BFRACTION_BASIS * 5) / 8}, {
+    20, (GST_VC1_BFRACTION_BASIS * 7) / 8}, {
+    21, GST_VC1_BFRACTION_RESERVED}, {
+    22, GST_VC1_BFRACTION_PTYPE_BI}
+  };
+
+  if (!bfraction)
+    return 0;
 
-    static const struct {
-        guint16 index;
-        guint16 value;
-    }
-    bfraction_map[] = {
-        {  0,  GST_VC1_BFRACTION_BASIS      / 2 },
-        {  1,  GST_VC1_BFRACTION_BASIS      / 3 },
-        {  2, (GST_VC1_BFRACTION_BASIS * 2) / 3 },
-        {  3,  GST_VC1_BFRACTION_BASIS      / 4 },
-        {  4, (GST_VC1_BFRACTION_BASIS * 3) / 4 },
-        {  5,  GST_VC1_BFRACTION_BASIS      / 5 },
-        {  6, (GST_VC1_BFRACTION_BASIS * 2) / 5 },
-        {  7, (GST_VC1_BFRACTION_BASIS * 3) / 5 },
-        {  8, (GST_VC1_BFRACTION_BASIS * 4) / 5 },
-        {  9,  GST_VC1_BFRACTION_BASIS      / 6 },
-        { 10, (GST_VC1_BFRACTION_BASIS * 5) / 6 },
-        { 11,  GST_VC1_BFRACTION_BASIS      / 7 },
-        { 12, (GST_VC1_BFRACTION_BASIS * 2) / 7 },
-        { 13, (GST_VC1_BFRACTION_BASIS * 3) / 7 },
-        { 14, (GST_VC1_BFRACTION_BASIS * 4) / 7 },
-        { 15, (GST_VC1_BFRACTION_BASIS * 5) / 7 },
-        { 16, (GST_VC1_BFRACTION_BASIS * 6) / 7 },
-        { 17,  GST_VC1_BFRACTION_BASIS      / 8 },
-        { 18, (GST_VC1_BFRACTION_BASIS * 3) / 8 },
-        { 19, (GST_VC1_BFRACTION_BASIS * 5) / 8 },
-        { 20, (GST_VC1_BFRACTION_BASIS * 7) / 8 },
-        { 21,  GST_VC1_BFRACTION_RESERVED },
-        { 22,  GST_VC1_BFRACTION_PTYPE_BI }
-    };
-
-    if (!bfraction)
-        return 0;
-
-    for (i = 0; i < G_N_ELEMENTS(bfraction_map); i++) {
-        if (bfraction_map[i].value == bfraction)
-            return bfraction_map[i].index;
-    }
-    return 21; /* RESERVED */
+  for (i = 0; i < G_N_ELEMENTS (bfraction_map); i++) {
+    if (bfraction_map[i].value == bfraction)
+      return bfraction_map[i].index;
+  }
+  return 21;                    /* RESERVED */
 }
 
 /* Translate GStreamer MV modes to VA-API */
 static guint
-get_VAMvModeVC1(guint mvmode)
+get_VAMvModeVC1 (guint mvmode)
 {
-    switch (mvmode) {
-    case GST_VC1_MVMODE_1MV_HPEL_BILINEAR: return VAMvMode1MvHalfPelBilinear;
-    case GST_VC1_MVMODE_1MV:               return VAMvMode1Mv;
-    case GST_VC1_MVMODE_1MV_HPEL:          return VAMvMode1MvHalfPel;
-    case GST_VC1_MVMODE_MIXED_MV:          return VAMvModeMixedMv;
-    case GST_VC1_MVMODE_INTENSITY_COMP:    return VAMvModeIntensityCompensation;
-    }
-    return 0;
+  switch (mvmode) {
+    case GST_VC1_MVMODE_1MV_HPEL_BILINEAR:
+      return VAMvMode1MvHalfPelBilinear;
+    case GST_VC1_MVMODE_1MV:
+      return VAMvMode1Mv;
+    case GST_VC1_MVMODE_1MV_HPEL:
+      return VAMvMode1MvHalfPel;
+    case GST_VC1_MVMODE_MIXED_MV:
+      return VAMvModeMixedMv;
+    case GST_VC1_MVMODE_INTENSITY_COMP:
+      return VAMvModeIntensityCompensation;
+  }
+  return 0;
 }
 
 /* Reconstruct bitstream MVMODE (7.1.1.32) */
 static guint
-get_MVMODE(GstVC1FrameHdr *frame_hdr)
+get_MVMODE (GstVC1FrameHdr * frame_hdr)
 {
-    guint mvmode;
+  guint mvmode;
 
-    if (frame_hdr->profile == GST_VC1_PROFILE_ADVANCED)
-        mvmode = frame_hdr->pic.advanced.mvmode;
-    else
-        mvmode = frame_hdr->pic.simple.mvmode;
+  if (frame_hdr->profile == GST_VC1_PROFILE_ADVANCED)
+    mvmode = frame_hdr->pic.advanced.mvmode;
+  else
+    mvmode = frame_hdr->pic.simple.mvmode;
 
-    if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P ||
-        frame_hdr->ptype == GST_VC1_PICTURE_TYPE_B)
-        return get_VAMvModeVC1(mvmode);
-    return 0;
+  if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P ||
+      frame_hdr->ptype == GST_VC1_PICTURE_TYPE_B)
+    return get_VAMvModeVC1 (mvmode);
+  return 0;
 }
 
 /* Reconstruct bitstream MVMODE2 (7.1.1.33) */
 static guint
-get_MVMODE2(GstVC1FrameHdr *frame_hdr)
+get_MVMODE2 (GstVC1FrameHdr * frame_hdr)
 {
-    guint mvmode, mvmode2;
-
-    if (frame_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
-        mvmode  = frame_hdr->pic.advanced.mvmode;
-        mvmode2 = frame_hdr->pic.advanced.mvmode2;
-    }
-    else {
-        mvmode  = frame_hdr->pic.simple.mvmode;
-        mvmode2 = frame_hdr->pic.simple.mvmode2;
-    }
-
-    if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P &&
-        mvmode == GST_VC1_MVMODE_INTENSITY_COMP)
-        return get_VAMvModeVC1(mvmode2);
-    return 0;
+  guint mvmode, mvmode2;
+
+  if (frame_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
+    mvmode = frame_hdr->pic.advanced.mvmode;
+    mvmode2 = frame_hdr->pic.advanced.mvmode2;
+  } else {
+    mvmode = frame_hdr->pic.simple.mvmode;
+    mvmode2 = frame_hdr->pic.simple.mvmode2;
+  }
+
+  if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P &&
+      mvmode == GST_VC1_MVMODE_INTENSITY_COMP)
+    return get_VAMvModeVC1 (mvmode2);
+  return 0;
 }
 
 static inline int
-has_MVTYPEMB_bitplane(GstVaapiDecoderVC1 *decoder)
+has_MVTYPEMB_bitplane (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    guint mvmode, mvmode2;
-
-    if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
-        GstVC1PicAdvanced * const pic = &frame_hdr->pic.advanced;
-        if (pic->mvtypemb)
-            return 0;
-        mvmode  = pic->mvmode;
-        mvmode2 = pic->mvmode2;
-    }
-    else {
-        GstVC1PicSimpleMain * const pic = &frame_hdr->pic.simple;
-        if (pic->mvtypemb)
-            return 0;
-        mvmode  = pic->mvmode;
-        mvmode2 = pic->mvmode2;
-    }
-    return (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P &&
-            (mvmode == GST_VC1_MVMODE_MIXED_MV ||
-             (mvmode == GST_VC1_MVMODE_INTENSITY_COMP &&
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  guint mvmode, mvmode2;
+
+  if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
+    GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
+    if (pic->mvtypemb)
+      return 0;
+    mvmode = pic->mvmode;
+    mvmode2 = pic->mvmode2;
+  } else {
+    GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
+    if (pic->mvtypemb)
+      return 0;
+    mvmode = pic->mvmode;
+    mvmode2 = pic->mvmode2;
+  }
+  return (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P &&
+      (mvmode == GST_VC1_MVMODE_MIXED_MV ||
+          (mvmode == GST_VC1_MVMODE_INTENSITY_COMP &&
               mvmode2 == GST_VC1_MVMODE_MIXED_MV)));
 }
 
 static inline int
-has_SKIPMB_bitplane(GstVaapiDecoderVC1 *decoder)
+has_SKIPMB_bitplane (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-
-    if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
-        GstVC1PicAdvanced * const pic = &frame_hdr->pic.advanced;
-        if (pic->skipmb)
-            return 0;
-    }
-    else {
-        GstVC1PicSimpleMain * const pic = &frame_hdr->pic.simple;
-        if (pic->skipmb)
-            return 0;
-    }
-    return (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P ||
-            frame_hdr->ptype == GST_VC1_PICTURE_TYPE_B);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+
+  if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
+    GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
+    if (pic->skipmb)
+      return 0;
+  } else {
+    GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
+    if (pic->skipmb)
+      return 0;
+  }
+  return (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P ||
+      frame_hdr->ptype == GST_VC1_PICTURE_TYPE_B);
 }
 
 static inline int
-has_DIRECTMB_bitplane(GstVaapiDecoderVC1 *decoder)
+has_DIRECTMB_bitplane (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-
-    if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
-        GstVC1PicAdvanced * const pic = &frame_hdr->pic.advanced;
-        if (pic->directmb)
-            return 0;
-    }
-    else {
-        GstVC1PicSimpleMain * const pic = &frame_hdr->pic.simple;
-        if (pic->directmb)
-            return 0;
-    }
-    return frame_hdr->ptype == GST_VC1_PICTURE_TYPE_B;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+
+  if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
+    GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
+    if (pic->directmb)
+      return 0;
+  } else {
+    GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
+    if (pic->directmb)
+      return 0;
+  }
+  return frame_hdr->ptype == GST_VC1_PICTURE_TYPE_B;
 }
 
 static inline int
-has_ACPRED_bitplane(GstVaapiDecoderVC1 *decoder)
+has_ACPRED_bitplane (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    GstVC1PicAdvanced * const pic = &frame_hdr->pic.advanced;
-
-    if (seq_hdr->profile != GST_VC1_PROFILE_ADVANCED)
-        return 0;
-    if (pic->acpred)
-        return 0;
-    return (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
-            frame_hdr->ptype == GST_VC1_PICTURE_TYPE_BI);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
+
+  if (seq_hdr->profile != GST_VC1_PROFILE_ADVANCED)
+    return 0;
+  if (pic->acpred)
+    return 0;
+  return (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
+      frame_hdr->ptype == GST_VC1_PICTURE_TYPE_BI);
 }
 
 static inline int
-has_OVERFLAGS_bitplane(GstVaapiDecoderVC1 *decoder)
+has_OVERFLAGS_bitplane (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1EntryPointHdr * const entrypoint_hdr = &priv->entrypoint_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    GstVC1PicAdvanced * const pic = &frame_hdr->pic.advanced;
-
-    if (seq_hdr->profile != GST_VC1_PROFILE_ADVANCED)
-        return 0;
-    if (pic->overflags)
-        return 0;
-    return ((frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
-             frame_hdr->ptype == GST_VC1_PICTURE_TYPE_BI) &&
-            (entrypoint_hdr->overlap && frame_hdr->pquant <= 8) &&
-            pic->condover == GST_VC1_CONDOVER_SELECT);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1EntryPointHdr *const entrypoint_hdr = &priv->entrypoint_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
+
+  if (seq_hdr->profile != GST_VC1_PROFILE_ADVANCED)
+    return 0;
+  if (pic->overflags)
+    return 0;
+  return ((frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
+          frame_hdr->ptype == GST_VC1_PICTURE_TYPE_BI) &&
+      (entrypoint_hdr->overlap && frame_hdr->pquant <= 8) &&
+      pic->condover == GST_VC1_CONDOVER_SELECT);
 }
 
 static inline void
-pack_bitplanes(GstVaapiBitPlane *bitplane, guint n, const guint8 *bitplanes[3], guint x, guint y, guint stride)
+pack_bitplanes (GstVaapiBitPlane * bitplane, guint n,
+    const guint8 * bitplanes[3], guint x, guint y, guint stride)
 {
-    const guint dst_index = n / 2;
-    const guint src_index = y * stride + x;
-    guint8 v = 0;
-
-    if (bitplanes[0])
-        v |= bitplanes[0][src_index];
-    if (bitplanes[1])
-        v |= bitplanes[1][src_index] << 1;
-    if (bitplanes[2])
-        v |= bitplanes[2][src_index] << 2;
-    bitplane->data[dst_index] = (bitplane->data[dst_index] << 4) | v;
+  const guint dst_index = n / 2;
+  const guint src_index = y * stride + x;
+  guint8 v = 0;
+
+  if (bitplanes[0])
+    v |= bitplanes[0][src_index];
+  if (bitplanes[1])
+    v |= bitplanes[1][src_index] << 1;
+  if (bitplanes[2])
+    v |= bitplanes[2][src_index] << 2;
+  bitplane->data[dst_index] = (bitplane->data[dst_index] << 4) | v;
 }
 
 static gboolean
-fill_picture_structc(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
+fill_picture_structc (GstVaapiDecoderVC1 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    VAPictureParameterBufferVC1 * const pic_param = picture->param;
-    GstVC1SeqStructC * const structc = &priv->seq_hdr.struct_c;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    GstVC1PicSimpleMain * const pic = &frame_hdr->pic.simple;
-
-    /* Fill in VAPictureParameterBufferVC1 (simple/main profile bits) */
-    pic_param->sequence_fields.bits.finterpflag                     = structc->finterpflag;
-    pic_param->sequence_fields.bits.multires                        = structc->multires;
-    pic_param->sequence_fields.bits.overlap                         = structc->overlap;
-    pic_param->sequence_fields.bits.syncmarker                      = structc->syncmarker;
-    pic_param->sequence_fields.bits.rangered                        = structc->rangered;
-    pic_param->sequence_fields.bits.max_b_frames                    = structc->maxbframes;
-    pic_param->conditional_overlap_flag                             = 0; /* advanced profile only */
-    pic_param->fast_uvmc_flag                                       = structc->fastuvmc;
-    pic_param->b_picture_fraction                                   = get_BFRACTION(pic->bfraction);
-    pic_param->cbp_table                                            = pic->cbptab;
-    pic_param->mb_mode_table                                        = 0; /* XXX: interlaced frame */
-    pic_param->range_reduction_frame                                = pic->rangeredfrm;
-    pic_param->post_processing                                      = 0; /* advanced profile only */
-    pic_param->picture_resolution_index                             = pic->respic;
-    pic_param->luma_scale                                           = pic->lumscale;
-    pic_param->luma_shift                                           = pic->lumshift;
-    pic_param->raw_coding.flags.mv_type_mb                          = pic->mvtypemb;
-    pic_param->raw_coding.flags.direct_mb                           = pic->directmb;
-    pic_param->raw_coding.flags.skip_mb                             = pic->skipmb;
-    pic_param->bitplane_present.flags.bp_mv_type_mb                 = has_MVTYPEMB_bitplane(decoder);
-    pic_param->bitplane_present.flags.bp_direct_mb                  = has_DIRECTMB_bitplane(decoder);
-    pic_param->bitplane_present.flags.bp_skip_mb                    = has_SKIPMB_bitplane(decoder);
-    pic_param->mv_fields.bits.mv_table                              = pic->mvtab;
-    pic_param->mv_fields.bits.extended_mv_flag                      = structc->extended_mv;
-    pic_param->mv_fields.bits.extended_mv_range                     = pic->mvrange;
-    pic_param->transform_fields.bits.variable_sized_transform_flag  = structc->vstransform;
-    pic_param->transform_fields.bits.mb_level_transform_type_flag   = pic->ttmbf;
-    pic_param->transform_fields.bits.frame_level_transform_type     = pic->ttfrm;
-    pic_param->transform_fields.bits.transform_ac_codingset_idx2    = pic->transacfrm2;
-
-    /* Refer to 8.3.7 Rounding control for Simple and Main Profile  */
-    if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
-        frame_hdr->ptype == GST_VC1_PICTURE_TYPE_BI)
-        priv->rndctrl = 1;
-    else if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P)
-        priv->rndctrl ^= 1;
-
-    pic_param->rounding_control = priv->rndctrl;
-
-    return TRUE;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  VAPictureParameterBufferVC1 *const pic_param = picture->param;
+  GstVC1SeqStructC *const structc = &priv->seq_hdr.struct_c;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  GstVC1PicSimpleMain *const pic = &frame_hdr->pic.simple;
+
+  /* Fill in VAPictureParameterBufferVC1 (simple/main profile bits) */
+  pic_param->sequence_fields.bits.finterpflag = structc->finterpflag;
+  pic_param->sequence_fields.bits.multires = structc->multires;
+  pic_param->sequence_fields.bits.overlap = structc->overlap;
+  pic_param->sequence_fields.bits.syncmarker = structc->syncmarker;
+  pic_param->sequence_fields.bits.rangered = structc->rangered;
+  pic_param->sequence_fields.bits.max_b_frames = structc->maxbframes;
+  pic_param->conditional_overlap_flag = 0;      /* advanced profile only */
+  pic_param->fast_uvmc_flag = structc->fastuvmc;
+  pic_param->b_picture_fraction = get_BFRACTION (pic->bfraction);
+  pic_param->cbp_table = pic->cbptab;
+  pic_param->mb_mode_table = 0; /* XXX: interlaced frame */
+  pic_param->range_reduction_frame = pic->rangeredfrm;
+  pic_param->post_processing = 0;       /* advanced profile only */
+  pic_param->picture_resolution_index = pic->respic;
+  pic_param->luma_scale = pic->lumscale;
+  pic_param->luma_shift = pic->lumshift;
+  pic_param->raw_coding.flags.mv_type_mb = pic->mvtypemb;
+  pic_param->raw_coding.flags.direct_mb = pic->directmb;
+  pic_param->raw_coding.flags.skip_mb = pic->skipmb;
+  pic_param->bitplane_present.flags.bp_mv_type_mb =
+      has_MVTYPEMB_bitplane (decoder);
+  pic_param->bitplane_present.flags.bp_direct_mb =
+      has_DIRECTMB_bitplane (decoder);
+  pic_param->bitplane_present.flags.bp_skip_mb = has_SKIPMB_bitplane (decoder);
+  pic_param->mv_fields.bits.mv_table = pic->mvtab;
+  pic_param->mv_fields.bits.extended_mv_flag = structc->extended_mv;
+  pic_param->mv_fields.bits.extended_mv_range = pic->mvrange;
+  pic_param->transform_fields.bits.variable_sized_transform_flag =
+      structc->vstransform;
+  pic_param->transform_fields.bits.mb_level_transform_type_flag = pic->ttmbf;
+  pic_param->transform_fields.bits.frame_level_transform_type = pic->ttfrm;
+  pic_param->transform_fields.bits.transform_ac_codingset_idx2 =
+      pic->transacfrm2;
+
+  /* Refer to 8.3.7 Rounding control for Simple and Main Profile  */
+  if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_I ||
+      frame_hdr->ptype == GST_VC1_PICTURE_TYPE_BI)
+    priv->rndctrl = 1;
+  else if (frame_hdr->ptype == GST_VC1_PICTURE_TYPE_P)
+    priv->rndctrl ^= 1;
+
+  pic_param->rounding_control = priv->rndctrl;
+
+  return TRUE;
 }
 
 static gboolean
-fill_picture_advanced(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
+fill_picture_advanced (GstVaapiDecoderVC1 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    VAPictureParameterBufferVC1 * const pic_param = picture->param;
-    GstVC1AdvancedSeqHdr * const adv_hdr = &priv->seq_hdr.advanced;
-    GstVC1EntryPointHdr * const entrypoint_hdr = &priv->entrypoint_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    GstVC1PicAdvanced * const pic = &frame_hdr->pic.advanced;
-
-    if (!priv->has_entrypoint)
-        return FALSE;
-
-    /* Fill in VAPictureParameterBufferVC1 (advanced profile bits) */
-    pic_param->sequence_fields.bits.pulldown                        = adv_hdr->pulldown;
-    pic_param->sequence_fields.bits.interlace                       = adv_hdr->interlace;
-    pic_param->sequence_fields.bits.tfcntrflag                      = adv_hdr->tfcntrflag;
-    pic_param->sequence_fields.bits.finterpflag                     = adv_hdr->finterpflag;
-    pic_param->sequence_fields.bits.psf                             = adv_hdr->psf;
-    pic_param->sequence_fields.bits.overlap                         = entrypoint_hdr->overlap;
-    pic_param->entrypoint_fields.bits.broken_link                   = entrypoint_hdr->broken_link;
-    pic_param->entrypoint_fields.bits.closed_entry                  = entrypoint_hdr->closed_entry;
-    pic_param->entrypoint_fields.bits.panscan_flag                  = entrypoint_hdr->panscan_flag;
-    pic_param->entrypoint_fields.bits.loopfilter                    = entrypoint_hdr->loopfilter;
-    pic_param->conditional_overlap_flag                             = pic->condover;
-    pic_param->fast_uvmc_flag                                       = entrypoint_hdr->fastuvmc;
-    pic_param->range_mapping_fields.bits.luma_flag                  = entrypoint_hdr->range_mapy_flag;
-    pic_param->range_mapping_fields.bits.luma                       = entrypoint_hdr->range_mapy;
-    pic_param->range_mapping_fields.bits.chroma_flag                = entrypoint_hdr->range_mapuv_flag;
-    pic_param->range_mapping_fields.bits.chroma                     = entrypoint_hdr->range_mapuv;
-    pic_param->b_picture_fraction                                   = get_BFRACTION(pic->bfraction);
-    pic_param->cbp_table                                            = pic->cbptab;
-    pic_param->mb_mode_table                                        = 0; /* XXX: interlaced frame */
-    pic_param->range_reduction_frame                                = 0; /* simple/main profile only */
-    pic_param->rounding_control                                     = pic->rndctrl;
-    pic_param->post_processing                                      = pic->postproc;
-    pic_param->picture_resolution_index                             = 0; /* simple/main profile only */
-    pic_param->luma_scale                                           = pic->lumscale;
-    pic_param->luma_shift                                           = pic->lumshift;
-    pic_param->picture_fields.bits.frame_coding_mode                = pic->fcm;
-    pic_param->picture_fields.bits.top_field_first                  = pic->tff;
-    pic_param->picture_fields.bits.is_first_field                   = pic->fcm == 0; /* XXX: interlaced frame */
-    pic_param->picture_fields.bits.intensity_compensation           = pic->mvmode == GST_VC1_MVMODE_INTENSITY_COMP;
-    pic_param->raw_coding.flags.mv_type_mb                          = pic->mvtypemb;
-    pic_param->raw_coding.flags.direct_mb                           = pic->directmb;
-    pic_param->raw_coding.flags.skip_mb                             = pic->skipmb;
-    pic_param->raw_coding.flags.ac_pred                             = pic->acpred;
-    pic_param->raw_coding.flags.overflags                           = pic->overflags;
-    pic_param->bitplane_present.flags.bp_mv_type_mb                 = has_MVTYPEMB_bitplane(decoder);
-    pic_param->bitplane_present.flags.bp_direct_mb                  = has_DIRECTMB_bitplane(decoder);
-    pic_param->bitplane_present.flags.bp_skip_mb                    = has_SKIPMB_bitplane(decoder);
-    pic_param->bitplane_present.flags.bp_ac_pred                    = has_ACPRED_bitplane(decoder);
-    pic_param->bitplane_present.flags.bp_overflags                  = has_OVERFLAGS_bitplane(decoder);
-    pic_param->reference_fields.bits.reference_distance_flag        = entrypoint_hdr->refdist_flag;
-    pic_param->mv_fields.bits.mv_table                              = pic->mvtab;
-    pic_param->mv_fields.bits.extended_mv_flag                      = entrypoint_hdr->extended_mv;
-    pic_param->mv_fields.bits.extended_mv_range                     = pic->mvrange;
-    pic_param->mv_fields.bits.extended_dmv_flag                     = entrypoint_hdr->extended_dmv;
-    pic_param->pic_quantizer_fields.bits.dquant                     = entrypoint_hdr->dquant;
-    pic_param->pic_quantizer_fields.bits.quantizer                  = entrypoint_hdr->quantizer;
-    pic_param->transform_fields.bits.variable_sized_transform_flag  = entrypoint_hdr->vstransform;
-    pic_param->transform_fields.bits.mb_level_transform_type_flag   = pic->ttmbf;
-    pic_param->transform_fields.bits.frame_level_transform_type     = pic->ttfrm;
-    pic_param->transform_fields.bits.transform_ac_codingset_idx2    = pic->transacfrm2;
-    return TRUE;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  VAPictureParameterBufferVC1 *const pic_param = picture->param;
+  GstVC1AdvancedSeqHdr *const adv_hdr = &priv->seq_hdr.advanced;
+  GstVC1EntryPointHdr *const entrypoint_hdr = &priv->entrypoint_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  GstVC1PicAdvanced *const pic = &frame_hdr->pic.advanced;
+
+  if (!priv->has_entrypoint)
+    return FALSE;
+
+  /* Fill in VAPictureParameterBufferVC1 (advanced profile bits) */
+  pic_param->sequence_fields.bits.pulldown = adv_hdr->pulldown;
+  pic_param->sequence_fields.bits.interlace = adv_hdr->interlace;
+  pic_param->sequence_fields.bits.tfcntrflag = adv_hdr->tfcntrflag;
+  pic_param->sequence_fields.bits.finterpflag = adv_hdr->finterpflag;
+  pic_param->sequence_fields.bits.psf = adv_hdr->psf;
+  pic_param->sequence_fields.bits.overlap = entrypoint_hdr->overlap;
+  pic_param->entrypoint_fields.bits.broken_link = entrypoint_hdr->broken_link;
+  pic_param->entrypoint_fields.bits.closed_entry = entrypoint_hdr->closed_entry;
+  pic_param->entrypoint_fields.bits.panscan_flag = entrypoint_hdr->panscan_flag;
+  pic_param->entrypoint_fields.bits.loopfilter = entrypoint_hdr->loopfilter;
+  pic_param->conditional_overlap_flag = pic->condover;
+  pic_param->fast_uvmc_flag = entrypoint_hdr->fastuvmc;
+  pic_param->range_mapping_fields.bits.luma_flag =
+      entrypoint_hdr->range_mapy_flag;
+  pic_param->range_mapping_fields.bits.luma = entrypoint_hdr->range_mapy;
+  pic_param->range_mapping_fields.bits.chroma_flag =
+      entrypoint_hdr->range_mapuv_flag;
+  pic_param->range_mapping_fields.bits.chroma = entrypoint_hdr->range_mapuv;
+  pic_param->b_picture_fraction = get_BFRACTION (pic->bfraction);
+  pic_param->cbp_table = pic->cbptab;
+  pic_param->mb_mode_table = 0; /* XXX: interlaced frame */
+  pic_param->range_reduction_frame = 0; /* simple/main profile only */
+  pic_param->rounding_control = pic->rndctrl;
+  pic_param->post_processing = pic->postproc;
+  pic_param->picture_resolution_index = 0;      /* simple/main profile only */
+  pic_param->luma_scale = pic->lumscale;
+  pic_param->luma_shift = pic->lumshift;
+  pic_param->picture_fields.bits.frame_coding_mode = pic->fcm;
+  pic_param->picture_fields.bits.top_field_first = pic->tff;
+  pic_param->picture_fields.bits.is_first_field = pic->fcm == 0;        /* XXX: interlaced frame */
+  pic_param->picture_fields.bits.intensity_compensation =
+      pic->mvmode == GST_VC1_MVMODE_INTENSITY_COMP;
+  pic_param->raw_coding.flags.mv_type_mb = pic->mvtypemb;
+  pic_param->raw_coding.flags.direct_mb = pic->directmb;
+  pic_param->raw_coding.flags.skip_mb = pic->skipmb;
+  pic_param->raw_coding.flags.ac_pred = pic->acpred;
+  pic_param->raw_coding.flags.overflags = pic->overflags;
+  pic_param->bitplane_present.flags.bp_mv_type_mb =
+      has_MVTYPEMB_bitplane (decoder);
+  pic_param->bitplane_present.flags.bp_direct_mb =
+      has_DIRECTMB_bitplane (decoder);
+  pic_param->bitplane_present.flags.bp_skip_mb = has_SKIPMB_bitplane (decoder);
+  pic_param->bitplane_present.flags.bp_ac_pred = has_ACPRED_bitplane (decoder);
+  pic_param->bitplane_present.flags.bp_overflags =
+      has_OVERFLAGS_bitplane (decoder);
+  pic_param->reference_fields.bits.reference_distance_flag =
+      entrypoint_hdr->refdist_flag;
+  pic_param->mv_fields.bits.mv_table = pic->mvtab;
+  pic_param->mv_fields.bits.extended_mv_flag = entrypoint_hdr->extended_mv;
+  pic_param->mv_fields.bits.extended_mv_range = pic->mvrange;
+  pic_param->mv_fields.bits.extended_dmv_flag = entrypoint_hdr->extended_dmv;
+  pic_param->pic_quantizer_fields.bits.dquant = entrypoint_hdr->dquant;
+  pic_param->pic_quantizer_fields.bits.quantizer = entrypoint_hdr->quantizer;
+  pic_param->transform_fields.bits.variable_sized_transform_flag =
+      entrypoint_hdr->vstransform;
+  pic_param->transform_fields.bits.mb_level_transform_type_flag = pic->ttmbf;
+  pic_param->transform_fields.bits.frame_level_transform_type = pic->ttfrm;
+  pic_param->transform_fields.bits.transform_ac_codingset_idx2 =
+      pic->transacfrm2;
+  return TRUE;
 }
 
 static gboolean
-fill_picture(GstVaapiDecoderVC1 *decoder, GstVaapiPicture *picture)
+fill_picture (GstVaapiDecoderVC1 * decoder, GstVaapiPicture * picture)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    VAPictureParameterBufferVC1 * const pic_param = picture->param;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    GstVC1VopDquant * const vopdquant = &frame_hdr->vopdquant;
-    GstVaapiPicture *prev_picture, *next_picture;
-
-    /* Fill in VAPictureParameterBufferVC1 (common fields) */
-    pic_param->forward_reference_picture                            = VA_INVALID_ID;
-    pic_param->backward_reference_picture                           = VA_INVALID_ID;
-    pic_param->inloop_decoded_picture                               = VA_INVALID_ID;
-    pic_param->sequence_fields.value                                = 0;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  VAPictureParameterBufferVC1 *const pic_param = picture->param;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  GstVC1VopDquant *const vopdquant = &frame_hdr->vopdquant;
+  GstVaapiPicture *prev_picture, *next_picture;
+
+  /* Fill in VAPictureParameterBufferVC1 (common fields) */
+  pic_param->forward_reference_picture = VA_INVALID_ID;
+  pic_param->backward_reference_picture = VA_INVALID_ID;
+  pic_param->inloop_decoded_picture = VA_INVALID_ID;
+  pic_param->sequence_fields.value = 0;
 #if VA_CHECK_VERSION(0,32,0)
-    pic_param->sequence_fields.bits.profile                         = seq_hdr->profile;
+  pic_param->sequence_fields.bits.profile = seq_hdr->profile;
 #endif
-    pic_param->coded_width                                          = priv->width;
-    pic_param->coded_height                                         = priv->height;
-    pic_param->entrypoint_fields.value                              = 0;
-    pic_param->range_mapping_fields.value                           = 0;
-    pic_param->picture_fields.value                                 = 0;
-    pic_param->picture_fields.bits.picture_type                     = get_PTYPE(frame_hdr->ptype);
-    pic_param->raw_coding.value                                     = 0;
-    pic_param->bitplane_present.value                               = 0;
-    pic_param->reference_fields.value                               = 0;
-    pic_param->mv_fields.value                                      = 0;
-    pic_param->mv_fields.bits.mv_mode                               = get_MVMODE(frame_hdr);
-    pic_param->mv_fields.bits.mv_mode2                              = get_MVMODE2(frame_hdr);
-    pic_param->pic_quantizer_fields.value                           = 0;
-    pic_param->pic_quantizer_fields.bits.half_qp                    = frame_hdr->halfqp;
-    pic_param->pic_quantizer_fields.bits.pic_quantizer_scale        = frame_hdr->pquant;
-    pic_param->pic_quantizer_fields.bits.pic_quantizer_type         = frame_hdr->pquantizer;
-    pic_param->pic_quantizer_fields.bits.dq_frame                   = vopdquant->dquantfrm;
-    pic_param->pic_quantizer_fields.bits.dq_profile                 = vopdquant->dqprofile;
-    pic_param->pic_quantizer_fields.bits.dq_sb_edge                 = vopdquant->dqprofile == GST_VC1_DQPROFILE_SINGLE_EDGE ? vopdquant->dqbedge : 0;
-    pic_param->pic_quantizer_fields.bits.dq_db_edge                 = vopdquant->dqprofile == GST_VC1_DQPROFILE_DOUBLE_EDGES ? vopdquant->dqbedge : 0;
-    pic_param->pic_quantizer_fields.bits.dq_binary_level            = vopdquant->dqbilevel;
-    pic_param->pic_quantizer_fields.bits.alt_pic_quantizer          = vopdquant->altpquant;
-    pic_param->transform_fields.value                               = 0;
-    pic_param->transform_fields.bits.transform_ac_codingset_idx1    = frame_hdr->transacfrm;
-    pic_param->transform_fields.bits.intra_transform_dc_table       = frame_hdr->transdctab;
-
-    if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
-        if (!fill_picture_advanced(decoder, picture))
-            return FALSE;
-    }
-    else {
-        if (!fill_picture_structc(decoder, picture))
-            return FALSE;
-    }
+  pic_param->coded_width = priv->width;
+  pic_param->coded_height = priv->height;
+  pic_param->entrypoint_fields.value = 0;
+  pic_param->range_mapping_fields.value = 0;
+  pic_param->picture_fields.value = 0;
+  pic_param->picture_fields.bits.picture_type = get_PTYPE (frame_hdr->ptype);
+  pic_param->raw_coding.value = 0;
+  pic_param->bitplane_present.value = 0;
+  pic_param->reference_fields.value = 0;
+  pic_param->mv_fields.value = 0;
+  pic_param->mv_fields.bits.mv_mode = get_MVMODE (frame_hdr);
+  pic_param->mv_fields.bits.mv_mode2 = get_MVMODE2 (frame_hdr);
+  pic_param->pic_quantizer_fields.value = 0;
+  pic_param->pic_quantizer_fields.bits.half_qp = frame_hdr->halfqp;
+  pic_param->pic_quantizer_fields.bits.pic_quantizer_scale = frame_hdr->pquant;
+  pic_param->pic_quantizer_fields.bits.pic_quantizer_type =
+      frame_hdr->pquantizer;
+  pic_param->pic_quantizer_fields.bits.dq_frame = vopdquant->dquantfrm;
+  pic_param->pic_quantizer_fields.bits.dq_profile = vopdquant->dqprofile;
+  pic_param->pic_quantizer_fields.bits.dq_sb_edge =
+      vopdquant->dqprofile ==
+      GST_VC1_DQPROFILE_SINGLE_EDGE ? vopdquant->dqbedge : 0;
+  pic_param->pic_quantizer_fields.bits.dq_db_edge =
+      vopdquant->dqprofile ==
+      GST_VC1_DQPROFILE_DOUBLE_EDGES ? vopdquant->dqbedge : 0;
+  pic_param->pic_quantizer_fields.bits.dq_binary_level = vopdquant->dqbilevel;
+  pic_param->pic_quantizer_fields.bits.alt_pic_quantizer = vopdquant->altpquant;
+  pic_param->transform_fields.value = 0;
+  pic_param->transform_fields.bits.transform_ac_codingset_idx1 =
+      frame_hdr->transacfrm;
+  pic_param->transform_fields.bits.intra_transform_dc_table =
+      frame_hdr->transdctab;
+
+  if (seq_hdr->profile == GST_VC1_PROFILE_ADVANCED) {
+    if (!fill_picture_advanced (decoder, picture))
+      return FALSE;
+  } else {
+    if (!fill_picture_structc (decoder, picture))
+      return FALSE;
+  }
+
+  gst_vaapi_dpb_get_neighbours (priv->dpb, picture,
+      &prev_picture, &next_picture);
+
+  switch (picture->type) {
+    case GST_VAAPI_PICTURE_TYPE_B:
+      if (next_picture)
+        pic_param->backward_reference_picture = next_picture->surface_id;
+      if (prev_picture)
+        pic_param->forward_reference_picture = prev_picture->surface_id;
+      else if (!priv->closed_entry)
+        GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
+      break;
+    case GST_VAAPI_PICTURE_TYPE_P:
+      if (prev_picture)
+        pic_param->forward_reference_picture = prev_picture->surface_id;
+      break;
+    default:
+      break;
+  }
 
-    gst_vaapi_dpb_get_neighbours(priv->dpb, picture,
-        &prev_picture, &next_picture);
+  if (pic_param->bitplane_present.value) {
+    const guint8 *bitplanes[3];
+    guint x, y, n;
 
     switch (picture->type) {
-    case GST_VAAPI_PICTURE_TYPE_B:
-        if (next_picture)
-            pic_param->backward_reference_picture = next_picture->surface_id;
-        if (prev_picture)
-            pic_param->forward_reference_picture = prev_picture->surface_id;
-        else if (!priv->closed_entry)
-            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_SKIPPED);
+      case GST_VAAPI_PICTURE_TYPE_P:
+        bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb ?
+            priv->bitplanes->directmb : NULL;
+        bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb ?
+            priv->bitplanes->skipmb : NULL;
+        bitplanes[2] = pic_param->bitplane_present.flags.bp_mv_type_mb ?
+            priv->bitplanes->mvtypemb : NULL;
         break;
-    case GST_VAAPI_PICTURE_TYPE_P:
-        if (prev_picture)
-            pic_param->forward_reference_picture = prev_picture->surface_id;
+      case GST_VAAPI_PICTURE_TYPE_B:
+        bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb ?
+            priv->bitplanes->directmb : NULL;
+        bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb ?
+            priv->bitplanes->skipmb : NULL;
+        bitplanes[2] = NULL;    /* XXX: interlaced frame (FORWARD plane) */
         break;
-    default:
+      case GST_VAAPI_PICTURE_TYPE_BI:
+      case GST_VAAPI_PICTURE_TYPE_I:
+        bitplanes[0] = NULL;    /* XXX: interlaced frame (FIELDTX plane) */
+        bitplanes[1] = pic_param->bitplane_present.flags.bp_ac_pred ?
+            priv->bitplanes->acpred : NULL;
+        bitplanes[2] = pic_param->bitplane_present.flags.bp_overflags ?
+            priv->bitplanes->overflags : NULL;
+        break;
+      default:
+        bitplanes[0] = NULL;
+        bitplanes[1] = NULL;
+        bitplanes[2] = NULL;
         break;
     }
 
-    if (pic_param->bitplane_present.value) {
-        const guint8 *bitplanes[3];
-        guint x, y, n;
-
-        switch (picture->type) {
-        case GST_VAAPI_PICTURE_TYPE_P:
-            bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb  ? priv->bitplanes->directmb  : NULL;
-            bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb    ? priv->bitplanes->skipmb    : NULL;
-            bitplanes[2] = pic_param->bitplane_present.flags.bp_mv_type_mb ? priv->bitplanes->mvtypemb  : NULL;
-            break;
-        case GST_VAAPI_PICTURE_TYPE_B:
-            bitplanes[0] = pic_param->bitplane_present.flags.bp_direct_mb  ? priv->bitplanes->directmb  : NULL;
-            bitplanes[1] = pic_param->bitplane_present.flags.bp_skip_mb    ? priv->bitplanes->skipmb    : NULL;
-            bitplanes[2] = NULL; /* XXX: interlaced frame (FORWARD plane) */
-            break;
-        case GST_VAAPI_PICTURE_TYPE_BI:
-        case GST_VAAPI_PICTURE_TYPE_I:
-            bitplanes[0] = NULL; /* XXX: interlaced frame (FIELDTX plane) */
-            bitplanes[1] = pic_param->bitplane_present.flags.bp_ac_pred    ? priv->bitplanes->acpred    : NULL;
-            bitplanes[2] = pic_param->bitplane_present.flags.bp_overflags  ? priv->bitplanes->overflags : NULL;
-            break;
-        default:
-            bitplanes[0] = NULL;
-            bitplanes[1] = NULL;
-            bitplanes[2] = NULL;
-            break;
-        }
-
-        picture->bitplane = GST_VAAPI_BITPLANE_NEW(
-            decoder,
-            (seq_hdr->mb_width * seq_hdr->mb_height + 1) / 2
-        );
-        if (!picture->bitplane)
-            return FALSE;
-
-        n = 0;
-        for (y = 0; y < seq_hdr->mb_height; y++)
-            for (x = 0; x < seq_hdr->mb_width; x++, n++)
-                pack_bitplanes(picture->bitplane, n, bitplanes, x, y, seq_hdr->mb_stride);
-        if (n & 1) /* move last nibble to the high order */
-            picture->bitplane->data[n/2] <<= 4;
-    }
-    return TRUE;
+    picture->bitplane = GST_VAAPI_BITPLANE_NEW (decoder,
+        (seq_hdr->mb_width * seq_hdr->mb_height + 1) / 2);
+    if (!picture->bitplane)
+      return FALSE;
+
+    n = 0;
+    for (y = 0; y < seq_hdr->mb_height; y++)
+      for (x = 0; x < seq_hdr->mb_width; x++, n++)
+        pack_bitplanes (picture->bitplane, n, bitplanes, x, y,
+            seq_hdr->mb_stride);
+    if (n & 1)                  /* move last nibble to the high order */
+      picture->bitplane->data[n / 2] <<= 4;
+  }
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-decode_slice_chunk(GstVaapiDecoderVC1 *decoder, GstVC1BDU *ebdu,
+decode_slice_chunk (GstVaapiDecoderVC1 * decoder, GstVC1BDU * ebdu,
     guint slice_addr, guint header_size)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiPicture * const picture = priv->current_picture;
-    GstVaapiSlice *slice;
-    VASliceParameterBufferVC1 *slice_param;
-
-    slice = GST_VAAPI_SLICE_NEW(VC1, decoder,
-        ebdu->data + ebdu->sc_offset,
-        ebdu->size + ebdu->offset - ebdu->sc_offset);
-    if (!slice) {
-        GST_ERROR("failed to allocate slice");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    gst_vaapi_picture_add_slice(picture, slice);
-
-    /* Fill in VASliceParameterBufferVC1 */
-    slice_param = slice->param;
-    slice_param->macroblock_offset = 8 * (ebdu->offset - ebdu->sc_offset) +
-        header_size;
-    slice_param->slice_vertical_position = slice_addr;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiPicture *const picture = priv->current_picture;
+  GstVaapiSlice *slice;
+  VASliceParameterBufferVC1 *slice_param;
+
+  slice = GST_VAAPI_SLICE_NEW (VC1, decoder,
+      ebdu->data + ebdu->sc_offset,
+      ebdu->size + ebdu->offset - ebdu->sc_offset);
+  if (!slice) {
+    GST_ERROR ("failed to allocate slice");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  gst_vaapi_picture_add_slice (picture, slice);
+
+  /* Fill in VASliceParameterBufferVC1 */
+  slice_param = slice->param;
+  slice_param->macroblock_offset = 8 * (ebdu->offset - ebdu->sc_offset) +
+      header_size;
+  slice_param->slice_vertical_position = slice_addr;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-decode_frame(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
+decode_frame (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu, GstVC1BDU * ebdu)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
-    GstVC1ParserResult result;
-    GstVaapiPicture * const picture = priv->current_picture;
-
-    memset(frame_hdr, 0, sizeof(*frame_hdr));
-    result = gst_vc1_parse_frame_header(
-        rbdu->data + rbdu->offset,
-        rbdu->size,
-        frame_hdr,
-        &priv->seq_hdr,
-        priv->bitplanes
-    );
-    if (result != GST_VC1_PARSER_OK) {
-        GST_ERROR("failed to parse frame layer");
-        return get_status(result);
-    }
-
-    switch (frame_hdr->ptype) {
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
+  GstVC1ParserResult result;
+  GstVaapiPicture *const picture = priv->current_picture;
+
+  memset (frame_hdr, 0, sizeof (*frame_hdr));
+  result = gst_vc1_parse_frame_header (rbdu->data + rbdu->offset,
+      rbdu->size, frame_hdr, &priv->seq_hdr, priv->bitplanes);
+  if (result != GST_VC1_PARSER_OK) {
+    GST_ERROR ("failed to parse frame layer");
+    return get_status (result);
+  }
+
+  switch (frame_hdr->ptype) {
     case GST_VC1_PICTURE_TYPE_I:
-        picture->type   = GST_VAAPI_PICTURE_TYPE_I;
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_I;
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      break;
     case GST_VC1_PICTURE_TYPE_SKIPPED:
     case GST_VC1_PICTURE_TYPE_P:
-        picture->type   = GST_VAAPI_PICTURE_TYPE_P;
-        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_P;
+      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+      break;
     case GST_VC1_PICTURE_TYPE_B:
-        picture->type   = GST_VAAPI_PICTURE_TYPE_B;
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_B;
+      break;
     case GST_VC1_PICTURE_TYPE_BI:
-        picture->type   = GST_VAAPI_PICTURE_TYPE_BI;
-        break;
+      picture->type = GST_VAAPI_PICTURE_TYPE_BI;
+      break;
     default:
-        GST_ERROR("unsupported picture type %d", frame_hdr->ptype);
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
-
-    /* Update presentation time */
-    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
-        picture->poc = priv->last_non_b_picture ?
-            (priv->last_non_b_picture->poc + 1) : priv->next_poc;
-        priv->next_poc = picture->poc + 1;
-        gst_vaapi_picture_replace(&priv->last_non_b_picture, picture);
-    }
-    else if (!priv->last_non_b_picture)
-        picture->poc = priv->next_poc++;
-    else {                                              /* B or BI */
-        picture->poc = priv->last_non_b_picture->poc++;
-        priv->next_poc = priv->last_non_b_picture->poc + 1;
-    }
-    picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
-
-    if (!fill_picture(decoder, picture))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    return decode_slice_chunk(decoder, ebdu, 0, frame_hdr->header_size);
+      GST_ERROR ("unsupported picture type %d", frame_hdr->ptype);
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+
+  /* Update presentation time */
+  if (GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
+    picture->poc = priv->last_non_b_picture ?
+        (priv->last_non_b_picture->poc + 1) : priv->next_poc;
+    priv->next_poc = picture->poc + 1;
+    gst_vaapi_picture_replace (&priv->last_non_b_picture, picture);
+  } else if (!priv->last_non_b_picture)
+    picture->poc = priv->next_poc++;
+  else {                        /* B or BI */
+    picture->poc = priv->last_non_b_picture->poc++;
+    priv->next_poc = priv->last_non_b_picture->poc + 1;
+  }
+  picture->pts = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
+
+  if (!fill_picture (decoder, picture))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  return decode_slice_chunk (decoder, ebdu, 0, frame_hdr->header_size);
 }
 
 static GstVaapiDecoderStatus
-decode_slice(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
+decode_slice (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu, GstVC1BDU * ebdu)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SliceHdr slice_hdr;
-    GstVC1ParserResult result;
-
-    memset(&slice_hdr, 0, sizeof(slice_hdr));
-    result = gst_vc1_parse_slice_header(
-        rbdu->data + rbdu->offset,
-        rbdu->size,
-        &slice_hdr,
-        &priv->seq_hdr
-    );
-    if (result != GST_VC1_PARSER_OK) {
-        GST_ERROR("failed to parse slice layer");
-        return get_status(result);
-    }
-    return decode_slice_chunk(decoder, ebdu, slice_hdr.slice_addr,
-        slice_hdr.header_size);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SliceHdr slice_hdr;
+  GstVC1ParserResult result;
+
+  memset (&slice_hdr, 0, sizeof (slice_hdr));
+  result = gst_vc1_parse_slice_header (rbdu->data + rbdu->offset,
+      rbdu->size, &slice_hdr, &priv->seq_hdr);
+  if (result != GST_VC1_PARSER_OK) {
+    GST_ERROR ("failed to parse slice layer");
+    return get_status (result);
+  }
+  return decode_slice_chunk (decoder, ebdu, slice_hdr.slice_addr,
+      slice_hdr.header_size);
 }
 
 static gboolean
-decode_rbdu(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
+decode_rbdu (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu, GstVC1BDU * ebdu)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    guint8 *rbdu_buffer;
-    guint i, j, rbdu_buffer_size;
-
-    /* BDU are encapsulated in advanced profile mode only */
-    if (priv->profile != GST_VAAPI_PROFILE_VC1_ADVANCED) {
-        memcpy(rbdu, ebdu, sizeof(*rbdu));
-        return TRUE;
-    }
-
-    /* Reallocate unescaped bitstream buffer */
-    rbdu_buffer = priv->rbdu_buffer;
-    if (!rbdu_buffer || ebdu->size > priv->rbdu_buffer_size) {
-        rbdu_buffer = g_realloc(priv->rbdu_buffer, ebdu->size);
-        if (!rbdu_buffer)
-            return FALSE;
-        priv->rbdu_buffer = rbdu_buffer;
-        priv->rbdu_buffer_size = ebdu->size;
-    }
-
-    /* Unescape bitstream buffer */
-    if (ebdu->size < 4) {
-        memcpy(rbdu_buffer, ebdu->data + ebdu->offset, ebdu->size);
-        rbdu_buffer_size = ebdu->size;
-    }
-    else {
-        guint8 * const bdu_buffer = ebdu->data + ebdu->offset;
-        for (i = 0, j = 0; i < ebdu->size; i++) {
-            if (i >= 2 && i < ebdu->size - 1 &&
-                bdu_buffer[i - 1] == 0x00   &&
-                bdu_buffer[i - 2] == 0x00   &&
-                bdu_buffer[i    ] == 0x03   &&
-                bdu_buffer[i + 1] <= 0x03)
-                i++;
-            rbdu_buffer[j++] = bdu_buffer[i];
-        }
-        rbdu_buffer_size = j;
-    }
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  guint8 *rbdu_buffer;
+  guint i, j, rbdu_buffer_size;
 
-    /* Reconstruct RBDU */
-    rbdu->type      = ebdu->type;
-    rbdu->size      = rbdu_buffer_size;
-    rbdu->sc_offset = 0;
-    rbdu->offset    = 0;
-    rbdu->data      = rbdu_buffer;
+  /* BDU are encapsulated in advanced profile mode only */
+  if (priv->profile != GST_VAAPI_PROFILE_VC1_ADVANCED) {
+    memcpy (rbdu, ebdu, sizeof (*rbdu));
     return TRUE;
+  }
+
+  /* Reallocate unescaped bitstream buffer */
+  rbdu_buffer = priv->rbdu_buffer;
+  if (!rbdu_buffer || ebdu->size > priv->rbdu_buffer_size) {
+    rbdu_buffer = g_realloc (priv->rbdu_buffer, ebdu->size);
+    if (!rbdu_buffer)
+      return FALSE;
+    priv->rbdu_buffer = rbdu_buffer;
+    priv->rbdu_buffer_size = ebdu->size;
+  }
+
+  /* Unescape bitstream buffer */
+  if (ebdu->size < 4) {
+    memcpy (rbdu_buffer, ebdu->data + ebdu->offset, ebdu->size);
+    rbdu_buffer_size = ebdu->size;
+  } else {
+    guint8 *const bdu_buffer = ebdu->data + ebdu->offset;
+    for (i = 0, j = 0; i < ebdu->size; i++) {
+      if (i >= 2 && i < ebdu->size - 1 &&
+          bdu_buffer[i - 1] == 0x00 &&
+          bdu_buffer[i - 2] == 0x00 &&
+          bdu_buffer[i] == 0x03 && bdu_buffer[i + 1] <= 0x03)
+        i++;
+      rbdu_buffer[j++] = bdu_buffer[i];
+    }
+    rbdu_buffer_size = j;
+  }
+
+  /* Reconstruct RBDU */
+  rbdu->type = ebdu->type;
+  rbdu->size = rbdu_buffer_size;
+  rbdu->sc_offset = 0;
+  rbdu->offset = 0;
+  rbdu->data = rbdu_buffer;
+  return TRUE;
 }
 
 static GstVaapiDecoderStatus
-decode_ebdu(GstVaapiDecoderVC1 *decoder, GstVC1BDU *ebdu)
+decode_ebdu (GstVaapiDecoderVC1 * decoder, GstVC1BDU * ebdu)
 {
-    GstVaapiDecoderStatus status;
-    GstVC1BDU rbdu;
+  GstVaapiDecoderStatus status;
+  GstVC1BDU rbdu;
 
-    if (!decode_rbdu(decoder, &rbdu, ebdu))
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  if (!decode_rbdu (decoder, &rbdu, ebdu))
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
 
-    switch (ebdu->type) {
+  switch (ebdu->type) {
     case GST_VC1_SEQUENCE:
-        status = decode_sequence(decoder, &rbdu, ebdu);
-        break;
+      status = decode_sequence (decoder, &rbdu, ebdu);
+      break;
     case GST_VC1_ENTRYPOINT:
-        status = decode_entry_point(decoder, &rbdu, ebdu);
-        break;
+      status = decode_entry_point (decoder, &rbdu, ebdu);
+      break;
     case GST_VC1_FRAME:
-        status = decode_frame(decoder, &rbdu, ebdu);
-        break;
+      status = decode_frame (decoder, &rbdu, ebdu);
+      break;
     case GST_VC1_SLICE:
-        status = decode_slice(decoder, &rbdu, ebdu);
-        break;
+      status = decode_slice (decoder, &rbdu, ebdu);
+      break;
     case GST_VC1_END_OF_SEQ:
-        status = decode_sequence_end(decoder);
-        break;
+      status = decode_sequence_end (decoder);
+      break;
     case GST_VC1_FIELD_USER:
     case GST_VC1_FRAME_USER:
     case GST_VC1_ENTRY_POINT_USER:
     case GST_VC1_SEQUENCE_USER:
-        /* Let's just ignore them */
-        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
-        break;
+      /* Let's just ignore them */
+      status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+      break;
     default:
-        GST_WARNING("unsupported BDU type %d", ebdu->type);
-        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
-        break;
-    }
-    return status;
+      GST_WARNING ("unsupported BDU type %d", ebdu->type);
+      status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+      break;
+  }
+  return status;
 }
 
 static GstVaapiDecoderStatus
-decode_buffer(GstVaapiDecoderVC1 *decoder, guchar *buf, guint buf_size)
+decode_buffer (GstVaapiDecoderVC1 * decoder, guchar * buf, guint buf_size)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1BDU ebdu;
-
-    if (priv->has_codec_data) {
-        ebdu.type      = GST_VC1_FRAME;
-        ebdu.sc_offset = 0;
-        ebdu.offset    = 0;
-    }
-    else {
-        ebdu.type      = buf[3];
-        ebdu.sc_offset = 0;
-        ebdu.offset    = 4;
-    }
-    ebdu.data = buf;
-    ebdu.size = buf_size - ebdu.offset;
-    return decode_ebdu(decoder, &ebdu);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1BDU ebdu;
+
+  if (priv->has_codec_data) {
+    ebdu.type = GST_VC1_FRAME;
+    ebdu.sc_offset = 0;
+    ebdu.offset = 0;
+  } else {
+    ebdu.type = buf[3];
+    ebdu.sc_offset = 0;
+    ebdu.offset = 4;
+  }
+  ebdu.data = buf;
+  ebdu.size = buf_size - ebdu.offset;
+  return decode_ebdu (decoder, &ebdu);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_vc1_decode_codec_data(GstVaapiDecoder *base_decoder,
-    const guchar *buf, guint buf_size)
+gst_vaapi_decoder_vc1_decode_codec_data (GstVaapiDecoder * base_decoder,
+    const guchar * buf, guint buf_size)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
-    GstVaapiDecoderStatus status;
-    GstVC1ParserResult result;
-    GstVC1BDU ebdu;
-    GstCaps *caps;
-    GstStructure *structure;
-    guint ofs;
-    gint width, height;
-    guint32 format;
-    gint version;
-    const gchar *s;
-
-    priv->has_codec_data = TRUE;
-
-    width = GST_VAAPI_DECODER_WIDTH(decoder);
-    height = GST_VAAPI_DECODER_HEIGHT(decoder);
-    if (!width || !height) {
-        GST_ERROR("failed to parse size from codec-data");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
-
-    caps = GST_VAAPI_DECODER_CODEC_STATE(decoder)->caps;
-    structure = gst_caps_get_structure(caps, 0);
-    s = gst_structure_get_string(structure, "format");
-    if (s && strlen(s) == 4) {
-        format = GST_MAKE_FOURCC(s[0], s[1], s[2], s[3]);
-    } else {
-        /* Try to determine format from "wmvversion" property */
-        if (gst_structure_get_int(structure, "wmvversion", &version))
-            format = (version >= 1 && version <= 3) ?
-                GST_MAKE_FOURCC('W','M','V',('0'+version)) : 0;
-        else
-            format = 0;
-    }
-    if (!format) {
-        GST_ERROR("failed to parse profile from codec-data");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
-    }
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
+  GstVaapiDecoderStatus status;
+  GstVC1ParserResult result;
+  GstVC1BDU ebdu;
+  GstCaps *caps;
+  GstStructure *structure;
+  guint ofs;
+  gint width, height;
+  guint32 format;
+  gint version;
+  const gchar *s;
+
+  priv->has_codec_data = TRUE;
+
+  width = GST_VAAPI_DECODER_WIDTH (decoder);
+  height = GST_VAAPI_DECODER_HEIGHT (decoder);
+  if (!width || !height) {
+    GST_ERROR ("failed to parse size from codec-data");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
+
+  caps = GST_VAAPI_DECODER_CODEC_STATE (decoder)->caps;
+  structure = gst_caps_get_structure (caps, 0);
+  s = gst_structure_get_string (structure, "format");
+  if (s && strlen (s) == 4) {
+    format = GST_MAKE_FOURCC (s[0], s[1], s[2], s[3]);
+  } else {
+    /* Try to determine format from "wmvversion" property */
+    if (gst_structure_get_int (structure, "wmvversion", &version))
+      format = (version >= 1 && version <= 3) ?
+          GST_MAKE_FOURCC ('W', 'M', 'V', ('0' + version)) : 0;
+    else
+      format = 0;
+  }
+  if (!format) {
+    GST_ERROR ("failed to parse profile from codec-data");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
+  }
+
+  /* WMV3 -- expecting sequence header */
+  if (format == GST_MAKE_FOURCC ('W', 'M', 'V', '3')) {
+    seq_hdr->struct_c.coded_width = width;
+    seq_hdr->struct_c.coded_height = height;
+    ebdu.type = GST_VC1_SEQUENCE;
+    ebdu.size = buf_size;
+    ebdu.sc_offset = 0;
+    ebdu.offset = 0;
+    ebdu.data = (guint8 *) buf;
+    return decode_ebdu (decoder, &ebdu);
+  }
+
+  /* WVC1 -- expecting bitstream data units */
+  if (format != GST_MAKE_FOURCC ('W', 'V', 'C', '1'))
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+  seq_hdr->advanced.max_coded_width = width;
+  seq_hdr->advanced.max_coded_height = height;
+
+  ofs = 0;
+  do {
+    result = gst_vc1_identify_next_bdu (buf + ofs, buf_size - ofs, &ebdu);
 
-    /* WMV3 -- expecting sequence header */
-    if (format == GST_MAKE_FOURCC('W','M','V','3')) {
-        seq_hdr->struct_c.coded_width  = width;
-        seq_hdr->struct_c.coded_height = height;
-        ebdu.type      = GST_VC1_SEQUENCE;
-        ebdu.size      = buf_size;
-        ebdu.sc_offset = 0;
-        ebdu.offset    = 0;
-        ebdu.data      = (guint8 *)buf;
-        return decode_ebdu(decoder, &ebdu);
+    switch (result) {
+      case GST_VC1_PARSER_NO_BDU_END:
+        /* Assume the EBDU is complete within codec-data bounds */
+        ebdu.size = buf_size - ofs - ebdu.offset;
+        // fall-through
+      case GST_VC1_PARSER_OK:
+        status = decode_ebdu (decoder, &ebdu);
+        ofs += ebdu.offset + ebdu.size;
+        break;
+      default:
+        status = get_status (result);
+        break;
     }
-
-    /* WVC1 -- expecting bitstream data units */
-    if (format != GST_MAKE_FOURCC('W','V','C','1'))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
-    seq_hdr->advanced.max_coded_width  = width;
-    seq_hdr->advanced.max_coded_height = height;
-
-    ofs = 0;
-    do {
-        result = gst_vc1_identify_next_bdu(
-            buf + ofs,
-            buf_size - ofs,
-            &ebdu
-        );
-
-        switch (result) {
-        case GST_VC1_PARSER_NO_BDU_END:
-            /* Assume the EBDU is complete within codec-data bounds */
-            ebdu.size = buf_size - ofs - ebdu.offset;
-            // fall-through
-        case GST_VC1_PARSER_OK:
-            status = decode_ebdu(decoder, &ebdu);
-            ofs += ebdu.offset + ebdu.size;
-            break;
-        default:
-            status = get_status(result);
-            break;
-        }
-    } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS && ofs < buf_size);
-    return status;
+  } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS && ofs < buf_size);
+  return status;
 }
 
 static GstVaapiDecoderStatus
-ensure_decoder(GstVaapiDecoderVC1 *decoder)
+ensure_decoder (GstVaapiDecoderVC1 * decoder)
 {
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-
-    if (!priv->is_opened) {
-        priv->is_opened = gst_vaapi_decoder_vc1_open(decoder);
-        if (!priv->is_opened)
-            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
-
-        status = gst_vaapi_decoder_decode_codec_data(
-            GST_VAAPI_DECODER_CAST(decoder));
-        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-            return status;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
+
+  if (!priv->is_opened) {
+    priv->is_opened = gst_vaapi_decoder_vc1_open (decoder);
+    if (!priv->is_opened)
+      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
+
+    status =
+        gst_vaapi_decoder_decode_codec_data (GST_VAAPI_DECODER_CAST (decoder));
+    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+      return status;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static inline gint
-scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
+scan_for_start_code (GstAdapter * adapter, guint ofs, guint size, guint32 * scp)
 {
-    return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
-        0xffffff00, 0x00000100, ofs, size, scp);
+  return (gint) gst_adapter_masked_scan_uint32_peek (adapter,
+      0xffffff00, 0x00000100, ofs, size, scp);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_vc1_parse(GstVaapiDecoder *base_decoder,
-    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_vc1_parse (GstVaapiDecoder * base_decoder,
+    GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-    guint8 bdu_type;
-    guint size, buf_size, flags = 0;
-    gint ofs;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-
-    size = gst_adapter_available(adapter);
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
+  guint8 bdu_type;
+  guint size, buf_size, flags = 0;
+  gint ofs;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    if (priv->has_codec_data) {
-        // Assume demuxer sends out plain frames
-        if (size < 1)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        buf_size = size;
-        bdu_type = GST_VC1_FRAME;
-    }
-    else {
-        if (size < 4)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-        ofs = scan_for_start_code(adapter, 0, size, NULL);
-        if (ofs < 0)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-        gst_adapter_flush(adapter, ofs);
-        size -= ofs;
-
-        ofs = G_UNLIKELY(size < 8) ? -1 :
-            scan_for_start_code(adapter, 4, size - 4, NULL);
-        if (ofs < 0) {
-            // Assume the whole packet is present if end-of-stream
-            if (!at_eos)
-                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-            ofs = size;
-        }
-        buf_size = ofs;
-        gst_adapter_copy(adapter, &bdu_type, 3, 1);
+  size = gst_adapter_available (adapter);
+
+  if (priv->has_codec_data) {
+    // Assume demuxer sends out plain frames
+    if (size < 1)
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+    buf_size = size;
+    bdu_type = GST_VC1_FRAME;
+  } else {
+    if (size < 4)
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+    ofs = scan_for_start_code (adapter, 0, size, NULL);
+    if (ofs < 0)
+      return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+    gst_adapter_flush (adapter, ofs);
+    size -= ofs;
+
+    ofs = G_UNLIKELY (size < 8) ? -1 :
+        scan_for_start_code (adapter, 4, size - 4, NULL);
+    if (ofs < 0) {
+      // Assume the whole packet is present if end-of-stream
+      if (!at_eos)
+        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+      ofs = size;
     }
+    buf_size = ofs;
+    gst_adapter_copy (adapter, &bdu_type, 3, 1);
+  }
 
-    unit->size = buf_size;
+  unit->size = buf_size;
 
-    /* Check for new picture layer */
-    switch (bdu_type) {
+  /* Check for new picture layer */
+  switch (bdu_type) {
     case GST_VC1_END_OF_SEQ:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
+      break;
     case GST_VC1_SEQUENCE:
     case GST_VC1_ENTRYPOINT:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      break;
     case GST_VC1_FRAME:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        break;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      break;
     case GST_VC1_SLICE:
-        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        break;
-    }
-    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+      flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+      break;
+  }
+  GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, flags);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_vc1_decode(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_vc1_decode (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderStatus status;
-    GstBuffer * const buffer =
-        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
-    GstMapInfo map_info;
-
-    status = ensure_decoder(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderStatus status;
+  GstBuffer *const buffer =
+      GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
+  GstMapInfo map_info;
+
+  status = ensure_decoder (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
 
-    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
-        GST_ERROR("failed to map buffer");
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-    }
+  if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
+    GST_ERROR ("failed to map buffer");
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+  }
 
-    status = decode_buffer(decoder, map_info.data + unit->offset, unit->size);
-    gst_buffer_unmap(buffer, &map_info);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
-        return status;
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  status = decode_buffer (decoder, map_info.data + unit->offset, unit->size);
+  gst_buffer_unmap (buffer, &map_info);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+    return status;
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_vc1_start_frame(GstVaapiDecoder *base_decoder,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_decoder_vc1_start_frame (GstVaapiDecoder * base_decoder,
+    GstVaapiDecoderUnit * unit)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
-    GstVaapiDecoderStatus status;
-    GstVaapiPicture *picture;
-
-    status = ensure_context(decoder);
-    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
-        GST_ERROR("failed to reset context");
-        return status;
-    }
-
-    picture = GST_VAAPI_PICTURE_NEW(VC1, decoder);
-    if (!picture) {
-        GST_ERROR("failed to allocate picture");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    gst_vaapi_picture_replace(&priv->current_picture, picture);
-    gst_vaapi_picture_unref(picture);
-
-    /* Update cropping rectangle */
-    do {
-        GstVC1AdvancedSeqHdr *adv_hdr;
-        GstVaapiRectangle crop_rect;
-
-        if (priv->profile != GST_VAAPI_PROFILE_VC1_ADVANCED)
-            break;
-
-        adv_hdr = &priv->seq_hdr.advanced;
-        if (!adv_hdr->display_ext)
-            break;
-
-        crop_rect.x = 0;
-        crop_rect.y = 0;
-        crop_rect.width = adv_hdr->disp_horiz_size;
-        crop_rect.height = adv_hdr->disp_vert_size;
-        if (crop_rect.width <= priv->width && crop_rect.height <= priv->height)
-            gst_vaapi_picture_set_crop_rect(picture, &crop_rect);
-    } while (0);
-
-    if (!gst_vc1_bitplanes_ensure_size(priv->bitplanes, &priv->seq_hdr)) {
-        GST_ERROR("failed to allocate bitplanes");
-        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-    }
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
+  GstVaapiDecoderStatus status;
+  GstVaapiPicture *picture;
+
+  status = ensure_context (decoder);
+  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+    GST_ERROR ("failed to reset context");
+    return status;
+  }
+
+  picture = GST_VAAPI_PICTURE_NEW (VC1, decoder);
+  if (!picture) {
+    GST_ERROR ("failed to allocate picture");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  gst_vaapi_picture_replace (&priv->current_picture, picture);
+  gst_vaapi_picture_unref (picture);
+
+  /* Update cropping rectangle */
+  do {
+    GstVC1AdvancedSeqHdr *adv_hdr;
+    GstVaapiRectangle crop_rect;
+
+    if (priv->profile != GST_VAAPI_PROFILE_VC1_ADVANCED)
+      break;
+
+    adv_hdr = &priv->seq_hdr.advanced;
+    if (!adv_hdr->display_ext)
+      break;
+
+    crop_rect.x = 0;
+    crop_rect.y = 0;
+    crop_rect.width = adv_hdr->disp_horiz_size;
+    crop_rect.height = adv_hdr->disp_vert_size;
+    if (crop_rect.width <= priv->width && crop_rect.height <= priv->height)
+      gst_vaapi_picture_set_crop_rect (picture, &crop_rect);
+  } while (0);
+
+  if (!gst_vc1_bitplanes_ensure_size (priv->bitplanes, &priv->seq_hdr)) {
+    GST_ERROR ("failed to allocate bitplanes");
+    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+  }
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_vc1_end_frame(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_vc1_end_frame (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
 
-    return decode_current_picture(decoder);
+  return decode_current_picture (decoder);
 }
 
 static GstVaapiDecoderStatus
-gst_vaapi_decoder_vc1_flush(GstVaapiDecoder *base_decoder)
+gst_vaapi_decoder_vc1_flush (GstVaapiDecoder * base_decoder)
 {
-    GstVaapiDecoderVC1 * const decoder =
-        GST_VAAPI_DECODER_VC1_CAST(base_decoder);
-    GstVaapiDecoderVC1Private * const priv = &decoder->priv;
+  GstVaapiDecoderVC1 *const decoder = GST_VAAPI_DECODER_VC1_CAST (base_decoder);
+  GstVaapiDecoderVC1Private *const priv = &decoder->priv;
 
-    gst_vaapi_dpb_flush(priv->dpb);
-    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+  gst_vaapi_dpb_flush (priv->dpb);
+  return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
 static void
-gst_vaapi_decoder_vc1_class_init(GstVaapiDecoderVC1Class *klass)
+gst_vaapi_decoder_vc1_class_init (GstVaapiDecoderVC1Class * klass)
 {
-    GstVaapiMiniObjectClass * const object_class =
-        GST_VAAPI_MINI_OBJECT_CLASS(klass);
-    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
-
-    object_class->size          = sizeof(GstVaapiDecoderVC1);
-    object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
-
-    decoder_class->create       = gst_vaapi_decoder_vc1_create;
-    decoder_class->destroy      = gst_vaapi_decoder_vc1_destroy;
-    decoder_class->parse        = gst_vaapi_decoder_vc1_parse;
-    decoder_class->decode       = gst_vaapi_decoder_vc1_decode;
-    decoder_class->start_frame  = gst_vaapi_decoder_vc1_start_frame;
-    decoder_class->end_frame    = gst_vaapi_decoder_vc1_end_frame;
-    decoder_class->flush        = gst_vaapi_decoder_vc1_flush;
-
-    decoder_class->decode_codec_data =
-        gst_vaapi_decoder_vc1_decode_codec_data;
+  GstVaapiMiniObjectClass *const object_class =
+      GST_VAAPI_MINI_OBJECT_CLASS (klass);
+  GstVaapiDecoderClass *const decoder_class = GST_VAAPI_DECODER_CLASS (klass);
+
+  object_class->size = sizeof (GstVaapiDecoderVC1);
+  object_class->finalize = (GDestroyNotify) gst_vaapi_decoder_finalize;
+
+  decoder_class->create = gst_vaapi_decoder_vc1_create;
+  decoder_class->destroy = gst_vaapi_decoder_vc1_destroy;
+  decoder_class->parse = gst_vaapi_decoder_vc1_parse;
+  decoder_class->decode = gst_vaapi_decoder_vc1_decode;
+  decoder_class->start_frame = gst_vaapi_decoder_vc1_start_frame;
+  decoder_class->end_frame = gst_vaapi_decoder_vc1_end_frame;
+  decoder_class->flush = gst_vaapi_decoder_vc1_flush;
+
+  decoder_class->decode_codec_data = gst_vaapi_decoder_vc1_decode_codec_data;
 }
 
 static inline const GstVaapiDecoderClass *
-gst_vaapi_decoder_vc1_class(void)
+gst_vaapi_decoder_vc1_class (void)
 {
-    static GstVaapiDecoderVC1Class g_class;
-    static gsize g_class_init = FALSE;
-
-    if (g_once_init_enter(&g_class_init)) {
-        gst_vaapi_decoder_vc1_class_init(&g_class);
-        g_once_init_leave(&g_class_init, TRUE);
-    }
-    return GST_VAAPI_DECODER_CLASS(&g_class);
+  static GstVaapiDecoderVC1Class g_class;
+  static gsize g_class_init = FALSE;
+
+  if (g_once_init_enter (&g_class_init)) {
+    gst_vaapi_decoder_vc1_class_init (&g_class);
+    g_once_init_leave (&g_class_init, TRUE);
+  }
+  return GST_VAAPI_DECODER_CLASS (&g_class);
 }
 
 /**
@@ -1437,7 +1439,7 @@ gst_vaapi_decoder_vc1_class(void)
  * Return value: the newly allocated #GstVaapiDecoder object
  */
 GstVaapiDecoder *
-gst_vaapi_decoder_vc1_new(GstVaapiDisplay *display, GstCaps *caps)
+gst_vaapi_decoder_vc1_new (GstVaapiDisplay * display, GstCaps * caps)
 {
-    return gst_vaapi_decoder_new(gst_vaapi_decoder_vc1_class(), display, caps);
+  return gst_vaapi_decoder_new (gst_vaapi_decoder_vc1_class (), display, caps);
 }
index 8a295a5..98bedcb 100644 (file)
     } while (0)
 
 static gboolean
-_gst_vaapi_image_map(GstVaapiImage *image, GstVaapiImageRaw *raw_image);
+_gst_vaapi_image_map (GstVaapiImage * image, GstVaapiImageRaw * raw_image);
 
-static gboolean
-_gst_vaapi_image_unmap(GstVaapiImage *image);
+static gboolean _gst_vaapi_image_unmap (GstVaapiImage * image);
 
 static gboolean
-_gst_vaapi_image_set_image(GstVaapiImage *image, const VAImage *va_image);
+_gst_vaapi_image_set_image (GstVaapiImage * image, const VAImage * va_image);
 
 /*
  * VAImage wrapper
  */
 
 static gboolean
-vaapi_image_is_linear(const VAImage *va_image)
+vaapi_image_is_linear (const VAImage * va_image)
 {
-    guint i, width, height, width2, height2, data_size;
-
-    for (i = 1; i < va_image->num_planes; i++)
-        if (va_image->offsets[i] < va_image->offsets[i - 1])
-            return FALSE;
-
-    width   = va_image->width;
-    height  = va_image->height;
-    width2  = (width  + 1) / 2;
-    height2 = (height + 1) / 2;
-
-    switch (va_image->format.fourcc) {
-    case VA_FOURCC('N','V','1','2'):
-    case VA_FOURCC('Y','V','1','2'):
-    case VA_FOURCC('I','4','2','0'):
-        data_size = width * height + 2 * width2 * height2;
-        break;
-    case VA_FOURCC('Y','U','Y','2'):
-    case VA_FOURCC('U','Y','V','Y'):
-        data_size = 2 * width * height;
-        break;
-    case VA_FOURCC('Y','8','0','0'):
-        data_size = width * height;
-        break;
-    case VA_FOURCC('A','Y','U','V'):
-    case VA_FOURCC('A','R','G','B'):
-    case VA_FOURCC('R','G','B','A'):
-    case VA_FOURCC('A','B','G','R'):
-    case VA_FOURCC('B','G','R','A'):
-    case VA_FOURCC('X','R','G','B'):
-    case VA_FOURCC('R','G','B','X'):
-    case VA_FOURCC('X','B','G','R'):
-    case VA_FOURCC('B','G','R','X'):
-        data_size = 4 * width * height;
-        break;
+  guint i, width, height, width2, height2, data_size;
+
+  for (i = 1; i < va_image->num_planes; i++)
+    if (va_image->offsets[i] < va_image->offsets[i - 1])
+      return FALSE;
+
+  width = va_image->width;
+  height = va_image->height;
+  width2 = (width + 1) / 2;
+  height2 = (height + 1) / 2;
+
+  switch (va_image->format.fourcc) {
+    case VA_FOURCC ('N', 'V', '1', '2'):
+    case VA_FOURCC ('Y', 'V', '1', '2'):
+    case VA_FOURCC ('I', '4', '2', '0'):
+      data_size = width * height + 2 * width2 * height2;
+      break;
+    case VA_FOURCC ('Y', 'U', 'Y', '2'):
+    case VA_FOURCC ('U', 'Y', 'V', 'Y'):
+      data_size = 2 * width * height;
+      break;
+    case VA_FOURCC ('Y', '8', '0', '0'):
+      data_size = width * height;
+      break;
+    case VA_FOURCC ('A', 'Y', 'U', 'V'):
+    case VA_FOURCC ('A', 'R', 'G', 'B'):
+    case VA_FOURCC ('R', 'G', 'B', 'A'):
+    case VA_FOURCC ('A', 'B', 'G', 'R'):
+    case VA_FOURCC ('B', 'G', 'R', 'A'):
+    case VA_FOURCC ('X', 'R', 'G', 'B'):
+    case VA_FOURCC ('R', 'G', 'B', 'X'):
+    case VA_FOURCC ('X', 'B', 'G', 'R'):
+    case VA_FOURCC ('B', 'G', 'R', 'X'):
+      data_size = 4 * width * height;
+      break;
     default:
-        g_error("FIXME: incomplete formats %" GST_FOURCC_FORMAT,
-                GST_FOURCC_ARGS(va_image->format.fourcc));
-        break;
-    }
-    return va_image->data_size == data_size;
+      g_error ("FIXME: incomplete formats %" GST_FOURCC_FORMAT,
+          GST_FOURCC_ARGS (va_image->format.fourcc));
+      break;
+  }
+  return va_image->data_size == data_size;
 }
 
 static void
-gst_vaapi_image_destroy(GstVaapiImage *image)
+gst_vaapi_image_destroy (GstVaapiImage * image)
 {
-    GstVaapiDisplay * const display = GST_VAAPI_OBJECT_DISPLAY(image);
-    VAImageID image_id;
-    VAStatus status;
-
-    _gst_vaapi_image_unmap(image);
-
-    image_id = GST_VAAPI_OBJECT_ID(image);
-    GST_DEBUG("image %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS(image_id));
-
-    if (image_id != VA_INVALID_ID) {
-        GST_VAAPI_DISPLAY_LOCK(display);
-        status = vaDestroyImage(GST_VAAPI_DISPLAY_VADISPLAY(display), image_id);
-        GST_VAAPI_DISPLAY_UNLOCK(display);
-        if (!vaapi_check_status(status, "vaDestroyImage()"))
-            g_warning("failed to destroy image %" GST_VAAPI_ID_FORMAT,
-                      GST_VAAPI_ID_ARGS(image_id));
-        GST_VAAPI_OBJECT_ID(image) = VA_INVALID_ID;
-    }
+  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (image);
+  VAImageID image_id;
+  VAStatus status;
+
+  _gst_vaapi_image_unmap (image);
+
+  image_id = GST_VAAPI_OBJECT_ID (image);
+  GST_DEBUG ("image %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (image_id));
+
+  if (image_id != VA_INVALID_ID) {
+    GST_VAAPI_DISPLAY_LOCK (display);
+    status = vaDestroyImage (GST_VAAPI_DISPLAY_VADISPLAY (display), image_id);
+    GST_VAAPI_DISPLAY_UNLOCK (display);
+    if (!vaapi_check_status (status, "vaDestroyImage()"))
+      g_warning ("failed to destroy image %" GST_VAAPI_ID_FORMAT,
+          GST_VAAPI_ID_ARGS (image_id));
+    GST_VAAPI_OBJECT_ID (image) = VA_INVALID_ID;
+  }
 }
 
 static gboolean
-_gst_vaapi_image_create(GstVaapiImage *image, GstVideoFormat format)
+_gst_vaapi_image_create (GstVaapiImage * image, GstVideoFormat format)
 {
-    GstVaapiDisplay * const display = GST_VAAPI_OBJECT_DISPLAY(image);
-    const VAImageFormat *va_format;
-    VAStatus status;
-
-    if (!gst_vaapi_display_has_image_format(display, format))
-        return FALSE;
-
-    va_format = gst_vaapi_video_format_to_va_format(format);
-    if (!va_format)
-        return FALSE;
-
-    GST_VAAPI_DISPLAY_LOCK(display);
-    status = vaCreateImage(
-        GST_VAAPI_DISPLAY_VADISPLAY(display),
-        (VAImageFormat *)va_format,
-        image->width,
-        image->height,
-        &image->internal_image
-    );
-    GST_VAAPI_DISPLAY_UNLOCK(display);
-    if (status != VA_STATUS_SUCCESS ||
-        image->internal_image.format.fourcc != va_format->fourcc)
-        return FALSE;
-
-    image->internal_format = format;
-    return TRUE;
+  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (image);
+  const VAImageFormat *va_format;
+  VAStatus status;
+
+  if (!gst_vaapi_display_has_image_format (display, format))
+    return FALSE;
+
+  va_format = gst_vaapi_video_format_to_va_format (format);
+  if (!va_format)
+    return FALSE;
+
+  GST_VAAPI_DISPLAY_LOCK (display);
+  status = vaCreateImage (GST_VAAPI_DISPLAY_VADISPLAY (display),
+      (VAImageFormat *) va_format,
+      image->width, image->height, &image->internal_image);
+  GST_VAAPI_DISPLAY_UNLOCK (display);
+  if (status != VA_STATUS_SUCCESS ||
+      image->internal_image.format.fourcc != va_format->fourcc)
+    return FALSE;
+
+  image->internal_format = format;
+  return TRUE;
 }
 
 static gboolean
-gst_vaapi_image_create(GstVaapiImage *image, GstVideoFormat format,
+gst_vaapi_image_create (GstVaapiImage * image, GstVideoFormat format,
     guint width, guint height)
 {
-    const VAImageFormat *va_format;
-    VAImageID image_id;
-
-    image->format = format;
-    image->width  = width;
-    image->height = height;
-
-    if (!_gst_vaapi_image_create(image, format)) {
-        switch (format) {
-        case GST_VIDEO_FORMAT_I420:
-            format = GST_VIDEO_FORMAT_YV12;
-            break;
-        case GST_VIDEO_FORMAT_YV12:
-            format = GST_VIDEO_FORMAT_I420;
-            break;
-        default:
-            format = 0;
-            break;
-        }
-        if (!format || !_gst_vaapi_image_create(image, format))
-            return FALSE;
+  const VAImageFormat *va_format;
+  VAImageID image_id;
+
+  image->format = format;
+  image->width = width;
+  image->height = height;
+
+  if (!_gst_vaapi_image_create (image, format)) {
+    switch (format) {
+      case GST_VIDEO_FORMAT_I420:
+        format = GST_VIDEO_FORMAT_YV12;
+        break;
+      case GST_VIDEO_FORMAT_YV12:
+        format = GST_VIDEO_FORMAT_I420;
+        break;
+      default:
+        format = 0;
+        break;
     }
-    image->image = image->internal_image;
-    image_id     = image->image.image_id;
-
-    if (image->format != image->internal_format) {
-        switch (image->format) {
-        case GST_VIDEO_FORMAT_YV12:
-        case GST_VIDEO_FORMAT_I420:
-            va_format = gst_vaapi_video_format_to_va_format(image->format);
-            if (!va_format)
-                return FALSE;
-            image->image.format = *va_format;
-            SWAP_UINT(image->image.offsets[1], image->image.offsets[2]);
-            SWAP_UINT(image->image.pitches[1], image->image.pitches[2]);
-            break;
-        default:
-            break;
-        }
+    if (!format || !_gst_vaapi_image_create (image, format))
+      return FALSE;
+  }
+  image->image = image->internal_image;
+  image_id = image->image.image_id;
+
+  if (image->format != image->internal_format) {
+    switch (image->format) {
+      case GST_VIDEO_FORMAT_YV12:
+      case GST_VIDEO_FORMAT_I420:
+        va_format = gst_vaapi_video_format_to_va_format (image->format);
+        if (!va_format)
+          return FALSE;
+        image->image.format = *va_format;
+        SWAP_UINT (image->image.offsets[1], image->image.offsets[2]);
+        SWAP_UINT (image->image.pitches[1], image->image.pitches[2]);
+        break;
+      default:
+        break;
     }
-    image->is_linear = vaapi_image_is_linear(&image->image);
+  }
+  image->is_linear = vaapi_image_is_linear (&image->image);
 
-    GST_DEBUG("image %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS(image_id));
-    GST_VAAPI_OBJECT_ID(image) = image_id;
-    return TRUE;
+  GST_DEBUG ("image %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (image_id));
+  GST_VAAPI_OBJECT_ID (image) = image_id;
+  return TRUE;
 }
 
 static void
-gst_vaapi_image_init(GstVaapiImage *image)
+gst_vaapi_image_init (GstVaapiImage * image)
 {
-    image->internal_image.image_id = VA_INVALID_ID;
-    image->internal_image.buf = VA_INVALID_ID;
-    image->image.image_id = VA_INVALID_ID;
-    image->image.buf = VA_INVALID_ID;
+  image->internal_image.image_id = VA_INVALID_ID;
+  image->internal_image.buf = VA_INVALID_ID;
+  image->image.image_id = VA_INVALID_ID;
+  image->image.buf = VA_INVALID_ID;
 }
 
 static void
-gst_vaapi_image_class_init(GstVaapiImageClass *klass)
+gst_vaapi_image_class_init (GstVaapiImageClass * klass)
 {
-    GstVaapiObjectClass * const object_class =
-        GST_VAAPI_OBJECT_CLASS(klass);
+  GstVaapiObjectClass *const object_class = GST_VAAPI_OBJECT_CLASS (klass);
 
-    object_class->init = (GstVaapiObjectInitFunc)gst_vaapi_image_init;
+  object_class->init = (GstVaapiObjectInitFunc) gst_vaapi_image_init;
 }
 
 #define gst_vaapi_image_finalize gst_vaapi_image_destroy
-GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
-    GstVaapiImage,
-    gst_vaapi_image,
-    gst_vaapi_image_class_init(&g_class))
+GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE (GstVaapiImage,
+    gst_vaapi_image, gst_vaapi_image_class_init (&g_class))
 
 /**
  * gst_vaapi_image_new:
@@ -244,33 +236,28 @@ GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
  *
  * Return value: the newly allocated #GstVaapiImage object
  */
-GstVaapiImage *
-gst_vaapi_image_new(
-    GstVaapiDisplay    *display,
-    GstVideoFormat      format,
-    guint               width,
-    guint               height
-)
+     GstVaapiImage *gst_vaapi_image_new (GstVaapiDisplay * display,
+    GstVideoFormat format, guint width, guint height)
 {
-    GstVaapiImage *image;
+  GstVaapiImage *image;
 
-    g_return_val_if_fail(width > 0, NULL);
-    g_return_val_if_fail(height > 0, NULL);
+  g_return_val_if_fail (width > 0, NULL);
+  g_return_val_if_fail (height > 0, NULL);
 
-    GST_DEBUG("format %s, size %ux%u", gst_vaapi_video_format_to_string(format),
-              width, height);
+  GST_DEBUG ("format %s, size %ux%u", gst_vaapi_video_format_to_string (format),
+      width, height);
 
-    image = gst_vaapi_object_new(gst_vaapi_image_class(), display);
-    if (!image)
-        return NULL;
+  image = gst_vaapi_object_new (gst_vaapi_image_class (), display);
+  if (!image)
+    return NULL;
 
-    if (!gst_vaapi_image_create(image, format, width, height))
-        goto error;
-    return image;
+  if (!gst_vaapi_image_create (image, format, width, height))
+    goto error;
+  return image;
 
 error:
-    gst_vaapi_object_unref(image);
-    return NULL;
+  gst_vaapi_object_unref (image);
+  return NULL;
 }
 
 /**
@@ -286,30 +273,30 @@ error:
  * Return value: the newly allocated #GstVaapiImage object
  */
 GstVaapiImage *
-gst_vaapi_image_new_with_image(GstVaapiDisplay *display, VAImage *va_image)
+gst_vaapi_image_new_with_image (GstVaapiDisplay * display, VAImage * va_image)
 {
-    GstVaapiImage *image;
+  GstVaapiImage *image;
 
-    g_return_val_if_fail(va_image, NULL);
-    g_return_val_if_fail(va_image->image_id != VA_INVALID_ID, NULL);
-    g_return_val_if_fail(va_image->buf != VA_INVALID_ID, NULL);
+  g_return_val_if_fail (va_image, NULL);
+  g_return_val_if_fail (va_image->image_id != VA_INVALID_ID, NULL);
+  g_return_val_if_fail (va_image->buf != VA_INVALID_ID, NULL);
 
-    GST_DEBUG("VA image 0x%08x, format %" GST_FOURCC_FORMAT ", size %ux%u",
-              va_image->image_id,
-              GST_FOURCC_ARGS(va_image->format.fourcc),
-              va_image->width, va_image->height);
+  GST_DEBUG ("VA image 0x%08x, format %" GST_FOURCC_FORMAT ", size %ux%u",
+      va_image->image_id,
+      GST_FOURCC_ARGS (va_image->format.fourcc),
+      va_image->width, va_image->height);
 
-    image = gst_vaapi_object_new(gst_vaapi_image_class(), display);
-    if (!image)
-        return NULL;
+  image = gst_vaapi_object_new (gst_vaapi_image_class (), display);
+  if (!image)
+    return NULL;
 
-    if (!_gst_vaapi_image_set_image(image, va_image))
-        goto error;
-    return image;
+  if (!_gst_vaapi_image_set_image (image, va_image))
+    goto error;
+  return image;
 
 error:
-    gst_vaapi_object_unref(image);
-    return NULL;
+  gst_vaapi_object_unref (image);
+  return NULL;
 }
 
 /**
@@ -321,11 +308,11 @@ error:
  * Return value: the underlying VA image id
  */
 GstVaapiID
-gst_vaapi_image_get_id(GstVaapiImage *image)
+gst_vaapi_image_get_id (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, VA_INVALID_ID);
+  g_return_val_if_fail (image != NULL, VA_INVALID_ID);
 
-    return GST_VAAPI_OBJECT_ID(image);
+  return GST_VAAPI_OBJECT_ID (image);
 }
 
 /**
@@ -338,14 +325,14 @@ gst_vaapi_image_get_id(GstVaapiImage *image)
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_get_image(GstVaapiImage *image, VAImage *va_image)
+gst_vaapi_image_get_image (GstVaapiImage * image, VAImage * va_image)
 {
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    if (va_image)
-        *va_image = image->image;
+  if (va_image)
+    *va_image = image->image;
 
-    return TRUE;
+  return TRUE;
 }
 
 /*
@@ -363,55 +350,55 @@ gst_vaapi_image_get_image(GstVaapiImage *image, VAImage *va_image)
  * Return value: %TRUE on success
  */
 gboolean
-_gst_vaapi_image_set_image(GstVaapiImage *image, const VAImage *va_image)
+_gst_vaapi_image_set_image (GstVaapiImage * image, const VAImage * va_image)
 {
-    GstVideoFormat format;
-    VAImage alt_va_image;
-    const VAImageFormat *alt_va_format;
-
-    format = gst_vaapi_video_format_from_va_format(&va_image->format);
-    if (format == GST_VIDEO_FORMAT_UNKNOWN)
-        return FALSE;
-
-    image->internal_image  = *va_image;
-    image->internal_format = format;
-    image->is_linear       = vaapi_image_is_linear(va_image);
-    image->image           = *va_image;
-    image->format          = format;
-    image->width           = va_image->width;
-    image->height          = va_image->height;
-
-    GST_VAAPI_OBJECT_ID(image) = va_image->image_id;
-
-    /* Try to linearize image */
-    if (!image->is_linear) {
-        switch (format) {
-        case GST_VIDEO_FORMAT_I420:
-            format = GST_VIDEO_FORMAT_YV12;
-            break;
-        case GST_VIDEO_FORMAT_YV12:
-            format = GST_VIDEO_FORMAT_I420;
-            break;
-        default:
-            format = 0;
-            break;
-        }
-        if (format &&
-            (alt_va_format = gst_vaapi_video_format_to_va_format(format))) {
-            alt_va_image = *va_image;
-            alt_va_image.format = *alt_va_format;
-            SWAP_UINT(alt_va_image.offsets[1], alt_va_image.offsets[2]);
-            SWAP_UINT(alt_va_image.pitches[1], alt_va_image.pitches[2]);
-            if (vaapi_image_is_linear(&alt_va_image)) {
-                image->image     = alt_va_image;
-                image->format    = format;
-                image->is_linear = TRUE;
-                GST_DEBUG("linearized image to %s format",
-                          gst_vaapi_video_format_to_string(format));
-            }
-        }
+  GstVideoFormat format;
+  VAImage alt_va_image;
+  const VAImageFormat *alt_va_format;
+
+  format = gst_vaapi_video_format_from_va_format (&va_image->format);
+  if (format == GST_VIDEO_FORMAT_UNKNOWN)
+    return FALSE;
+
+  image->internal_image = *va_image;
+  image->internal_format = format;
+  image->is_linear = vaapi_image_is_linear (va_image);
+  image->image = *va_image;
+  image->format = format;
+  image->width = va_image->width;
+  image->height = va_image->height;
+
+  GST_VAAPI_OBJECT_ID (image) = va_image->image_id;
+
+  /* Try to linearize image */
+  if (!image->is_linear) {
+    switch (format) {
+      case GST_VIDEO_FORMAT_I420:
+        format = GST_VIDEO_FORMAT_YV12;
+        break;
+      case GST_VIDEO_FORMAT_YV12:
+        format = GST_VIDEO_FORMAT_I420;
+        break;
+      default:
+        format = 0;
+        break;
     }
-    return TRUE;
+    if (format &&
+        (alt_va_format = gst_vaapi_video_format_to_va_format (format))) {
+      alt_va_image = *va_image;
+      alt_va_image.format = *alt_va_format;
+      SWAP_UINT (alt_va_image.offsets[1], alt_va_image.offsets[2]);
+      SWAP_UINT (alt_va_image.pitches[1], alt_va_image.pitches[2]);
+      if (vaapi_image_is_linear (&alt_va_image)) {
+        image->image = alt_va_image;
+        image->format = format;
+        image->is_linear = TRUE;
+        GST_DEBUG ("linearized image to %s format",
+            gst_vaapi_video_format_to_string (format));
+      }
+    }
+  }
+  return TRUE;
 }
 
 /**
@@ -423,11 +410,11 @@ _gst_vaapi_image_set_image(GstVaapiImage *image, const VAImage *va_image)
  * Return value: the #GstVideoFormat
  */
 GstVideoFormat
-gst_vaapi_image_get_format(GstVaapiImage *image)
+gst_vaapi_image_get_format (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, 0);
+  g_return_val_if_fail (image != NULL, 0);
 
-    return image->format;
+  return image->format;
 }
 
 /**
@@ -439,11 +426,11 @@ gst_vaapi_image_get_format(GstVaapiImage *image)
  * Return value: the image width, in pixels
  */
 guint
-gst_vaapi_image_get_width(GstVaapiImage *image)
+gst_vaapi_image_get_width (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, 0);
+  g_return_val_if_fail (image != NULL, 0);
 
-    return image->width;
+  return image->width;
 }
 
 /**
@@ -455,11 +442,11 @@ gst_vaapi_image_get_width(GstVaapiImage *image)
  * Return value: the image height, in pixels.
  */
 guint
-gst_vaapi_image_get_height(GstVaapiImage *image)
+gst_vaapi_image_get_height (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, 0);
+  g_return_val_if_fail (image != NULL, 0);
 
-    return image->height;
+  return image->height;
 }
 
 /**
@@ -471,15 +458,16 @@ gst_vaapi_image_get_height(GstVaapiImage *image)
  * Retrieves the dimensions of a #GstVaapiImage.
  */
 void
-gst_vaapi_image_get_size(GstVaapiImage *image, guint *pwidth, guint *pheight)
+gst_vaapi_image_get_size (GstVaapiImage * image, guint * pwidth,
+    guint * pheight)
 {
-    g_return_if_fail(image != NULL);
+  g_return_if_fail (image != NULL);
 
-    if (pwidth)
-        *pwidth = image->width;
+  if (pwidth)
+    *pwidth = image->width;
 
-    if (pheight)
-        *pheight = image->height;
+  if (pheight)
+    *pheight = image->height;
 }
 
 /**
@@ -493,11 +481,11 @@ gst_vaapi_image_get_size(GstVaapiImage *image, guint *pwidth, guint *pheight)
  * Return value: %TRUE if image data planes are allocated from a single buffer
  */
 gboolean
-gst_vaapi_image_is_linear(GstVaapiImage *image)
+gst_vaapi_image_is_linear (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    return image->is_linear;
+  return image->is_linear;
 }
 
 /**
@@ -509,17 +497,17 @@ gst_vaapi_image_is_linear(GstVaapiImage *image)
  * Return value: %TRUE if the @image is mapped
  */
 static inline gboolean
-_gst_vaapi_image_is_mapped(GstVaapiImage *image)
+_gst_vaapi_image_is_mapped (GstVaapiImage * image)
 {
-    return image->image_data != NULL;
+  return image->image_data != NULL;
 }
 
 gboolean
-gst_vaapi_image_is_mapped(GstVaapiImage *image)
+gst_vaapi_image_is_mapped (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    return _gst_vaapi_image_is_mapped(image);
+  return _gst_vaapi_image_is_mapped (image);
 }
 
 /**
@@ -532,51 +520,48 @@ gst_vaapi_image_is_mapped(GstVaapiImage *image)
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_map(GstVaapiImage *image)
+gst_vaapi_image_map (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    return _gst_vaapi_image_map(image, NULL);
+  return _gst_vaapi_image_map (image, NULL);
 }
 
 gboolean
-_gst_vaapi_image_map(GstVaapiImage *image, GstVaapiImageRaw *raw_image)
+_gst_vaapi_image_map (GstVaapiImage * image, GstVaapiImageRaw * raw_image)
 {
-    GstVaapiDisplay *display;
-    VAStatus status;
-    guint i;
-
-    if (_gst_vaapi_image_is_mapped(image))
-        goto map_success;
-
-    display = GST_VAAPI_OBJECT_DISPLAY(image);
-    if (!display)
-        return FALSE;
-
-    GST_VAAPI_DISPLAY_LOCK(display);
-    status = vaMapBuffer(
-        GST_VAAPI_DISPLAY_VADISPLAY(display),
-        image->image.buf,
-        (void **)&image->image_data
-    );
-    GST_VAAPI_DISPLAY_UNLOCK(display);
-    if (!vaapi_check_status(status, "vaMapBuffer()"))
-        return FALSE;
+  GstVaapiDisplay *display;
+  VAStatus status;
+  guint i;
+
+  if (_gst_vaapi_image_is_mapped (image))
+    goto map_success;
+
+  display = GST_VAAPI_OBJECT_DISPLAY (image);
+  if (!display)
+    return FALSE;
+
+  GST_VAAPI_DISPLAY_LOCK (display);
+  status = vaMapBuffer (GST_VAAPI_DISPLAY_VADISPLAY (display),
+      image->image.buf, (void **) &image->image_data);
+  GST_VAAPI_DISPLAY_UNLOCK (display);
+  if (!vaapi_check_status (status, "vaMapBuffer()"))
+    return FALSE;
 
 map_success:
-    if (raw_image) {
-        const VAImage * const va_image = &image->image;
-        raw_image->format     = image->format;
-        raw_image->width      = va_image->width;
-        raw_image->height     = va_image->height;
-        raw_image->num_planes = va_image->num_planes;
-        for (i = 0; i < raw_image->num_planes; i++) {
-            raw_image->pixels[i] = (guchar *)image->image_data +
-                va_image->offsets[i];
-            raw_image->stride[i] = va_image->pitches[i];
-        }
+  if (raw_image) {
+    const VAImage *const va_image = &image->image;
+    raw_image->format = image->format;
+    raw_image->width = va_image->width;
+    raw_image->height = va_image->height;
+    raw_image->num_planes = va_image->num_planes;
+    for (i = 0; i < raw_image->num_planes; i++) {
+      raw_image->pixels[i] = (guchar *) image->image_data +
+          va_image->offsets[i];
+      raw_image->stride[i] = va_image->pitches[i];
     }
-    return TRUE;
+  }
+  return TRUE;
 }
 
 /**
@@ -589,37 +574,35 @@ map_success:
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_unmap(GstVaapiImage *image)
+gst_vaapi_image_unmap (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    return _gst_vaapi_image_unmap(image);
+  return _gst_vaapi_image_unmap (image);
 }
 
 gboolean
-_gst_vaapi_image_unmap(GstVaapiImage *image)
+_gst_vaapi_image_unmap (GstVaapiImage * image)
 {
-    GstVaapiDisplay *display;
-    VAStatus status;
-
-    if (!_gst_vaapi_image_is_mapped(image))
-        return TRUE;
-
-    display = GST_VAAPI_OBJECT_DISPLAY(image);
-    if (!display)
-        return FALSE;
-
-    GST_VAAPI_DISPLAY_LOCK(display);
-    status = vaUnmapBuffer(
-        GST_VAAPI_DISPLAY_VADISPLAY(display),
-        image->image.buf
-    );
-    GST_VAAPI_DISPLAY_UNLOCK(display);
-    if (!vaapi_check_status(status, "vaUnmapBuffer()"))
-        return FALSE;
-
-    image->image_data = NULL;
+  GstVaapiDisplay *display;
+  VAStatus status;
+
+  if (!_gst_vaapi_image_is_mapped (image))
     return TRUE;
+
+  display = GST_VAAPI_OBJECT_DISPLAY (image);
+  if (!display)
+    return FALSE;
+
+  GST_VAAPI_DISPLAY_LOCK (display);
+  status = vaUnmapBuffer (GST_VAAPI_DISPLAY_VADISPLAY (display),
+      image->image.buf);
+  GST_VAAPI_DISPLAY_UNLOCK (display);
+  if (!vaapi_check_status (status, "vaUnmapBuffer()"))
+    return FALSE;
+
+  image->image_data = NULL;
+  return TRUE;
 }
 
 /**
@@ -632,12 +615,12 @@ _gst_vaapi_image_unmap(GstVaapiImage *image)
  * Return value: the number of planes available in the @image
  */
 guint
-gst_vaapi_image_get_plane_count(GstVaapiImage *image)
+gst_vaapi_image_get_plane_count (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, 0);
-    g_return_val_if_fail(_gst_vaapi_image_is_mapped(image), 0);
+  g_return_val_if_fail (image != NULL, 0);
+  g_return_val_if_fail (_gst_vaapi_image_is_mapped (image), 0);
 
-    return image->image.num_planes;
+  return image->image.num_planes;
 }
 
 /**
@@ -651,13 +634,13 @@ gst_vaapi_image_get_plane_count(GstVaapiImage *image)
  * Return value: the pixels data of the specified @plane
  */
 guchar *
-gst_vaapi_image_get_plane(GstVaapiImage *image, guint plane)
+gst_vaapi_image_get_plane (GstVaapiImage * image, guint plane)
 {
-    g_return_val_if_fail(image != NULL, NULL);
-    g_return_val_if_fail(_gst_vaapi_image_is_mapped(image), NULL);
-    g_return_val_if_fail(plane < image->image.num_planes, NULL);
+  g_return_val_if_fail (image != NULL, NULL);
+  g_return_val_if_fail (_gst_vaapi_image_is_mapped (image), NULL);
+  g_return_val_if_fail (plane < image->image.num_planes, NULL);
 
-    return image->image_data + image->image.offsets[plane];
+  return image->image_data + image->image.offsets[plane];
 }
 
 /**
@@ -671,13 +654,13 @@ gst_vaapi_image_get_plane(GstVaapiImage *image, guint plane)
  * Return value: the line size (stride) of the specified plane
  */
 guint
-gst_vaapi_image_get_pitch(GstVaapiImage *image, guint plane)
+gst_vaapi_image_get_pitch (GstVaapiImage * image, guint plane)
 {
-    g_return_val_if_fail(image != NULL, 0);
-    g_return_val_if_fail(_gst_vaapi_image_is_mapped(image), 0);
-    g_return_val_if_fail(plane < image->image.num_planes, 0);
+  g_return_val_if_fail (image != NULL, 0);
+  g_return_val_if_fail (_gst_vaapi_image_is_mapped (image), 0);
+  g_return_val_if_fail (plane < image->image.num_planes, 0);
 
-    return image->image.pitches[plane];
+  return image->image.pitches[plane];
 }
 
 /**
@@ -691,198 +674,177 @@ gst_vaapi_image_get_pitch(GstVaapiImage *image, guint plane)
  * Return value: the whole image data size of the @image
  */
 guint
-gst_vaapi_image_get_data_size(GstVaapiImage *image)
+gst_vaapi_image_get_data_size (GstVaapiImage * image)
 {
-    g_return_val_if_fail(image != NULL, 0);
+  g_return_val_if_fail (image != NULL, 0);
 
-    return image->image.data_size;
+  return image->image.data_size;
 }
 
 #include <gst/video/gstvideometa.h>
 
 static gboolean
-init_image_from_video_meta(GstVaapiImageRaw *raw_image, GstVideoMeta *vmeta)
+init_image_from_video_meta (GstVaapiImageRaw * raw_image, GstVideoMeta * vmeta)
 {
-    GST_FIXME("map from GstVideoMeta + add fini_image_from_buffer()");
-    return FALSE;
+  GST_FIXME ("map from GstVideoMeta + add fini_image_from_buffer()");
+  return FALSE;
 }
 
 static gboolean
-init_image_from_buffer(GstVaapiImageRaw *raw_image, GstBuffer *buffer)
+init_image_from_buffer (GstVaapiImageRaw * raw_image, GstBuffer * buffer)
 {
-    GstVideoMeta * const vmeta = gst_buffer_get_video_meta(buffer);
+  GstVideoMeta *const vmeta = gst_buffer_get_video_meta (buffer);
 
-    return vmeta ? init_image_from_video_meta(raw_image, vmeta) : FALSE;
+  return vmeta ? init_image_from_video_meta (raw_image, vmeta) : FALSE;
 }
 
 /* Copy N lines of an image */
 static inline void
-memcpy_pic(
-    guchar       *dst,
-    guint         dst_stride,
-    const guchar *src,
-    guint         src_stride,
-    guint         len,
-    guint         height
-)
+memcpy_pic (guchar * dst,
+    guint dst_stride,
+    const guchar * src, guint src_stride, guint len, guint height)
 {
-    guint i;
+  guint i;
 
-    for (i = 0; i < height; i++)  {
-        memcpy(dst, src, len);
-        dst += dst_stride;
-        src += src_stride;
-    }
+  for (i = 0; i < height; i++) {
+    memcpy (dst, src, len);
+    dst += dst_stride;
+    src += src_stride;
+  }
 }
 
 /* Copy NV12 images */
 static void
-copy_image_NV12(
-    GstVaapiImageRaw        *dst_image,
-    GstVaapiImageRaw        *src_image,
-    const GstVaapiRectangle *rect
-)
+copy_image_NV12 (GstVaapiImageRaw * dst_image,
+    GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
 {
-    guchar *dst, *src;
-    guint dst_stride, src_stride;
-
-    /* Y plane */
-    dst_stride = dst_image->stride[0];
-    dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x;
-    src_stride = src_image->stride[0];
-    src = src_image->pixels[0] + rect->y * src_stride + rect->x;
-    memcpy_pic(dst, dst_stride, src, src_stride, rect->width, rect->height);
-
-    /* UV plane */
-    dst_stride = dst_image->stride[1];
-    dst = dst_image->pixels[1] + (rect->y / 2) * dst_stride + (rect->x & -2);
-    src_stride = src_image->stride[1];
-    src = src_image->pixels[1] + (rect->y / 2) * src_stride + (rect->x & -2);
-    memcpy_pic(dst, dst_stride, src, src_stride, rect->width, rect->height / 2);
+  guchar *dst, *src;
+  guint dst_stride, src_stride;
+
+  /* Y plane */
+  dst_stride = dst_image->stride[0];
+  dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x;
+  src_stride = src_image->stride[0];
+  src = src_image->pixels[0] + rect->y * src_stride + rect->x;
+  memcpy_pic (dst, dst_stride, src, src_stride, rect->width, rect->height);
+
+  /* UV plane */
+  dst_stride = dst_image->stride[1];
+  dst = dst_image->pixels[1] + (rect->y / 2) * dst_stride + (rect->x & -2);
+  src_stride = src_image->stride[1];
+  src = src_image->pixels[1] + (rect->y / 2) * src_stride + (rect->x & -2);
+  memcpy_pic (dst, dst_stride, src, src_stride, rect->width, rect->height / 2);
 }
 
 /* Copy YV12 images */
 static void
-copy_image_YV12(
-    GstVaapiImageRaw        *dst_image,
-    GstVaapiImageRaw        *src_image,
-    const GstVaapiRectangle *rect
-)
+copy_image_YV12 (GstVaapiImageRaw * dst_image,
+    GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
 {
-    guchar *dst, *src;
-    guint dst_stride, src_stride;
-    guint i, x, y, w, h;
-
-    /* Y plane */
-    dst_stride = dst_image->stride[0];
-    dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x;
-    src_stride = src_image->stride[0];
-    src = src_image->pixels[0] + rect->y * src_stride + rect->x;
-    memcpy_pic(dst, dst_stride, src, src_stride, rect->width, rect->height);
-
-    /* U/V planes */
-    x = rect->x / 2;
-    y = rect->y / 2;
-    w = rect->width / 2;
-    h = rect->height / 2;
-    for (i = 1; i < dst_image->num_planes; i++) {
-        dst_stride = dst_image->stride[i];
-        dst = dst_image->pixels[i] + y * dst_stride + x;
-        src_stride = src_image->stride[i];
-        src = src_image->pixels[i] + y * src_stride + x;
-        memcpy_pic(dst, dst_stride, src, src_stride, w, h);
-    }
+  guchar *dst, *src;
+  guint dst_stride, src_stride;
+  guint i, x, y, w, h;
+
+  /* Y plane */
+  dst_stride = dst_image->stride[0];
+  dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x;
+  src_stride = src_image->stride[0];
+  src = src_image->pixels[0] + rect->y * src_stride + rect->x;
+  memcpy_pic (dst, dst_stride, src, src_stride, rect->width, rect->height);
+
+  /* U/V planes */
+  x = rect->x / 2;
+  y = rect->y / 2;
+  w = rect->width / 2;
+  h = rect->height / 2;
+  for (i = 1; i < dst_image->num_planes; i++) {
+    dst_stride = dst_image->stride[i];
+    dst = dst_image->pixels[i] + y * dst_stride + x;
+    src_stride = src_image->stride[i];
+    src = src_image->pixels[i] + y * src_stride + x;
+    memcpy_pic (dst, dst_stride, src, src_stride, w, h);
+  }
 }
 
 /* Copy YUY2 images */
 static void
-copy_image_YUY2(
-    GstVaapiImageRaw        *dst_image,
-    GstVaapiImageRaw        *src_image,
-    const GstVaapiRectangle *rect
-)
+copy_image_YUY2 (GstVaapiImageRaw * dst_image,
+    GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
 {
-    guchar *dst, *src;
-    guint dst_stride, src_stride;
-
-    /* YUV 4:2:2, full vertical resolution */
-    dst_stride = dst_image->stride[0];
-    dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x * 2;
-    src_stride = src_image->stride[0];
-    src = src_image->pixels[0] + rect->y * src_stride + rect->x * 2;
-    memcpy_pic(dst, dst_stride, src, src_stride, rect->width * 2, rect->height);
+  guchar *dst, *src;
+  guint dst_stride, src_stride;
+
+  /* YUV 4:2:2, full vertical resolution */
+  dst_stride = dst_image->stride[0];
+  dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x * 2;
+  src_stride = src_image->stride[0];
+  src = src_image->pixels[0] + rect->y * src_stride + rect->x * 2;
+  memcpy_pic (dst, dst_stride, src, src_stride, rect->width * 2, rect->height);
 }
 
 /* Copy RGBA images */
 static void
-copy_image_RGBA(
-    GstVaapiImageRaw        *dst_image,
-    GstVaapiImageRaw        *src_image,
-    const GstVaapiRectangle *rect
-)
+copy_image_RGBA (GstVaapiImageRaw * dst_image,
+    GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
 {
-    guchar *dst, *src;
-    guint dst_stride, src_stride;
-
-    dst_stride = dst_image->stride[0];
-    dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x;
-    src_stride = src_image->stride[0];
-    src = src_image->pixels[0] + rect->y * src_stride + rect->x;
-    memcpy_pic(dst, dst_stride, src, src_stride, 4 * rect->width, rect->height);
+  guchar *dst, *src;
+  guint dst_stride, src_stride;
+
+  dst_stride = dst_image->stride[0];
+  dst = dst_image->pixels[0] + rect->y * dst_stride + rect->x;
+  src_stride = src_image->stride[0];
+  src = src_image->pixels[0] + rect->y * src_stride + rect->x;
+  memcpy_pic (dst, dst_stride, src, src_stride, 4 * rect->width, rect->height);
 }
 
 static gboolean
-copy_image(
-    GstVaapiImageRaw        *dst_image,
-    GstVaapiImageRaw        *src_image,
-    const GstVaapiRectangle *rect
-)
+copy_image (GstVaapiImageRaw * dst_image,
+    GstVaapiImageRaw * src_image, const GstVaapiRectangle * rect)
 {
-    GstVaapiRectangle default_rect;
-
-    if (dst_image->format != src_image->format ||
-        dst_image->width  != src_image->width  ||
-        dst_image->height != src_image->height)
-        return FALSE;
-
-    if (rect) {
-        if (rect->x >= src_image->width ||
-            rect->x + rect->width > src_image->width ||
-            rect->y >= src_image->height ||
-            rect->y + rect->height > src_image->height)
-            return FALSE;
-    }
-    else {
-        default_rect.x      = 0;
-        default_rect.y      = 0;
-        default_rect.width  = src_image->width;
-        default_rect.height = src_image->height;
-        rect                = &default_rect;
-    }
+  GstVaapiRectangle default_rect;
+
+  if (dst_image->format != src_image->format ||
+      dst_image->width != src_image->width ||
+      dst_image->height != src_image->height)
+    return FALSE;
 
-    switch (dst_image->format) {
+  if (rect) {
+    if (rect->x >= src_image->width ||
+        rect->x + rect->width > src_image->width ||
+        rect->y >= src_image->height ||
+        rect->y + rect->height > src_image->height)
+      return FALSE;
+  } else {
+    default_rect.x = 0;
+    default_rect.y = 0;
+    default_rect.width = src_image->width;
+    default_rect.height = src_image->height;
+    rect = &default_rect;
+  }
+
+  switch (dst_image->format) {
     case GST_VIDEO_FORMAT_NV12:
-        copy_image_NV12(dst_image, src_image, rect);
-        break;
+      copy_image_NV12 (dst_image, src_image, rect);
+      break;
     case GST_VIDEO_FORMAT_YV12:
     case GST_VIDEO_FORMAT_I420:
-        copy_image_YV12(dst_image, src_image, rect);
-        break;
+      copy_image_YV12 (dst_image, src_image, rect);
+      break;
     case GST_VIDEO_FORMAT_YUY2:
     case GST_VIDEO_FORMAT_UYVY:
-        copy_image_YUY2(dst_image, src_image, rect);
-        break;
+      copy_image_YUY2 (dst_image, src_image, rect);
+      break;
     case GST_VIDEO_FORMAT_ARGB:
     case GST_VIDEO_FORMAT_RGBA:
     case GST_VIDEO_FORMAT_ABGR:
     case GST_VIDEO_FORMAT_BGRA:
-        copy_image_RGBA(dst_image, src_image, rect);
-        break;
+      copy_image_RGBA (dst_image, src_image, rect);
+      break;
     default:
-        GST_ERROR("unsupported image format for copy");
-        return FALSE;
-    }
-    return TRUE;
+      GST_ERROR ("unsupported image format for copy");
+      return FALSE;
+  }
+  return TRUE;
 }
 
 /**
@@ -898,34 +860,31 @@ copy_image(
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_get_buffer(
-    GstVaapiImage     *image,
-    GstBuffer         *buffer,
-    GstVaapiRectangle *rect
-)
+gst_vaapi_image_get_buffer (GstVaapiImage * image,
+    GstBuffer * buffer, GstVaapiRectangle * rect)
 {
-    GstVaapiImageRaw dst_image, src_image;
-    gboolean success;
+  GstVaapiImageRaw dst_image, src_image;
+  gboolean success;
 
-    g_return_val_if_fail(image != NULL, FALSE);
-    g_return_val_if_fail(GST_IS_BUFFER(buffer), FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
+  g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE);
 
-    if (!init_image_from_buffer(&dst_image, buffer))
-        return FALSE;
-    if (dst_image.format != image->format)
-        return FALSE;
-    if (dst_image.width != image->width || dst_image.height != image->height)
-        return FALSE;
+  if (!init_image_from_buffer (&dst_image, buffer))
+    return FALSE;
+  if (dst_image.format != image->format)
+    return FALSE;
+  if (dst_image.width != image->width || dst_image.height != image->height)
+    return FALSE;
 
-    if (!_gst_vaapi_image_map(image, &src_image))
-        return FALSE;
+  if (!_gst_vaapi_image_map (image, &src_image))
+    return FALSE;
 
-    success = copy_image(&dst_image, &src_image, rect);
+  success = copy_image (&dst_image, &src_image, rect);
 
-    if (!_gst_vaapi_image_unmap(image))
-        return FALSE;
+  if (!_gst_vaapi_image_unmap (image))
+    return FALSE;
 
-    return success;
+  return success;
 }
 
 /**
@@ -941,26 +900,23 @@ gst_vaapi_image_get_buffer(
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_get_raw(
-    GstVaapiImage     *image,
-    GstVaapiImageRaw  *dst_image,
-    GstVaapiRectangle *rect
-)
+gst_vaapi_image_get_raw (GstVaapiImage * image,
+    GstVaapiImageRaw * dst_image, GstVaapiRectangle * rect)
 {
-    GstVaapiImageRaw src_image;
-    gboolean success;
+  GstVaapiImageRaw src_image;
+  gboolean success;
 
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    if (!_gst_vaapi_image_map(image, &src_image))
-        return FALSE;
+  if (!_gst_vaapi_image_map (image, &src_image))
+    return FALSE;
 
-    success = copy_image(dst_image, &src_image, rect);
+  success = copy_image (dst_image, &src_image, rect);
 
-    if (!_gst_vaapi_image_unmap(image))
-        return FALSE;
+  if (!_gst_vaapi_image_unmap (image))
+    return FALSE;
 
-    return success;
+  return success;
 }
 
 /**
@@ -976,34 +932,31 @@ gst_vaapi_image_get_raw(
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_update_from_buffer(
-    GstVaapiImage     *image,
-    GstBuffer         *buffer,
-    GstVaapiRectangle *rect
-)
+gst_vaapi_image_update_from_buffer (GstVaapiImage * image,
+    GstBuffer * buffer, GstVaapiRectangle * rect)
 {
-    GstVaapiImageRaw dst_image, src_image;
-    gboolean success;
+  GstVaapiImageRaw dst_image, src_image;
+  gboolean success;
 
-    g_return_val_if_fail(image != NULL, FALSE);
-    g_return_val_if_fail(GST_IS_BUFFER(buffer), FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
+  g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE);
 
-    if (!init_image_from_buffer(&src_image, buffer))
-        return FALSE;
-    if (src_image.format != image->format)
-        return FALSE;
-    if (src_image.width != image->width || src_image.height != image->height)
-        return FALSE;
+  if (!init_image_from_buffer (&src_image, buffer))
+    return FALSE;
+  if (src_image.format != image->format)
+    return FALSE;
+  if (src_image.width != image->width || src_image.height != image->height)
+    return FALSE;
 
-    if (!_gst_vaapi_image_map(image, &dst_image))
-        return FALSE;
+  if (!_gst_vaapi_image_map (image, &dst_image))
+    return FALSE;
 
-    success = copy_image(&dst_image, &src_image, rect);
+  success = copy_image (&dst_image, &src_image, rect);
 
-    if (!_gst_vaapi_image_unmap(image))
-        return FALSE;
+  if (!_gst_vaapi_image_unmap (image))
+    return FALSE;
 
-    return success;
+  return success;
 }
 
 /**
@@ -1020,26 +973,23 @@ gst_vaapi_image_update_from_buffer(
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_update_from_raw(
-    GstVaapiImage     *image,
-    GstVaapiImageRaw  *src_image,
-    GstVaapiRectangle *rect
-)
+gst_vaapi_image_update_from_raw (GstVaapiImage * image,
+    GstVaapiImageRaw * src_image, GstVaapiRectangle * rect)
 {
-    GstVaapiImageRaw dst_image;
-    gboolean success;
+  GstVaapiImageRaw dst_image;
+  gboolean success;
 
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    if (!_gst_vaapi_image_map(image, &dst_image))
-        return FALSE;
+  if (!_gst_vaapi_image_map (image, &dst_image))
+    return FALSE;
 
-    success = copy_image(&dst_image, src_image, rect);
+  success = copy_image (&dst_image, src_image, rect);
 
-    if (!_gst_vaapi_image_unmap(image))
-        return FALSE;
+  if (!_gst_vaapi_image_unmap (image))
+    return FALSE;
 
-    return success;
+  return success;
 }
 
 /**
@@ -1053,23 +1003,23 @@ gst_vaapi_image_update_from_raw(
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_image_copy(GstVaapiImage *dst_image, GstVaapiImage *src_image)
+gst_vaapi_image_copy (GstVaapiImage * dst_image, GstVaapiImage * src_image)
 {
-    GstVaapiImageRaw dst_image_raw, src_image_raw;
-    gboolean success = FALSE;
+  GstVaapiImageRaw dst_image_raw, src_image_raw;
+  gboolean success = FALSE;
 
-    g_return_val_if_fail(dst_image != NULL, FALSE);
-    g_return_val_if_fail(src_image != NULL, FALSE);
+  g_return_val_if_fail (dst_image != NULL, FALSE);
+  g_return_val_if_fail (src_image != NULL, FALSE);
 
-    if (!_gst_vaapi_image_map(dst_image, &dst_image_raw))
-        goto end;
-    if (!_gst_vaapi_image_map(src_image, &src_image_raw))
-        goto end;
+  if (!_gst_vaapi_image_map (dst_image, &dst_image_raw))
+    goto end;
+  if (!_gst_vaapi_image_map (src_image, &src_image_raw))
+    goto end;
 
-    success = copy_image(&dst_image_raw, &src_image_raw, NULL);
+  success = copy_image (&dst_image_raw, &src_image_raw, NULL);
 
 end:
-    _gst_vaapi_image_unmap(src_image);
-    _gst_vaapi_image_unmap(dst_image);
-    return success;
+  _gst_vaapi_image_unmap (src_image);
+  _gst_vaapi_image_unmap (dst_image);
+  return success;
 }
index 11bc2fe..1f75e81 100644 (file)
 #include "gstvaapiparser_frame.h"
 
 static inline const GstVaapiMiniObjectClass *
-gst_vaapi_parser_frame_class(void)
+gst_vaapi_parser_frame_class (void)
 {
-    static const GstVaapiMiniObjectClass GstVaapiParserFrameClass = {
-        sizeof(GstVaapiParserFrame),
-        (GDestroyNotify)gst_vaapi_parser_frame_free
-    };
-    return &GstVaapiParserFrameClass;
+  static const GstVaapiMiniObjectClass GstVaapiParserFrameClass = {
+    sizeof (GstVaapiParserFrame),
+    (GDestroyNotify) gst_vaapi_parser_frame_free
+  };
+  return &GstVaapiParserFrameClass;
 }
 
 static inline gboolean
-alloc_units(GArray **units_ptr, guint size)
+alloc_units (GArray ** units_ptr, guint size)
 {
-    GArray *units;
+  GArray *units;
 
-    units = g_array_sized_new(FALSE, FALSE, sizeof(GstVaapiDecoderUnit), size);
-    *units_ptr = units;
-    return units != NULL;
+  units = g_array_sized_new (FALSE, FALSE, sizeof (GstVaapiDecoderUnit), size);
+  *units_ptr = units;
+  return units != NULL;
 }
 
 static inline void
-free_units(GArray **units_ptr)
+free_units (GArray ** units_ptr)
 {
-    GArray * const units = *units_ptr;
-    guint i;
-
-    if (units) {
-        for (i = 0; i < units->len; i++) {
-            GstVaapiDecoderUnit * const unit =
-                &g_array_index(units, GstVaapiDecoderUnit, i);
-            gst_vaapi_decoder_unit_clear(unit);
-        }
-        g_array_free(units, TRUE);
-        *units_ptr = NULL;
+  GArray *const units = *units_ptr;
+  guint i;
+
+  if (units) {
+    for (i = 0; i < units->len; i++) {
+      GstVaapiDecoderUnit *const unit =
+          &g_array_index (units, GstVaapiDecoderUnit, i);
+      gst_vaapi_decoder_unit_clear (unit);
     }
+    g_array_free (units, TRUE);
+    *units_ptr = NULL;
+  }
 }
 
 /**
@@ -75,32 +75,32 @@ free_units(GArray **units_ptr)
  * Returns: The newly allocated #GstVaapiParserFrame
  */
 GstVaapiParserFrame *
-gst_vaapi_parser_frame_new(guint width, guint height)
+gst_vaapi_parser_frame_new (guint width, guint height)
 {
-    GstVaapiParserFrame *frame;
-    guint num_slices;
-
-    frame = (GstVaapiParserFrame *)
-        gst_vaapi_mini_object_new(gst_vaapi_parser_frame_class());
-    if (!frame)
-        return NULL;
-
-    if (!height)
-        height = 1088;
-    num_slices = (height + 15) / 16;
-
-    if (!alloc_units(&frame->pre_units, 16))
-        goto error;
-    if (!alloc_units(&frame->units, num_slices))
-        goto error;
-    if (!alloc_units(&frame->post_units, 1))
-        goto error;
-    frame->output_offset = 0;
-    return frame;
+  GstVaapiParserFrame *frame;
+  guint num_slices;
 
-error:
-    gst_vaapi_parser_frame_unref(frame);
+  frame = (GstVaapiParserFrame *)
+      gst_vaapi_mini_object_new (gst_vaapi_parser_frame_class ());
+  if (!frame)
     return NULL;
+
+  if (!height)
+    height = 1088;
+  num_slices = (height + 15) / 16;
+
+  if (!alloc_units (&frame->pre_units, 16))
+    goto error;
+  if (!alloc_units (&frame->units, num_slices))
+    goto error;
+  if (!alloc_units (&frame->post_units, 1))
+    goto error;
+  frame->output_offset = 0;
+  return frame;
+
+error:
+  gst_vaapi_parser_frame_unref (frame);
+  return NULL;
 }
 
 /**
@@ -114,11 +114,11 @@ error:
  * sub-classes.
  */
 void
-gst_vaapi_parser_frame_free(GstVaapiParserFrame *frame)
+gst_vaapi_parser_frame_free (GstVaapiParserFrame * frame)
 {
-    free_units(&frame->units);
-    free_units(&frame->pre_units);
-    free_units(&frame->post_units);
+  free_units (&frame->units);
+  free_units (&frame->pre_units);
+  free_units (&frame->post_units);
 }
 
 /**
@@ -129,19 +129,19 @@ gst_vaapi_parser_frame_free(GstVaapiParserFrame *frame)
  * Appends unit to the @frame.
  */
 void
-gst_vaapi_parser_frame_append_unit(GstVaapiParserFrame *frame,
-    GstVaapiDecoderUnit *unit)
+gst_vaapi_parser_frame_append_unit (GstVaapiParserFrame * frame,
+    GstVaapiDecoderUnit * unit)
 {
-    GArray **unit_array_ptr;
-
-    unit->offset = frame->output_offset;
-    frame->output_offset += unit->size;
-
-    if (GST_VAAPI_DECODER_UNIT_IS_SLICE(unit))
-        unit_array_ptr = &frame->units;
-    else if (GST_VAAPI_DECODER_UNIT_IS_FRAME_END(unit))
-        unit_array_ptr = &frame->post_units;
-    else
-        unit_array_ptr = &frame->pre_units;
-    g_array_append_val(*unit_array_ptr, *unit);
+  GArray **unit_array_ptr;
+
+  unit->offset = frame->output_offset;
+  frame->output_offset += unit->size;
+
+  if (GST_VAAPI_DECODER_UNIT_IS_SLICE (unit))
+    unit_array_ptr = &frame->units;
+  else if (GST_VAAPI_DECODER_UNIT_IS_FRAME_END (unit))
+    unit_array_ptr = &frame->post_units;
+  else
+    unit_array_ptr = &frame->pre_units;
+  g_array_append_val (*unit_array_ptr, *unit);
 }
index e01d033..aba117c 100644 (file)
 #undef gst_vaapi_pixmap_replace
 
 static inline GstVaapiPixmap *
-gst_vaapi_pixmap_new_internal(const GstVaapiPixmapClass *pixmap_class,
-    GstVaapiDisplay *display)
+gst_vaapi_pixmap_new_internal (const GstVaapiPixmapClass * pixmap_class,
+    GstVaapiDisplay * display)
 {
-    g_assert(pixmap_class->create != NULL);
-    g_assert(pixmap_class->render != NULL);
+  g_assert (pixmap_class->create != NULL);
+  g_assert (pixmap_class->render != NULL);
 
-    return gst_vaapi_object_new(GST_VAAPI_OBJECT_CLASS(pixmap_class), display);
+  return gst_vaapi_object_new (GST_VAAPI_OBJECT_CLASS (pixmap_class), display);
 }
 
 GstVaapiPixmap *
-gst_vaapi_pixmap_new(const GstVaapiPixmapClass *pixmap_class,
-    GstVaapiDisplay *display, GstVideoFormat format, guint width, guint height)
+gst_vaapi_pixmap_new (const GstVaapiPixmapClass * pixmap_class,
+    GstVaapiDisplay * display, GstVideoFormat format, guint width, guint height)
 {
-    GstVaapiPixmap *pixmap;
+  GstVaapiPixmap *pixmap;
 
-    g_return_val_if_fail(format != GST_VIDEO_FORMAT_UNKNOWN &&
-                         format != GST_VIDEO_FORMAT_ENCODED, NULL);
-    g_return_val_if_fail(width  > 0, NULL);
-    g_return_val_if_fail(height > 0, NULL);
+  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN &&
+      format != GST_VIDEO_FORMAT_ENCODED, NULL);
+  g_return_val_if_fail (width > 0, NULL);
+  g_return_val_if_fail (height > 0, NULL);
 
-    pixmap = gst_vaapi_pixmap_new_internal(pixmap_class, display);
-    if (!pixmap)
-        return NULL;
+  pixmap = gst_vaapi_pixmap_new_internal (pixmap_class, display);
+  if (!pixmap)
+    return NULL;
 
-    pixmap->format = format;
-    pixmap->width  = width;
-    pixmap->height = height;
-    if (!pixmap_class->create(pixmap))
-        goto error;
-    return pixmap;
+  pixmap->format = format;
+  pixmap->width = width;
+  pixmap->height = height;
+  if (!pixmap_class->create (pixmap))
+    goto error;
+  return pixmap;
 
 error:
-    gst_vaapi_pixmap_unref_internal(pixmap);
-    return NULL;
+  gst_vaapi_pixmap_unref_internal (pixmap);
+  return NULL;
 }
 
 GstVaapiPixmap *
-gst_vaapi_pixmap_new_from_native(const GstVaapiPixmapClass *pixmap_class,
-    GstVaapiDisplay *display, gpointer native_pixmap)
+gst_vaapi_pixmap_new_from_native (const GstVaapiPixmapClass * pixmap_class,
+    GstVaapiDisplay * display, gpointer native_pixmap)
 {
-    GstVaapiPixmap *pixmap;
+  GstVaapiPixmap *pixmap;
 
-    pixmap = gst_vaapi_pixmap_new_internal(pixmap_class, display);
-    if (!pixmap)
-        return NULL;
+  pixmap = gst_vaapi_pixmap_new_internal (pixmap_class, display);
+  if (!pixmap)
+    return NULL;
 
-    GST_VAAPI_OBJECT_ID(pixmap) = GPOINTER_TO_SIZE(native_pixmap);
-    pixmap->use_foreign_pixmap = TRUE;
-    if (!pixmap_class->create(pixmap))
-        goto error;
-    return pixmap;
+  GST_VAAPI_OBJECT_ID (pixmap) = GPOINTER_TO_SIZE (native_pixmap);
+  pixmap->use_foreign_pixmap = TRUE;
+  if (!pixmap_class->create (pixmap))
+    goto error;
+  return pixmap;
 
 error:
-    gst_vaapi_pixmap_unref_internal(pixmap);
-    return NULL;
+  gst_vaapi_pixmap_unref_internal (pixmap);
+  return NULL;
 }
 
 /**
@@ -105,9 +105,9 @@ error:
  * Returns: The same @pixmap argument
  */
 GstVaapiPixmap *
-gst_vaapi_pixmap_ref(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_ref (GstVaapiPixmap * pixmap)
 {
-    return gst_vaapi_pixmap_ref_internal(pixmap);
+  return gst_vaapi_pixmap_ref_internal (pixmap);
 }
 
 /**
@@ -118,9 +118,9 @@ gst_vaapi_pixmap_ref(GstVaapiPixmap *pixmap)
  * the reference count reaches zero, the pixmap will be free'd.
  */
 void
-gst_vaapi_pixmap_unref(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_unref (GstVaapiPixmap * pixmap)
 {
-    gst_vaapi_pixmap_unref_internal(pixmap);
+  gst_vaapi_pixmap_unref_internal (pixmap);
 }
 
 /**
@@ -133,10 +133,10 @@ gst_vaapi_pixmap_unref(GstVaapiPixmap *pixmap)
  * valid pixmap. However, @new_pixmap can be NULL.
  */
 void
-gst_vaapi_pixmap_replace(GstVaapiPixmap **old_pixmap_ptr,
-    GstVaapiPixmap *new_pixmap)
+gst_vaapi_pixmap_replace (GstVaapiPixmap ** old_pixmap_ptr,
+    GstVaapiPixmap * new_pixmap)
 {
-    gst_vaapi_pixmap_replace_internal(old_pixmap_ptr, new_pixmap);
+  gst_vaapi_pixmap_replace_internal (old_pixmap_ptr, new_pixmap);
 }
 
 /**
@@ -148,11 +148,11 @@ gst_vaapi_pixmap_replace(GstVaapiPixmap **old_pixmap_ptr,
  * Return value: the parent #GstVaapiDisplay object
  */
 GstVaapiDisplay *
-gst_vaapi_pixmap_get_display(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_get_display (GstVaapiPixmap * pixmap)
 {
-    g_return_val_if_fail(pixmap != NULL, NULL);
+  g_return_val_if_fail (pixmap != NULL, NULL);
 
-    return GST_VAAPI_OBJECT_DISPLAY(pixmap);
+  return GST_VAAPI_OBJECT_DISPLAY (pixmap);
 }
 
 /**
@@ -164,11 +164,11 @@ gst_vaapi_pixmap_get_display(GstVaapiPixmap *pixmap)
  * Return value: the format of the @pixmap
  */
 GstVideoFormat
-gst_vaapi_pixmap_get_format(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_get_format (GstVaapiPixmap * pixmap)
 {
-    g_return_val_if_fail(pixmap != NULL, GST_VIDEO_FORMAT_UNKNOWN);
+  g_return_val_if_fail (pixmap != NULL, GST_VIDEO_FORMAT_UNKNOWN);
 
-    return GST_VAAPI_PIXMAP_FORMAT(pixmap);
+  return GST_VAAPI_PIXMAP_FORMAT (pixmap);
 }
 
 /**
@@ -180,11 +180,11 @@ gst_vaapi_pixmap_get_format(GstVaapiPixmap *pixmap)
  * Return value: the width of the @pixmap, in pixels
  */
 guint
-gst_vaapi_pixmap_get_width(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_get_width (GstVaapiPixmap * pixmap)
 {
-    g_return_val_if_fail(pixmap != NULL, 0);
+  g_return_val_if_fail (pixmap != NULL, 0);
 
-    return GST_VAAPI_PIXMAP_WIDTH(pixmap);
+  return GST_VAAPI_PIXMAP_WIDTH (pixmap);
 }
 
 /**
@@ -196,11 +196,11 @@ gst_vaapi_pixmap_get_width(GstVaapiPixmap *pixmap)
  * Return value: the height of the @pixmap, in pixels
  */
 guint
-gst_vaapi_pixmap_get_height(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_get_height (GstVaapiPixmap * pixmap)
 {
-    g_return_val_if_fail(pixmap != NULL, 0);
+  g_return_val_if_fail (pixmap != NULL, 0);
 
-    return GST_VAAPI_PIXMAP_HEIGHT(pixmap);
+  return GST_VAAPI_PIXMAP_HEIGHT (pixmap);
 }
 
 /**
@@ -212,15 +212,16 @@ gst_vaapi_pixmap_get_height(GstVaapiPixmap *pixmap)
  * Retrieves the dimensions of a #GstVaapiPixmap.
  */
 void
-gst_vaapi_pixmap_get_size(GstVaapiPixmap *pixmap, guint *width, guint *height)
+gst_vaapi_pixmap_get_size (GstVaapiPixmap * pixmap, guint * width,
+    guint * height)
 {
-    g_return_if_fail(pixmap != NULL);
+  g_return_if_fail (pixmap != NULL);
 
-    if (width)
-        *width = GST_VAAPI_PIXMAP_WIDTH(pixmap);
+  if (width)
+    *width = GST_VAAPI_PIXMAP_WIDTH (pixmap);
 
-    if (height)
-        *height = GST_VAAPI_PIXMAP_HEIGHT(pixmap);
+  if (height)
+    *height = GST_VAAPI_PIXMAP_HEIGHT (pixmap);
 }
 
 /**
@@ -240,21 +241,21 @@ gst_vaapi_pixmap_get_size(GstVaapiPixmap *pixmap, guint *width, guint *height)
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_pixmap_put_surface(GstVaapiPixmap *pixmap, GstVaapiSurface *surface,
-    const GstVaapiRectangle *crop_rect, guint flags)
+gst_vaapi_pixmap_put_surface (GstVaapiPixmap * pixmap,
+    GstVaapiSurface * surface, const GstVaapiRectangle * crop_rect, guint flags)
 {
-    GstVaapiRectangle src_rect;
-
-    g_return_val_if_fail(pixmap != NULL, FALSE);
-    g_return_val_if_fail(surface != NULL, FALSE);
-
-    if (!crop_rect) {
-        src_rect.x = 0;
-        src_rect.y = 0;
-        src_rect.width = GST_VAAPI_SURFACE_WIDTH(surface);
-        src_rect.height = GST_VAAPI_SURFACE_HEIGHT(surface);
-        crop_rect = &src_rect;
-    }
-    return GST_VAAPI_PIXMAP_GET_CLASS(pixmap)->render(pixmap, surface,
-        crop_rect, flags);
+  GstVaapiRectangle src_rect;
+
+  g_return_val_if_fail (pixmap != NULL, FALSE);
+  g_return_val_if_fail (surface != NULL, FALSE);
+
+  if (!crop_rect) {
+    src_rect.x = 0;
+    src_rect.y = 0;
+    src_rect.width = GST_VAAPI_SURFACE_WIDTH (surface);
+    src_rect.height = GST_VAAPI_SURFACE_HEIGHT (surface);
+    crop_rect = &src_rect;
+  }
+  return GST_VAAPI_PIXMAP_GET_CLASS (pixmap)->render (pixmap, surface,
+      crop_rect, flags);
 }
index 4a14f80..9ffccd9 100644 (file)
 #define DEBUG 1
 #include "gstvaapidebug.h"
 
-typedef struct _GstVaapiPixmapX11Class          GstVaapiPixmapX11Class;
+typedef struct _GstVaapiPixmapX11Class GstVaapiPixmapX11Class;
 
-struct _GstVaapiPixmapX11 {
-    GstVaapiPixmap      parent_instance;
+struct _GstVaapiPixmapX11
+{
+  GstVaapiPixmap parent_instance;
 };
 
-struct _GstVaapiPixmapX11Class {
-    GstVaapiPixmapClass parent_class;
+struct _GstVaapiPixmapX11Class
+{
+  GstVaapiPixmapClass parent_class;
 };
 
 static gboolean
-gst_vaapi_pixmap_x11_create_from_xid(GstVaapiPixmap *pixmap, Pixmap xid)
+gst_vaapi_pixmap_x11_create_from_xid (GstVaapiPixmap * pixmap, Pixmap xid)
 {
-    guint depth;
-    gboolean success;
-
-    if (!xid)
-        return FALSE;
-
-    GST_VAAPI_OBJECT_LOCK_DISPLAY(pixmap);
-    success = x11_get_geometry(GST_VAAPI_OBJECT_NATIVE_DISPLAY(pixmap), xid,
-        NULL, NULL, &pixmap->width, &pixmap->height, &depth);
-    GST_VAAPI_OBJECT_UNLOCK_DISPLAY(pixmap);
-    if (!success)
-        return FALSE;
-
-    pixmap->format = gst_vaapi_display_x11_get_pixmap_format(
-        GST_VAAPI_OBJECT_DISPLAY_X11(pixmap), depth);
-    if (pixmap->format == GST_VIDEO_FORMAT_UNKNOWN)
-        return FALSE;
-    return TRUE;
+  guint depth;
+  gboolean success;
+
+  if (!xid)
+    return FALSE;
+
+  GST_VAAPI_OBJECT_LOCK_DISPLAY (pixmap);
+  success = x11_get_geometry (GST_VAAPI_OBJECT_NATIVE_DISPLAY (pixmap), xid,
+      NULL, NULL, &pixmap->width, &pixmap->height, &depth);
+  GST_VAAPI_OBJECT_UNLOCK_DISPLAY (pixmap);
+  if (!success)
+    return FALSE;
+
+  pixmap->format =
+      gst_vaapi_display_x11_get_pixmap_format (GST_VAAPI_OBJECT_DISPLAY_X11
+      (pixmap), depth);
+  if (pixmap->format == GST_VIDEO_FORMAT_UNKNOWN)
+    return FALSE;
+  return TRUE;
 }
 
 static gboolean
-gst_vaapi_pixmap_x11_create(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_x11_create (GstVaapiPixmap * pixmap)
 {
-    GstVaapiDisplayX11 * const display =
-        GST_VAAPI_DISPLAY_X11(GST_VAAPI_OBJECT_DISPLAY(pixmap));
-    Display * const dpy = GST_VAAPI_OBJECT_NATIVE_DISPLAY(display);
-    Window rootwin;
-    Pixmap xid;
-    guint depth;
-
-    if (pixmap->use_foreign_pixmap)
-        return gst_vaapi_pixmap_x11_create_from_xid(pixmap,
-            GST_VAAPI_OBJECT_ID(pixmap));
-
-    depth = gst_vaapi_display_x11_get_pixmap_depth(display, pixmap->format);
-    if (!depth)
-        return FALSE;
-
-    GST_VAAPI_OBJECT_LOCK_DISPLAY(pixmap);
-    rootwin = RootWindow(dpy, DefaultScreen(dpy));
-    xid = XCreatePixmap(dpy, rootwin, pixmap->width, pixmap->height, depth);
-    GST_VAAPI_OBJECT_UNLOCK_DISPLAY(pixmap);
-
-    GST_DEBUG("xid %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS(xid));
-    GST_VAAPI_OBJECT_ID(pixmap) = xid;
-    return xid != None;
+  GstVaapiDisplayX11 *const display =
+      GST_VAAPI_DISPLAY_X11 (GST_VAAPI_OBJECT_DISPLAY (pixmap));
+  Display *const dpy = GST_VAAPI_OBJECT_NATIVE_DISPLAY (display);
+  Window rootwin;
+  Pixmap xid;
+  guint depth;
+
+  if (pixmap->use_foreign_pixmap)
+    return gst_vaapi_pixmap_x11_create_from_xid (pixmap,
+        GST_VAAPI_OBJECT_ID (pixmap));
+
+  depth = gst_vaapi_display_x11_get_pixmap_depth (display, pixmap->format);
+  if (!depth)
+    return FALSE;
+
+  GST_VAAPI_OBJECT_LOCK_DISPLAY (pixmap);
+  rootwin = RootWindow (dpy, DefaultScreen (dpy));
+  xid = XCreatePixmap (dpy, rootwin, pixmap->width, pixmap->height, depth);
+  GST_VAAPI_OBJECT_UNLOCK_DISPLAY (pixmap);
+
+  GST_DEBUG ("xid %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (xid));
+  GST_VAAPI_OBJECT_ID (pixmap) = xid;
+  return xid != None;
 }
 
 static void
-gst_vaapi_pixmap_x11_destroy(GstVaapiPixmap *pixmap)
+gst_vaapi_pixmap_x11_destroy (GstVaapiPixmap * pixmap)
 {
-    const Pixmap xid = GST_VAAPI_OBJECT_ID(pixmap);
-
-    if (xid) {
-        if (!pixmap->use_foreign_pixmap) {
-            GST_VAAPI_OBJECT_LOCK_DISPLAY(pixmap);
-            XFreePixmap(GST_VAAPI_OBJECT_NATIVE_DISPLAY(pixmap), xid);
-            GST_VAAPI_OBJECT_UNLOCK_DISPLAY(pixmap);
-        }
-        GST_VAAPI_OBJECT_ID(pixmap) = None;
+  const Pixmap xid = GST_VAAPI_OBJECT_ID (pixmap);
+
+  if (xid) {
+    if (!pixmap->use_foreign_pixmap) {
+      GST_VAAPI_OBJECT_LOCK_DISPLAY (pixmap);
+      XFreePixmap (GST_VAAPI_OBJECT_NATIVE_DISPLAY (pixmap), xid);
+      GST_VAAPI_OBJECT_UNLOCK_DISPLAY (pixmap);
     }
+    GST_VAAPI_OBJECT_ID (pixmap) = None;
+  }
 }
 
 static gboolean
-gst_vaapi_pixmap_x11_render(GstVaapiPixmap *pixmap, GstVaapiSurface *surface,
-    const GstVaapiRectangle *crop_rect, guint flags)
+gst_vaapi_pixmap_x11_render (GstVaapiPixmap * pixmap, GstVaapiSurface * surface,
+    const GstVaapiRectangle * crop_rect, guint flags)
 {
-    VASurfaceID surface_id;
-    VAStatus status;
-
-    surface_id = GST_VAAPI_OBJECT_ID(surface);
-    if (surface_id == VA_INVALID_ID)
-        return FALSE;
-
-    GST_VAAPI_OBJECT_LOCK_DISPLAY(pixmap);
-    status = vaPutSurface(
-        GST_VAAPI_OBJECT_VADISPLAY(pixmap),
-        surface_id,
-        GST_VAAPI_OBJECT_ID(pixmap),
-        crop_rect->x, crop_rect->y,
-        crop_rect->width, crop_rect->height,
-        0, 0,
-        GST_VAAPI_PIXMAP_WIDTH(pixmap),
-        GST_VAAPI_PIXMAP_HEIGHT(pixmap),
-        NULL, 0,
-        from_GstVaapiSurfaceRenderFlags(flags)
-    );
-    GST_VAAPI_OBJECT_UNLOCK_DISPLAY(pixmap);
-    if (!vaapi_check_status(status, "vaPutSurface() [pixmap]"))
-        return FALSE;
-    return TRUE;
+  VASurfaceID surface_id;
+  VAStatus status;
+
+  surface_id = GST_VAAPI_OBJECT_ID (surface);
+  if (surface_id == VA_INVALID_ID)
+    return FALSE;
+
+  GST_VAAPI_OBJECT_LOCK_DISPLAY (pixmap);
+  status = vaPutSurface (GST_VAAPI_OBJECT_VADISPLAY (pixmap),
+      surface_id,
+      GST_VAAPI_OBJECT_ID (pixmap),
+      crop_rect->x, crop_rect->y,
+      crop_rect->width, crop_rect->height,
+      0, 0,
+      GST_VAAPI_PIXMAP_WIDTH (pixmap),
+      GST_VAAPI_PIXMAP_HEIGHT (pixmap),
+      NULL, 0, from_GstVaapiSurfaceRenderFlags (flags)
+      );
+  GST_VAAPI_OBJECT_UNLOCK_DISPLAY (pixmap);
+  if (!vaapi_check_status (status, "vaPutSurface() [pixmap]"))
+    return FALSE;
+  return TRUE;
 }
 
 void
-gst_vaapi_pixmap_x11_class_init(GstVaapiPixmapX11Class *klass)
+gst_vaapi_pixmap_x11_class_init (GstVaapiPixmapX11Class * klass)
 {
-    GstVaapiObjectClass * const object_class =
-        GST_VAAPI_OBJECT_CLASS(klass);
-    GstVaapiPixmapClass * const pixmap_class =
-        GST_VAAPI_PIXMAP_CLASS(klass);
+  GstVaapiObjectClass *const object_class = GST_VAAPI_OBJECT_CLASS (klass);
+  GstVaapiPixmapClass *const pixmap_class = GST_VAAPI_PIXMAP_CLASS (klass);
 
-    object_class->finalize = (GstVaapiObjectFinalizeFunc)
-        gst_vaapi_pixmap_x11_destroy;
+  object_class->finalize = (GstVaapiObjectFinalizeFunc)
+      gst_vaapi_pixmap_x11_destroy;
 
-    pixmap_class->create        = gst_vaapi_pixmap_x11_create;
-    pixmap_class->render        = gst_vaapi_pixmap_x11_render;
+  pixmap_class->create = gst_vaapi_pixmap_x11_create;
+  pixmap_class->render = gst_vaapi_pixmap_x11_render;
 }
 
 #define gst_vaapi_pixmap_x11_finalize \
     gst_vaapi_pixmap_x11_destroy
 
-GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
-    GstVaapiPixmapX11,
-    gst_vaapi_pixmap_x11,
-    gst_vaapi_pixmap_x11_class_init(&g_class))
+GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE (GstVaapiPixmapX11,
+    gst_vaapi_pixmap_x11, gst_vaapi_pixmap_x11_class_init (&g_class))
 
 /**
  * gst_vaapi_pixmap_x11_new:
@@ -179,17 +176,17 @@ GST_VAAPI_OBJECT_DEFINE_CLASS_WITH_CODE(
  *
  * Return value: the newly allocated #GstVaapiPixmap object
  */
-GstVaapiPixmap *
-gst_vaapi_pixmap_x11_new(GstVaapiDisplay *display, GstVideoFormat format,
-    guint width, guint height)
+     GstVaapiPixmap *gst_vaapi_pixmap_x11_new (GstVaapiDisplay * display,
+    GstVideoFormat format, guint width, guint height)
 {
-    GST_DEBUG("new pixmap, format %s, size %ux%u",
-              gst_vaapi_video_format_to_string(format), width, height);
+  GST_DEBUG ("new pixmap, format %s, size %ux%u",
+      gst_vaapi_video_format_to_string (format), width, height);
 
-    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY_X11(display), NULL);
+  g_return_val_if_fail (GST_VAAPI_IS_DISPLAY_X11 (display), NULL);
 
-    return gst_vaapi_pixmap_new(GST_VAAPI_PIXMAP_CLASS(
-            gst_vaapi_pixmap_x11_class()), display, format, width, height);
+  return
+      gst_vaapi_pixmap_new (GST_VAAPI_PIXMAP_CLASS (gst_vaapi_pixmap_x11_class
+          ()), display, format, width, height);
 }
 
 /**
@@ -205,15 +202,16 @@ gst_vaapi_pixmap_x11_new(GstVaapiDisplay *display, GstVideoFormat format,
  * Return value: the newly allocated #GstVaapiPixmap object
  */
 GstVaapiPixmap *
-gst_vaapi_pixmap_x11_new_with_xid(GstVaapiDisplay *display, Pixmap xid)
+gst_vaapi_pixmap_x11_new_with_xid (GstVaapiDisplay * display, Pixmap xid)
 {
-    GST_DEBUG("new pixmap from xid 0x%08x", (guint)xid);
+  GST_DEBUG ("new pixmap from xid 0x%08x", (guint) xid);
 
-    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY_X11(display), NULL);
-    g_return_val_if_fail(xid != None, NULL);
+  g_return_val_if_fail (GST_VAAPI_IS_DISPLAY_X11 (display), NULL);
+  g_return_val_if_fail (xid != None, NULL);
 
-    return gst_vaapi_pixmap_new_from_native(GST_VAAPI_PIXMAP_CLASS(
-            gst_vaapi_pixmap_x11_class()), display, GSIZE_TO_POINTER(xid));
+  return
+      gst_vaapi_pixmap_new_from_native (GST_VAAPI_PIXMAP_CLASS
+      (gst_vaapi_pixmap_x11_class ()), display, GSIZE_TO_POINTER (xid));
 }
 
 /**
@@ -227,11 +225,11 @@ gst_vaapi_pixmap_x11_new_with_xid(GstVaapiDisplay *display, Pixmap xid)
  * Return value: the underlying X11 Pixmap bound to @pixmap.
  */
 Pixmap
-gst_vaapi_pixmap_x11_get_xid(GstVaapiPixmapX11 *pixmap)
+gst_vaapi_pixmap_x11_get_xid (GstVaapiPixmapX11 * pixmap)
 {
-    g_return_val_if_fail(pixmap != NULL, None);
+  g_return_val_if_fail (pixmap != NULL, None);
 
-    return GST_VAAPI_OBJECT_ID(pixmap);
+  return GST_VAAPI_OBJECT_ID (pixmap);
 }
 
 /**
@@ -245,9 +243,9 @@ gst_vaapi_pixmap_x11_get_xid(GstVaapiPixmapX11 *pixmap)
  *   caller (foreign pixmap)
  */
 gboolean
-gst_vaapi_pixmap_x11_is_foreign_xid(GstVaapiPixmapX11 *pixmap)
+gst_vaapi_pixmap_x11_is_foreign_xid (GstVaapiPixmapX11 * pixmap)
 {
-    g_return_val_if_fail(pixmap != NULL, FALSE);
+  g_return_val_if_fail (pixmap != NULL, FALSE);
 
-    return GST_VAAPI_PIXMAP(pixmap)->use_foreign_pixmap;
+  return GST_VAAPI_PIXMAP (pixmap)->use_foreign_pixmap;
 }
index 5ebbb8a..3db05c9 100644 (file)
 #include "gstvaapiprofile.h"
 #include "gstvaapiworkarounds.h"
 
-typedef struct _GstVaapiCodecMap                GstVaapiCodecMap;
-typedef struct _GstVaapiProfileMap              GstVaapiProfileMap;
-typedef struct _GstVaapiEntrypointMap           GstVaapiEntrypointMap;
+typedef struct _GstVaapiCodecMap GstVaapiCodecMap;
+typedef struct _GstVaapiProfileMap GstVaapiProfileMap;
+typedef struct _GstVaapiEntrypointMap GstVaapiEntrypointMap;
 
-struct _GstVaapiCodecMap {
-    GstVaapiCodec               codec;
-    const gchar                *name;
+struct _GstVaapiCodecMap
+{
+  GstVaapiCodec codec;
+  const gchar *name;
 };
 
-struct _GstVaapiProfileMap {
-    GstVaapiProfile             profile;
-    VAProfile                   va_profile;
-    const char                 *media_str;
-    const gchar                *profile_str;
+struct _GstVaapiProfileMap
+{
+  GstVaapiProfile profile;
+  VAProfile va_profile;
+  const char *media_str;
+  const gchar *profile_str;
 };
 
-struct _GstVaapiEntrypointMap {
-    GstVaapiEntrypoint          entrypoint;
-    VAEntrypoint                va_entrypoint;
+struct _GstVaapiEntrypointMap
+{
+  GstVaapiEntrypoint entrypoint;
+  VAEntrypoint va_entrypoint;
 };
 
 /* Codecs */
 static const GstVaapiCodecMap gst_vaapi_codecs[] = {
-    { GST_VAAPI_CODEC_MPEG1,    "mpeg1" },
-    { GST_VAAPI_CODEC_MPEG2,    "mpeg2" },
-    { GST_VAAPI_CODEC_MPEG4,    "mpeg4" },
-    { GST_VAAPI_CODEC_H263,     "h263"  },
-    { GST_VAAPI_CODEC_H264,     "h264"  },
-    { GST_VAAPI_CODEC_WMV3,     "wmv3"  },
-    { GST_VAAPI_CODEC_VC1,      "vc1"   },
-    { GST_VAAPI_CODEC_JPEG,     "jpeg"  },
-    { GST_VAAPI_CODEC_VP8,      "vp8"   },
-    { GST_VAAPI_CODEC_H265,     "h265"  },
-    { GST_VAAPI_CODEC_VP9,      "vp9"   },
-    { 0, }
+  {GST_VAAPI_CODEC_MPEG1, "mpeg1"},
+  {GST_VAAPI_CODEC_MPEG2, "mpeg2"},
+  {GST_VAAPI_CODEC_MPEG4, "mpeg4"},
+  {GST_VAAPI_CODEC_H263, "h263"},
+  {GST_VAAPI_CODEC_H264, "h264"},
+  {GST_VAAPI_CODEC_WMV3, "wmv3"},
+  {GST_VAAPI_CODEC_VC1, "vc1"},
+  {GST_VAAPI_CODEC_JPEG, "jpeg"},
+  {GST_VAAPI_CODEC_VP8, "vp8"},
+  {GST_VAAPI_CODEC_H265, "h265"},
+  {GST_VAAPI_CODEC_VP9, "vp9"},
+  {0,}
 };
 
 /* Profiles */
 static const GstVaapiProfileMap gst_vaapi_profiles[] = {
-    { GST_VAAPI_PROFILE_MPEG2_SIMPLE, VAProfileMPEG2Simple,
-      "video/mpeg, mpegversion=2", "simple"
-    },
-    { GST_VAAPI_PROFILE_MPEG2_MAIN, VAProfileMPEG2Main,
-      "video/mpeg, mpegversion=2", "main"
-    },
-    { GST_VAAPI_PROFILE_MPEG4_SIMPLE, VAProfileMPEG4Simple,
-      "video/mpeg, mpegversion=4", "simple"
-    },
-    { GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
-      "video/mpeg, mpegversion=4", "advanced-simple"
-    },
-    { GST_VAAPI_PROFILE_MPEG4_MAIN, VAProfileMPEG4Main,
-      "video/mpeg, mpegversion=4", "main"
-    },
-    { GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
-      "video/x-divx, divxversion=5", "advanced-simple"
-    },
-    { GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
-      "video/x-xvid", "advanced-simple"
-    },
+  {GST_VAAPI_PROFILE_MPEG2_SIMPLE, VAProfileMPEG2Simple,
+      "video/mpeg, mpegversion=2", "simple"},
+  {GST_VAAPI_PROFILE_MPEG2_MAIN, VAProfileMPEG2Main,
+      "video/mpeg, mpegversion=2", "main"},
+  {GST_VAAPI_PROFILE_MPEG4_SIMPLE, VAProfileMPEG4Simple,
+      "video/mpeg, mpegversion=4", "simple"},
+  {GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
+      "video/mpeg, mpegversion=4", "advanced-simple"},
+  {GST_VAAPI_PROFILE_MPEG4_MAIN, VAProfileMPEG4Main,
+      "video/mpeg, mpegversion=4", "main"},
+  {GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
+      "video/x-divx, divxversion=5", "advanced-simple"},
+  {GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE, VAProfileMPEG4AdvancedSimple,
+      "video/x-xvid", "advanced-simple"},
 #if VA_CHECK_VERSION(0,30,0)
-    { GST_VAAPI_PROFILE_H263_BASELINE, VAProfileH263Baseline,
-      "video/x-h263, variant=itu, h263version=h263", "baseline"
-    },
+  {GST_VAAPI_PROFILE_H263_BASELINE, VAProfileH263Baseline,
+      "video/x-h263, variant=itu, h263version=h263", "baseline"},
 #endif
-    { GST_VAAPI_PROFILE_H264_BASELINE, VAProfileH264Baseline,
-      "video/x-h264", "baseline"
-    },
+  {GST_VAAPI_PROFILE_H264_BASELINE, VAProfileH264Baseline,
+      "video/x-h264", "baseline"},
 #if VA_CHECK_VERSION(0,31,1)
-    { GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE,
-      VAProfileH264ConstrainedBaseline,
-      "video/x-h264", "constrained-baseline"
-    },
+  {GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE,
+        VAProfileH264ConstrainedBaseline,
+      "video/x-h264", "constrained-baseline"},
 #endif
-    { GST_VAAPI_PROFILE_H264_MAIN, VAProfileH264Main,
-      "video/x-h264", "main"
-    },
-    { GST_VAAPI_PROFILE_H264_HIGH, VAProfileH264High,
-      "video/x-h264", "high"
-    },
+  {GST_VAAPI_PROFILE_H264_MAIN, VAProfileH264Main,
+      "video/x-h264", "main"},
+  {GST_VAAPI_PROFILE_H264_HIGH, VAProfileH264High,
+      "video/x-h264", "high"},
 #if VA_CHECK_VERSION(0,35,2)
-    { GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH, VAProfileH264MultiviewHigh,
-      "video/x-h264", "multiview-high"
-    },
-    { GST_VAAPI_PROFILE_H264_STEREO_HIGH, VAProfileH264StereoHigh,
-      "video/x-h264", "stereo-high"
-    },
+  {GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH, VAProfileH264MultiviewHigh,
+      "video/x-h264", "multiview-high"},
+  {GST_VAAPI_PROFILE_H264_STEREO_HIGH, VAProfileH264StereoHigh,
+      "video/x-h264", "stereo-high"},
 #endif
-    { GST_VAAPI_PROFILE_VC1_SIMPLE, VAProfileVC1Simple,
-      "video/x-wmv, wmvversion=3", "simple"
-    },
-    { GST_VAAPI_PROFILE_VC1_MAIN, VAProfileVC1Main,
-      "video/x-wmv, wmvversion=3", "main"
-    },
-    { GST_VAAPI_PROFILE_VC1_ADVANCED, VAProfileVC1Advanced,
-      "video/x-wmv, wmvversion=3, format=(string)WVC1", "advanced"
-    },
+  {GST_VAAPI_PROFILE_VC1_SIMPLE, VAProfileVC1Simple,
+      "video/x-wmv, wmvversion=3", "simple"},
+  {GST_VAAPI_PROFILE_VC1_MAIN, VAProfileVC1Main,
+      "video/x-wmv, wmvversion=3", "main"},
+  {GST_VAAPI_PROFILE_VC1_ADVANCED, VAProfileVC1Advanced,
+      "video/x-wmv, wmvversion=3, format=(string)WVC1", "advanced"},
 #if VA_CHECK_VERSION(0,32,0)
-    { GST_VAAPI_PROFILE_JPEG_BASELINE, VAProfileJPEGBaseline,
-      "image/jpeg", NULL
-    },
+  {GST_VAAPI_PROFILE_JPEG_BASELINE, VAProfileJPEGBaseline,
+      "image/jpeg", NULL},
 #endif
 #if VA_CHECK_VERSION(0,35,0)
-    { GST_VAAPI_PROFILE_VP8, VAProfileVP8Version0_3,
-      "video/x-vp8", NULL
-    },
+  {GST_VAAPI_PROFILE_VP8, VAProfileVP8Version0_3,
+      "video/x-vp8", NULL},
 #endif
 #if VA_CHECK_VERSION(0,37,0)
-    { GST_VAAPI_PROFILE_H265_MAIN, VAProfileHEVCMain,
-      "video/x-h265", "main"
-    },
-    { GST_VAAPI_PROFILE_H265_MAIN10, VAProfileHEVCMain10,
-      "video/x-h265", "main-10"
-    },
+  {GST_VAAPI_PROFILE_H265_MAIN, VAProfileHEVCMain,
+      "video/x-h265", "main"},
+  {GST_VAAPI_PROFILE_H265_MAIN10, VAProfileHEVCMain10,
+      "video/x-h265", "main-10"},
 #endif
 #if VA_CHECK_VERSION(0,38,0)
-    { GST_VAAPI_PROFILE_VP9, VAProfileVP9Profile0,
-      "video/x-vp9", NULL
-    },
+  {GST_VAAPI_PROFILE_VP9, VAProfileVP9Profile0,
+      "video/x-vp9", NULL},
 #endif
-    { 0, }
+  {0,}
 };
 
 /* Entry-points */
 static const GstVaapiEntrypointMap gst_vaapi_entrypoints[] = {
-    { GST_VAAPI_ENTRYPOINT_VLD,            VAEntrypointVLD          },
-    { GST_VAAPI_ENTRYPOINT_IDCT,           VAEntrypointIDCT         },
-    { GST_VAAPI_ENTRYPOINT_MOCO,           VAEntrypointMoComp       },
+  {GST_VAAPI_ENTRYPOINT_VLD, VAEntrypointVLD},
+  {GST_VAAPI_ENTRYPOINT_IDCT, VAEntrypointIDCT},
+  {GST_VAAPI_ENTRYPOINT_MOCO, VAEntrypointMoComp},
 #if VA_CHECK_VERSION(0,30,0)
-    { GST_VAAPI_ENTRYPOINT_SLICE_ENCODE,   VAEntrypointEncSlice     },
-    { GST_VAAPI_ENTRYPOINT_PICTURE_ENCODE, VAEntrypointEncPicture   },
+  {GST_VAAPI_ENTRYPOINT_SLICE_ENCODE, VAEntrypointEncSlice},
+  {GST_VAAPI_ENTRYPOINT_PICTURE_ENCODE, VAEntrypointEncPicture},
 #endif
-    { 0, }
+  {0,}
 };
 
 static const GstVaapiCodecMap *
-get_codecs_map(GstVaapiCodec codec)
+get_codecs_map (GstVaapiCodec codec)
 {
-    const GstVaapiCodecMap *m;
+  const GstVaapiCodecMap *m;
 
-    for (m = gst_vaapi_codecs; m->codec; m++)
-        if (m->codec == codec)
-            return m;
-    return NULL;
+  for (m = gst_vaapi_codecs; m->codec; m++)
+    if (m->codec == codec)
+      return m;
+  return NULL;
 }
 
 static const GstVaapiProfileMap *
-get_profiles_map(GstVaapiProfile profile)
+get_profiles_map (GstVaapiProfile profile)
 {
-    const GstVaapiProfileMap *m;
+  const GstVaapiProfileMap *m;
 
-    for (m = gst_vaapi_profiles; m->profile; m++)
-        if (m->profile == profile)
-            return m;
-    return NULL;
+  for (m = gst_vaapi_profiles; m->profile; m++)
+    if (m->profile == profile)
+      return m;
+  return NULL;
 }
 
 static const GstVaapiEntrypointMap *
-get_entrypoints_map(GstVaapiEntrypoint entrypoint)
+get_entrypoints_map (GstVaapiEntrypoint entrypoint)
 {
-    const GstVaapiEntrypointMap *m;
+  const GstVaapiEntrypointMap *m;
 
-    for (m = gst_vaapi_entrypoints; m->entrypoint; m++)
-        if (m->entrypoint == entrypoint)
-            return m;
-    return NULL;
+  for (m = gst_vaapi_entrypoints; m->entrypoint; m++)
+    if (m->entrypoint == entrypoint)
+      return m;
+  return NULL;
 }
 
 /**
@@ -211,11 +192,11 @@ get_entrypoints_map(GstVaapiEntrypoint entrypoint)
  * Return value: the statically allocated string representation of @codec
  */
 const gchar *
-gst_vaapi_codec_get_name(GstVaapiCodec codec)
+gst_vaapi_codec_get_name (GstVaapiCodec codec)
 {
-    const GstVaapiCodecMap * const m = get_codecs_map(codec);
+  const GstVaapiCodecMap *const m = get_codecs_map (codec);
 
-    return m ? m->name : NULL;
+  return m ? m->name : NULL;
 }
 
 /**
@@ -229,14 +210,14 @@ gst_vaapi_codec_get_name(GstVaapiCodec codec)
  * Return value: the #GstVaapiProfile describing the @profile
  */
 GstVaapiProfile
-gst_vaapi_profile(VAProfile profile)
+gst_vaapi_profile (VAProfile profile)
 {
-    const GstVaapiProfileMap *m;
+  const GstVaapiProfileMap *m;
 
-    for (m = gst_vaapi_profiles; m->profile; m++)
-        if (m->va_profile == profile)
-            return m->profile;
-    return 0;
+  for (m = gst_vaapi_profiles; m->profile; m++)
+    if (m->va_profile == profile)
+      return m->profile;
+  return 0;
 }
 
 /**
@@ -248,11 +229,11 @@ gst_vaapi_profile(VAProfile profile)
  * Return value: the statically allocated string representation of @profile
  */
 const gchar *
-gst_vaapi_profile_get_name(GstVaapiProfile profile)
+gst_vaapi_profile_get_name (GstVaapiProfile profile)
 {
-    const GstVaapiProfileMap * const m = get_profiles_map(profile);
+  const GstVaapiProfileMap *const m = get_profiles_map (profile);
 
-    return m ? m->profile_str : NULL;
+  return m ? m->profile_str : NULL;
 }
 
 /**
@@ -266,11 +247,11 @@ gst_vaapi_profile_get_name(GstVaapiProfile profile)
  *   @profile media type
  */
 const gchar *
-gst_vaapi_profile_get_media_type_name(GstVaapiProfile profile)
+gst_vaapi_profile_get_media_type_name (GstVaapiProfile profile)
 {
-    const GstVaapiProfileMap * const m = get_profiles_map(profile);
+  const GstVaapiProfileMap *const m = get_profiles_map (profile);
 
-    return m ? m->media_str : NULL;
+  return m ? m->media_str : NULL;
 }
 
 /**
@@ -283,73 +264,81 @@ gst_vaapi_profile_get_media_type_name(GstVaapiProfile profile)
  * Return value: the #GstVaapiProfile described in @buffer
  */
 static GstVaapiProfile
-gst_vaapi_profile_from_codec_data_h264(GstBuffer *buffer)
+gst_vaapi_profile_from_codec_data_h264 (GstBuffer * buffer)
 {
-    /* MPEG-4 Part 15: Advanced Video Coding (AVC) file format */
-    guchar buf[3];
-
-    if (gst_buffer_extract(buffer, 0, buf, sizeof(buf)) != sizeof(buf))
-        return 0;
+  /* MPEG-4 Part 15: Advanced Video Coding (AVC) file format */
+  guchar buf[3];
 
-    if (buf[0] != 1)    /* configurationVersion = 1 */
-        return 0;
-
-    switch (buf[1]) {   /* AVCProfileIndication */
-    case 66:    return ((buf[2] & 0x40) ?
-                        GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE :
-                        GST_VAAPI_PROFILE_H264_BASELINE);
-    case 77:    return GST_VAAPI_PROFILE_H264_MAIN;
-    case 100:   return GST_VAAPI_PROFILE_H264_HIGH;
-    case 118:   return GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH;
-    case 128:   return GST_VAAPI_PROFILE_H264_STEREO_HIGH;
+  if (gst_buffer_extract (buffer, 0, buf, sizeof (buf)) != sizeof (buf))
+    return 0;
 
-    }
+  if (buf[0] != 1)              /* configurationVersion = 1 */
     return 0;
+
+  switch (buf[1]) {             /* AVCProfileIndication */
+    case 66:
+      return ((buf[2] & 0x40) ?
+          GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE :
+          GST_VAAPI_PROFILE_H264_BASELINE);
+    case 77:
+      return GST_VAAPI_PROFILE_H264_MAIN;
+    case 100:
+      return GST_VAAPI_PROFILE_H264_HIGH;
+    case 118:
+      return GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH;
+    case 128:
+      return GST_VAAPI_PROFILE_H264_STEREO_HIGH;
+
+  }
+  return 0;
 }
 
 static GstVaapiProfile
-gst_vaapi_profile_from_codec_data_h265(GstBuffer *buffer)
+gst_vaapi_profile_from_codec_data_h265 (GstBuffer * buffer)
 {
-   /* ISO/IEC 14496-15:  HEVC file format */
-    guchar buf[3];
+  /* ISO/IEC 14496-15:  HEVC file format */
+  guchar buf[3];
 
-    if (gst_buffer_extract(buffer, 0, buf, sizeof(buf)) != sizeof(buf))
-        return 0;
-
-    if (buf[0] != 1)    /* configurationVersion = 1 */
-        return 0;
+  if (gst_buffer_extract (buffer, 0, buf, sizeof (buf)) != sizeof (buf))
+    return 0;
 
-    if (buf[1] & 0xc0)  /* general_profile_space = 0 */
-        return 0;
+  if (buf[0] != 1)              /* configurationVersion = 1 */
+    return 0;
 
-    switch (buf[1] & 0x1f) {   /* HEVCProfileIndication */
-    case 1:   return GST_VAAPI_PROFILE_H265_MAIN;
-    case 2:   return GST_VAAPI_PROFILE_H265_MAIN10;
-    case 3:   return GST_VAAPI_PROFILE_H265_MAIN_STILL_PICTURE;
-    }
+  if (buf[1] & 0xc0)            /* general_profile_space = 0 */
     return 0;
+
+  switch (buf[1] & 0x1f) {      /* HEVCProfileIndication */
+    case 1:
+      return GST_VAAPI_PROFILE_H265_MAIN;
+    case 2:
+      return GST_VAAPI_PROFILE_H265_MAIN10;
+    case 3:
+      return GST_VAAPI_PROFILE_H265_MAIN_STILL_PICTURE;
+  }
+  return 0;
 }
 
 static GstVaapiProfile
-gst_vaapi_profile_from_codec_data(GstVaapiCodec codec, GstBuffer *buffer)
+gst_vaapi_profile_from_codec_data (GstVaapiCodec codec, GstBuffer * buffer)
 {
-    GstVaapiProfile profile;
+  GstVaapiProfile profile;
 
-    if (!codec || !buffer)
-        return 0;
+  if (!codec || !buffer)
+    return 0;
 
-    switch (codec) {
+  switch (codec) {
     case GST_VAAPI_CODEC_H264:
-        profile = gst_vaapi_profile_from_codec_data_h264(buffer);
-        break;
+      profile = gst_vaapi_profile_from_codec_data_h264 (buffer);
+      break;
     case GST_VAAPI_CODEC_H265:
-        profile = gst_vaapi_profile_from_codec_data_h265(buffer);
-        break;
+      profile = gst_vaapi_profile_from_codec_data_h265 (buffer);
+      break;
     default:
-        profile = 0;
-        break;
-    }
-    return profile;
+      profile = 0;
+      break;
+  }
+  return profile;
 }
 
 /**
@@ -363,62 +352,60 @@ gst_vaapi_profile_from_codec_data(GstVaapiCodec codec, GstBuffer *buffer)
  * Return value: the #GstVaapiProfile describing the @caps
  */
 GstVaapiProfile
-gst_vaapi_profile_from_caps(const GstCaps *caps)
+gst_vaapi_profile_from_caps (const GstCaps * caps)
 {
-    const GstVaapiProfileMap *m;
-    GstCaps *caps_test;
-    GstStructure *structure;
-    const gchar *profile_str;
-    GstVaapiProfile profile, best_profile;
-    GstBuffer *codec_data = NULL;
-    const gchar *name;
-    gsize namelen;
-
-    if (!caps)
-        return 0;
-
-    structure = gst_caps_get_structure(caps, 0);
-    if (!structure)
-        return 0;
+  const GstVaapiProfileMap *m;
+  GstCaps *caps_test;
+  GstStructure *structure;
+  const gchar *profile_str;
+  GstVaapiProfile profile, best_profile;
+  GstBuffer *codec_data = NULL;
+  const gchar *name;
+  gsize namelen;
+
+  if (!caps)
+    return 0;
 
-    name    = gst_structure_get_name(structure);
-    namelen = strlen(name);
+  structure = gst_caps_get_structure (caps, 0);
+  if (!structure)
+    return 0;
 
-    profile_str = gst_structure_get_string(structure, "profile");
-    if (!profile_str) {
-        const GValue *v_codec_data;
-        v_codec_data = gst_structure_get_value(structure, "codec_data");
-        if (v_codec_data)
-            codec_data = gst_value_get_buffer(v_codec_data);
+  name = gst_structure_get_name (structure);
+  namelen = strlen (name);
+
+  profile_str = gst_structure_get_string (structure, "profile");
+  if (!profile_str) {
+    const GValue *v_codec_data;
+    v_codec_data = gst_structure_get_value (structure, "codec_data");
+    if (v_codec_data)
+      codec_data = gst_value_get_buffer (v_codec_data);
+  }
+
+  profile = 0;
+  best_profile = 0;
+  for (m = gst_vaapi_profiles; !profile && m->profile; m++) {
+    if (strncmp (name, m->media_str, namelen) != 0)
+      continue;
+    caps_test = gst_caps_from_string (m->media_str);
+    if (gst_caps_is_always_compatible (caps, caps_test)) {
+      best_profile = m->profile;
+      if (profile_str && m->profile_str &&
+          strcmp (profile_str, m->profile_str) == 0)
+        profile = best_profile;
     }
-
-    profile = 0;
-    best_profile = 0;
-    for (m = gst_vaapi_profiles; !profile && m->profile; m++) {
-        if (strncmp(name, m->media_str, namelen) != 0)
-            continue;
-        caps_test = gst_caps_from_string(m->media_str);
-        if (gst_caps_is_always_compatible(caps, caps_test)) {
-            best_profile = m->profile;
-            if (profile_str && m->profile_str &&
-                strcmp(profile_str, m->profile_str) == 0)
-                profile = best_profile;
-        }
-        if (!profile) {
-            profile = gst_vaapi_profile_from_codec_data(
-                gst_vaapi_profile_get_codec(m->profile),
-                codec_data
-            );
-            if (!profile &&
-                WORKAROUND_QTDEMUX_NO_H263_PROFILES &&
-                strncmp(name, "video/x-h263", namelen) == 0) {
-                /* HACK: qtdemux does not report profiles for h263 */
-                profile = m->profile;
-            }
-        }
-        gst_caps_unref(caps_test);
+    if (!profile) {
+      profile =
+          gst_vaapi_profile_from_codec_data (gst_vaapi_profile_get_codec
+          (m->profile), codec_data);
+      if (!profile && WORKAROUND_QTDEMUX_NO_H263_PROFILES
+          && strncmp (name, "video/x-h263", namelen) == 0) {
+        /* HACK: qtdemux does not report profiles for h263 */
+        profile = m->profile;
+      }
     }
-    return profile ? profile : best_profile;
+    gst_caps_unref (caps_test);
+  }
+  return profile ? profile : best_profile;
 }
 
 /**
@@ -432,11 +419,11 @@ gst_vaapi_profile_from_caps(const GstCaps *caps)
  * Return value: the VA profile, or -1 if none was found
  */
 VAProfile
-gst_vaapi_profile_get_va_profile(GstVaapiProfile profile)
+gst_vaapi_profile_get_va_profile (GstVaapiProfile profile)
 {
-    const GstVaapiProfileMap * const m = get_profiles_map(profile);
+  const GstVaapiProfileMap *const m = get_profiles_map (profile);
 
-    return m ? m->va_profile : (VAProfile)-1;
+  return m ? m->va_profile : (VAProfile) - 1;
 }
 
 /**
@@ -449,29 +436,25 @@ gst_vaapi_profile_get_va_profile(GstVaapiProfile profile)
  * Return value: the newly allocated #GstCaps, or %NULL if none was found
  */
 GstCaps *
-gst_vaapi_profile_get_caps(GstVaapiProfile profile)
+gst_vaapi_profile_get_caps (GstVaapiProfile profile)
 {
-    const GstVaapiProfileMap *m;
-    GstCaps *out_caps, *caps;
-
-    out_caps = gst_caps_new_empty();
-    if (!out_caps)
-        return NULL;
-
-    for (m = gst_vaapi_profiles; m->profile; m++) {
-        if (m->profile != profile)
-            continue;
-        caps = gst_caps_from_string(m->media_str);
-        if (!caps)
-            continue;
-        gst_caps_set_simple(
-            caps,
-            "profile", G_TYPE_STRING, m->profile_str,
-            NULL
-        );
-        out_caps = gst_caps_merge(out_caps, caps);
-    }
-    return out_caps;
+  const GstVaapiProfileMap *m;
+  GstCaps *out_caps, *caps;
+
+  out_caps = gst_caps_new_empty ();
+  if (!out_caps)
+    return NULL;
+
+  for (m = gst_vaapi_profiles; m->profile; m++) {
+    if (m->profile != profile)
+      continue;
+    caps = gst_caps_from_string (m->media_str);
+    if (!caps)
+      continue;
+    gst_caps_set_simple (caps, "profile", G_TYPE_STRING, m->profile_str, NULL);
+    out_caps = gst_caps_merge (out_caps, caps);
+  }
+  return out_caps;
 }
 
 /**
@@ -483,26 +466,26 @@ gst_vaapi_profile_get_caps(GstVaapiProfile profile)
  * Return value: the #GstVaapiCodec from @profile
  */
 GstVaapiCodec
-gst_vaapi_profile_get_codec(GstVaapiProfile profile)
+gst_vaapi_profile_get_codec (GstVaapiProfile profile)
 {
-    GstVaapiCodec codec;
+  GstVaapiCodec codec;
 
-    switch (profile) {
+  switch (profile) {
     case GST_VAAPI_PROFILE_VC1_SIMPLE:
     case GST_VAAPI_PROFILE_VC1_MAIN:
-        codec = GST_VAAPI_CODEC_WMV3;
-        break;
+      codec = GST_VAAPI_CODEC_WMV3;
+      break;
     case GST_VAAPI_PROFILE_VC1_ADVANCED:
-        codec = GST_VAAPI_CODEC_VC1;
-        break;
+      codec = GST_VAAPI_CODEC_VC1;
+      break;
     case GST_VAAPI_PROFILE_JPEG_BASELINE:
-        codec = GST_VAAPI_CODEC_JPEG;
-        break;
+      codec = GST_VAAPI_CODEC_JPEG;
+      break;
     default:
-        codec = (guint32)profile & GST_MAKE_FOURCC(0xff,0xff,0xff,0);
-        break;
-    }
-    return codec;
+      codec = (guint32) profile & GST_MAKE_FOURCC (0xff, 0xff, 0xff, 0);
+      break;
+  }
+  return codec;
 }
 
 /**
@@ -516,14 +499,14 @@ gst_vaapi_profile_get_codec(GstVaapiProfile profile)
  * Return value: the #GstVaapiEntrypoint describing the @entrypoint
  */
 GstVaapiEntrypoint
-gst_vaapi_entrypoint(VAEntrypoint entrypoint)
+gst_vaapi_entrypoint (VAEntrypoint entrypoint)
 {
-    const GstVaapiEntrypointMap *m;
+  const GstVaapiEntrypointMap *m;
 
-    for (m = gst_vaapi_entrypoints; m->entrypoint; m++)
-        if (m->va_entrypoint == entrypoint)
-            return m->entrypoint;
-    return 0;
+  for (m = gst_vaapi_entrypoints; m->entrypoint; m++)
+    if (m->va_entrypoint == entrypoint)
+      return m->entrypoint;
+  return 0;
 }
 
 /**
@@ -537,9 +520,9 @@ gst_vaapi_entrypoint(VAEntrypoint entrypoint)
  * Return value: the VA entry-point, or -1 if none was found
  */
 VAEntrypoint
-gst_vaapi_entrypoint_get_va_entrypoint(GstVaapiEntrypoint entrypoint)
+gst_vaapi_entrypoint_get_va_entrypoint (GstVaapiEntrypoint entrypoint)
 {
-    const GstVaapiEntrypointMap * const m = get_entrypoints_map(entrypoint);
+  const GstVaapiEntrypointMap *const m = get_entrypoints_map (entrypoint);
 
-    return m ? m->va_entrypoint : (VAEntrypoint)-1;
+  return m ? m->va_entrypoint : (VAEntrypoint) - 1;
 }
index 370ff87..021de15 100644 (file)
 #define DEBUG 1
 #include "gstvaapidebug.h"
 
-typedef struct _GstVaapiSubpictureClass         GstVaapiSubpictureClass;
+typedef struct _GstVaapiSubpictureClass GstVaapiSubpictureClass;
 
 /**
  * GstVaapiSubpicture:
  *
  * A VA subpicture wrapper
  */
-struct _GstVaapiSubpicture {
-    /*< private >*/
-    GstVaapiObject parent_instance;
+struct _GstVaapiSubpicture
+{
+  /*< private > */
+  GstVaapiObject parent_instance;
 
-    GstVaapiImage      *image;
-    guint               flags;
-    gfloat              global_alpha;
+  GstVaapiImage *image;
+  guint flags;
+  gfloat global_alpha;
 };
 
 /**
@@ -59,66 +60,62 @@ struct _GstVaapiSubpicture {
  *
  * A VA subpicture wrapper class
  */
-struct _GstVaapiSubpictureClass {
-    /*< private >*/
-    GstVaapiObjectClass parent_class;
+struct _GstVaapiSubpictureClass
+{
+  /*< private > */
+  GstVaapiObjectClass parent_class;
 };
 
 static void
-gst_vaapi_subpicture_destroy(GstVaapiSubpicture *subpicture)
+gst_vaapi_subpicture_destroy (GstVaapiSubpicture * subpicture)
 {
-    GstVaapiDisplay * const display = GST_VAAPI_OBJECT_DISPLAY(subpicture);
-    VASubpictureID subpicture_id;
-    VAStatus status;
-
-    subpicture_id = GST_VAAPI_OBJECT_ID(subpicture);
-    GST_DEBUG("subpicture %" GST_VAAPI_ID_FORMAT,
-              GST_VAAPI_ID_ARGS(subpicture_id));
-
-    if (subpicture_id != VA_INVALID_ID) {
-        if (display) {
-            GST_VAAPI_DISPLAY_LOCK(display);
-            status = vaDestroySubpicture(
-                GST_VAAPI_DISPLAY_VADISPLAY(display),
-                subpicture_id
-            );
-            GST_VAAPI_DISPLAY_UNLOCK(display);
-            if (!vaapi_check_status(status, "vaDestroySubpicture()"))
-                g_warning("failed to destroy subpicture %" GST_VAAPI_ID_FORMAT,
-                          GST_VAAPI_ID_ARGS(subpicture_id));
-        }
-        GST_VAAPI_OBJECT_ID(subpicture) = VA_INVALID_ID;
+  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (subpicture);
+  VASubpictureID subpicture_id;
+  VAStatus status;
+
+  subpicture_id = GST_VAAPI_OBJECT_ID (subpicture);
+  GST_DEBUG ("subpicture %" GST_VAAPI_ID_FORMAT,
+      GST_VAAPI_ID_ARGS (subpicture_id));
+
+  if (subpicture_id != VA_INVALID_ID) {
+    if (display) {
+      GST_VAAPI_DISPLAY_LOCK (display);
+      status = vaDestroySubpicture (GST_VAAPI_DISPLAY_VADISPLAY (display),
+          subpicture_id);
+      GST_VAAPI_DISPLAY_UNLOCK (display);
+      if (!vaapi_check_status (status, "vaDestroySubpicture()"))
+        g_warning ("failed to destroy subpicture %" GST_VAAPI_ID_FORMAT,
+            GST_VAAPI_ID_ARGS (subpicture_id));
     }
-    gst_vaapi_object_replace(&subpicture->image, NULL);
+    GST_VAAPI_OBJECT_ID (subpicture) = VA_INVALID_ID;
+  }
+  gst_vaapi_object_replace (&subpicture->image, NULL);
 }
 
 static gboolean
-gst_vaapi_subpicture_create(GstVaapiSubpicture *subpicture,
-    GstVaapiImage *image)
+gst_vaapi_subpicture_create (GstVaapiSubpicture * subpicture,
+    GstVaapiImage * image)
 {
-    GstVaapiDisplay * const display = GST_VAAPI_OBJECT_DISPLAY(subpicture);
-    VASubpictureID subpicture_id;
-    VAStatus status;
-
-    GST_VAAPI_DISPLAY_LOCK(display);
-    status = vaCreateSubpicture(
-        GST_VAAPI_DISPLAY_VADISPLAY(display),
-        GST_VAAPI_OBJECT_ID(image),
-        &subpicture_id
-    );
-    GST_VAAPI_DISPLAY_UNLOCK(display);
-    if (!vaapi_check_status(status, "vaCreateSubpicture()"))
-        return FALSE;
-
-    GST_DEBUG("subpicture %" GST_VAAPI_ID_FORMAT,
-              GST_VAAPI_ID_ARGS(subpicture_id));
-    GST_VAAPI_OBJECT_ID(subpicture) = subpicture_id;
-    subpicture->image = gst_vaapi_object_ref(image);
-    return TRUE;
+  GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (subpicture);
+  VASubpictureID subpicture_id;
+  VAStatus status;
+
+  GST_VAAPI_DISPLAY_LOCK (display);
+  status = vaCreateSubpicture (GST_VAAPI_DISPLAY_VADISPLAY (display),
+      GST_VAAPI_OBJECT_ID (image), &subpicture_id);
+  GST_VAAPI_DISPLAY_UNLOCK (display);
+  if (!vaapi_check_status (status, "vaCreateSubpicture()"))
+    return FALSE;
+
+  GST_DEBUG ("subpicture %" GST_VAAPI_ID_FORMAT,
+      GST_VAAPI_ID_ARGS (subpicture_id));
+  GST_VAAPI_OBJECT_ID (subpicture) = subpicture_id;
+  subpicture->image = gst_vaapi_object_ref (image);
+  return TRUE;
 }
 
 #define gst_vaapi_subpicture_finalize gst_vaapi_subpicture_destroy
-GST_VAAPI_OBJECT_DEFINE_CLASS(GstVaapiSubpicture, gst_vaapi_subpicture)
+GST_VAAPI_OBJECT_DEFINE_CLASS (GstVaapiSubpicture, gst_vaapi_subpicture)
 
 /**
  * gst_vaapi_subpicture_new:
@@ -130,38 +127,38 @@ GST_VAAPI_OBJECT_DEFINE_CLASS(GstVaapiSubpicture, gst_vaapi_subpicture)
  *
  * Return value: the newly allocated #GstVaapiSubpicture object
  */
-GstVaapiSubpicture *
-gst_vaapi_subpicture_new(GstVaapiImage *image, guint flags)
+     GstVaapiSubpicture *gst_vaapi_subpicture_new (GstVaapiImage * image,
+    guint flags)
 {
-    GstVaapiSubpicture *subpicture;
-    GstVaapiDisplay *display;
-    GstVideoFormat format;
-    guint va_flags;
+  GstVaapiSubpicture *subpicture;
+  GstVaapiDisplay *display;
+  GstVideoFormat format;
+  guint va_flags;
 
-    g_return_val_if_fail(image != NULL, NULL);
+  g_return_val_if_fail (image != NULL, NULL);
 
-    GST_DEBUG("create from image %" GST_VAAPI_ID_FORMAT,
-              GST_VAAPI_ID_ARGS(GST_VAAPI_OBJECT_ID(image)));
+  GST_DEBUG ("create from image %" GST_VAAPI_ID_FORMAT,
+      GST_VAAPI_ID_ARGS (GST_VAAPI_OBJECT_ID (image)));
 
-    display = GST_VAAPI_OBJECT_DISPLAY(image);
-    format  = GST_VAAPI_IMAGE_FORMAT(image);
-    if (!gst_vaapi_display_has_subpicture_format(display, format, &va_flags))
-        return NULL;
-    if (flags & ~va_flags)
-        return NULL;
+  display = GST_VAAPI_OBJECT_DISPLAY (image);
+  format = GST_VAAPI_IMAGE_FORMAT (image);
+  if (!gst_vaapi_display_has_subpicture_format (display, format, &va_flags))
+    return NULL;
+  if (flags & ~va_flags)
+    return NULL;
 
-    subpicture = gst_vaapi_object_new(gst_vaapi_subpicture_class(), display);
-    if (!subpicture)
-        return NULL;
+  subpicture = gst_vaapi_object_new (gst_vaapi_subpicture_class (), display);
+  if (!subpicture)
+    return NULL;
 
-    subpicture->global_alpha = 1.0f;
-    if (!gst_vaapi_subpicture_set_image(subpicture, image))
-        goto error;
-    return subpicture;
+  subpicture->global_alpha = 1.0f;
+  if (!gst_vaapi_subpicture_set_image (subpicture, image))
+    goto error;
+  return subpicture;
 
 error:
-    gst_vaapi_object_unref(subpicture);
-    return NULL;
+  gst_vaapi_object_unref (subpicture);
+  return NULL;
 }
 
 /**
@@ -177,80 +174,80 @@ error:
  * Return value: the newly allocated #GstVaapiSubpicture object
  */
 GstVaapiSubpicture *
-gst_vaapi_subpicture_new_from_overlay_rectangle(
-    GstVaapiDisplay          *display,
-    GstVideoOverlayRectangle *rect
-)
+gst_vaapi_subpicture_new_from_overlay_rectangle (GstVaapiDisplay * display,
+    GstVideoOverlayRectangle * rect)
 {
-    GstVaapiSubpicture *subpicture;
-    GstVideoFormat format;
-    GstVaapiImage *image;
-    GstVaapiImageRaw raw_image;
-    GstBuffer *buffer;
-    guint8 *data;
-    gfloat global_alpha;
-    guint width, height, stride;
-    guint hw_flags, flags;
-    GstVideoMeta *vmeta;
-    GstMapInfo map_info;
-
-    g_return_val_if_fail(GST_IS_VIDEO_OVERLAY_RECTANGLE(rect), NULL);
-
-    /* XXX: use gst_vaapi_image_format_from_video() */
+  GstVaapiSubpicture *subpicture;
+  GstVideoFormat format;
+  GstVaapiImage *image;
+  GstVaapiImageRaw raw_image;
+  GstBuffer *buffer;
+  guint8 *data;
+  gfloat global_alpha;
+  guint width, height, stride;
+  guint hw_flags, flags;
+  GstVideoMeta *vmeta;
+  GstMapInfo map_info;
+
+  g_return_val_if_fail (GST_IS_VIDEO_OVERLAY_RECTANGLE (rect), NULL);
+
+  /* XXX: use gst_vaapi_image_format_from_video() */
 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
-    format = GST_VIDEO_FORMAT_BGRA;
+  format = GST_VIDEO_FORMAT_BGRA;
 #else
-    format = GST_VIDEO_FORMAT_ARGB;
+  format = GST_VIDEO_FORMAT_ARGB;
 #endif
-    if (!gst_vaapi_display_has_subpicture_format(display, format, &hw_flags))
-        return NULL;
-
-    flags = hw_flags & from_GstVideoOverlayFormatFlags(
-        gst_video_overlay_rectangle_get_flags(rect));
-
-    buffer = gst_video_overlay_rectangle_get_pixels_unscaled_argb(rect,
-        to_GstVideoOverlayFormatFlags(flags));
-    if (!buffer)
-        return NULL;
-
-    vmeta = gst_buffer_get_video_meta(buffer);
-    if (!vmeta)
-        return NULL;
-    width  = vmeta->width;
-    height = vmeta->height;
-
-    if (!gst_video_meta_map(vmeta, 0, &map_info, (gpointer *)&data,
-            (gint *)&stride, GST_MAP_READ))
-        return NULL;
-
-    image = gst_vaapi_image_new(display, format, width, height);
-    if (!image)
-        return NULL;
-
-    raw_image.format     = format;
-    raw_image.width      = width;
-    raw_image.height     = height;
-    raw_image.num_planes = 1;
-    raw_image.pixels[0]  = data;
-    raw_image.stride[0]  = stride;
-    if (!gst_vaapi_image_update_from_raw(image, &raw_image, NULL)) {
-        GST_WARNING("could not update VA image with subtitle data");
-        gst_vaapi_object_unref(image);
-        return NULL;
-    }
+  if (!gst_vaapi_display_has_subpicture_format (display, format, &hw_flags))
+    return NULL;
 
-    subpicture = gst_vaapi_subpicture_new(image, flags);
-    gst_vaapi_object_unref(image);
-    gst_video_meta_unmap(vmeta, 0, &map_info);
-    if (!subpicture)
-        return NULL;
+  flags =
+      hw_flags &
+      from_GstVideoOverlayFormatFlags (gst_video_overlay_rectangle_get_flags
+      (rect));
 
-    if (flags & GST_VAAPI_SUBPICTURE_FLAG_GLOBAL_ALPHA) {
-        global_alpha = gst_video_overlay_rectangle_get_global_alpha(rect);
-        if (!gst_vaapi_subpicture_set_global_alpha(subpicture, global_alpha))
-            return NULL;
-    }
-    return subpicture;
+  buffer = gst_video_overlay_rectangle_get_pixels_unscaled_argb (rect,
+      to_GstVideoOverlayFormatFlags (flags));
+  if (!buffer)
+    return NULL;
+
+  vmeta = gst_buffer_get_video_meta (buffer);
+  if (!vmeta)
+    return NULL;
+  width = vmeta->width;
+  height = vmeta->height;
+
+  if (!gst_video_meta_map (vmeta, 0, &map_info, (gpointer *) & data,
+          (gint *) & stride, GST_MAP_READ))
+    return NULL;
+
+  image = gst_vaapi_image_new (display, format, width, height);
+  if (!image)
+    return NULL;
+
+  raw_image.format = format;
+  raw_image.width = width;
+  raw_image.height = height;
+  raw_image.num_planes = 1;
+  raw_image.pixels[0] = data;
+  raw_image.stride[0] = stride;
+  if (!gst_vaapi_image_update_from_raw (image, &raw_image, NULL)) {
+    GST_WARNING ("could not update VA image with subtitle data");
+    gst_vaapi_object_unref (image);
+    return NULL;
+  }
+
+  subpicture = gst_vaapi_subpicture_new (image, flags);
+  gst_vaapi_object_unref (image);
+  gst_video_meta_unmap (vmeta, 0, &map_info);
+  if (!subpicture)
+    return NULL;
+
+  if (flags & GST_VAAPI_SUBPICTURE_FLAG_GLOBAL_ALPHA) {
+    global_alpha = gst_video_overlay_rectangle_get_global_alpha (rect);
+    if (!gst_vaapi_subpicture_set_global_alpha (subpicture, global_alpha))
+      return NULL;
+  }
+  return subpicture;
 }
 
 /**
@@ -262,11 +259,11 @@ gst_vaapi_subpicture_new_from_overlay_rectangle(
  * Return value: the underlying VA subpicture id
  */
 GstVaapiID
-gst_vaapi_subpicture_get_id(GstVaapiSubpicture *subpicture)
+gst_vaapi_subpicture_get_id (GstVaapiSubpicture * subpicture)
 {
-    g_return_val_if_fail(subpicture != NULL, VA_INVALID_ID);
+  g_return_val_if_fail (subpicture != NULL, VA_INVALID_ID);
 
-    return GST_VAAPI_OBJECT_ID(subpicture);
+  return GST_VAAPI_OBJECT_ID (subpicture);
 }
 
 /**
@@ -278,11 +275,11 @@ gst_vaapi_subpicture_get_id(GstVaapiSubpicture *subpicture)
  * Return value: the @subpicture flags
  */
 guint
-gst_vaapi_subpicture_get_flags(GstVaapiSubpicture *subpicture)
+gst_vaapi_subpicture_get_flags (GstVaapiSubpicture * subpicture)
 {
-    g_return_val_if_fail(subpicture != NULL, 0);
+  g_return_val_if_fail (subpicture != NULL, 0);
 
-    return subpicture->flags;
+  return subpicture->flags;
 }
 
 /**
@@ -294,11 +291,11 @@ gst_vaapi_subpicture_get_flags(GstVaapiSubpicture *subpicture)
  * Return value: the #GstVaapiImage this @subpicture is bound to
  */
 GstVaapiImage *
-gst_vaapi_subpicture_get_image(GstVaapiSubpicture *subpicture)
+gst_vaapi_subpicture_get_image (GstVaapiSubpicture * subpicture)
 {
-    g_return_val_if_fail(subpicture != NULL, NULL);
+  g_return_val_if_fail (subpicture != NULL, NULL);
 
-    return subpicture->image;
+  return subpicture->image;
 }
 
 /**
@@ -312,14 +309,14 @@ gst_vaapi_subpicture_get_image(GstVaapiSubpicture *subpicture)
  * Return value: %TRUE on success
  */
 gboolean
-gst_vaapi_subpicture_set_image(GstVaapiSubpicture *subpicture,
-    GstVaapiImage *image)
+gst_vaapi_subpicture_set_image (GstVaapiSubpicture * subpicture,
+    GstVaapiImage * image)
 {
-    g_return_val_if_fail(subpicture != NULL, FALSE);
-    g_return_val_if_fail(image != NULL, FALSE);
+  g_return_val_if_fail (subpicture != NULL, FALSE);
+  g_return_val_if_fail (image != NULL, FALSE);
 
-    gst_vaapi_subpicture_destroy(subpicture);
-    return gst_vaapi_subpicture_create(subpicture, image);
+  gst_vaapi_subpicture_destroy (subpicture);
+  return gst_vaapi_subpicture_create (subpicture, image);
 }
 
 /**
@@ -331,11 +328,11 @@ gst_vaapi_subpicture_set_image(GstVaapiSubpicture *subpicture,
  * Return value: the global_alpha value of this @subpicture
  */
 gfloat
-gst_vaapi_subpicture_get_global_alpha(GstVaapiSubpicture *subpicture)
+gst_vaapi_subpicture_get_global_alpha (GstVaapiSubpicture * subpicture)
 {
-    g_return_val_if_fail(subpicture != NULL, 1.0);
+  g_return_val_if_fail (subpicture != NULL, 1.0);
 
-    return subpicture->global_alpha;
+  return subpicture->global_alpha;
 }
 
 /**
@@ -350,32 +347,29 @@ gst_vaapi_subpicture_get_global_alpha(GstVaapiSubpicture *subpicture)
  * Return value: %TRUE if global_alpha could be set, %FALSE otherwise
  */
 gboolean
-gst_vaapi_subpicture_set_global_alpha(GstVaapiSubpicture *subpicture,
+gst_vaapi_subpicture_set_global_alpha (GstVaapiSubpicture * subpicture,
     gfloat global_alpha)
 {
-    GstVaapiDisplay *display;
-    VAStatus status;
+  GstVaapiDisplay *display;
+  VAStatus status;
 
-    g_return_val_if_fail(subpicture != NULL, FALSE);
+  g_return_val_if_fail (subpicture != NULL, FALSE);
 
-    if (!(subpicture->flags & GST_VAAPI_SUBPICTURE_FLAG_GLOBAL_ALPHA))
-        return FALSE;
+  if (!(subpicture->flags & GST_VAAPI_SUBPICTURE_FLAG_GLOBAL_ALPHA))
+    return FALSE;
 
-    if (subpicture->global_alpha == global_alpha)
-        return TRUE;
+  if (subpicture->global_alpha == global_alpha)
+    return TRUE;
 
-    display = GST_VAAPI_OBJECT_DISPLAY(subpicture);
+  display = GST_VAAPI_OBJECT_DISPLAY (subpicture);
 
-    GST_VAAPI_DISPLAY_LOCK(display);
-    status = vaSetSubpictureGlobalAlpha(
-        GST_VAAPI_DISPLAY_VADISPLAY(display),
-        GST_VAAPI_OBJECT_ID(subpicture),
-        global_alpha
-    );
-    GST_VAAPI_DISPLAY_UNLOCK(display);
-    if (!vaapi_check_status(status, "vaSetSubpictureGlobalAlpha()"))
-        return FALSE;
+  GST_VAAPI_DISPLAY_LOCK (display);
+  status = vaSetSubpictureGlobalAlpha (GST_VAAPI_DISPLAY_VADISPLAY (display),
+      GST_VAAPI_OBJECT_ID (subpicture), global_alpha);
+  GST_VAAPI_DISPLAY_UNLOCK (display);
+  if (!vaapi_check_status (status, "vaSetSubpictureGlobalAlpha()"))
+    return FALSE;
 
-    subpicture->global_alpha = global_alpha;
-    return TRUE;
+  subpicture->global_alpha = global_alpha;
+  return TRUE;
 }