[Converter] get possible caps from downstream
author: Jaeyun Jung <jy1210.jung@samsung.com>
Wed, 21 Nov 2018 10:41:34 +0000 (19:41 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Tue, 27 Nov 2018 02:54:10 +0000 (11:54 +0900)
When a caps query is received, get the possible caps of the peer pad and convert them to media caps.
Related issue #818

Signed-off-by: Jaeyun Jung <jy1210.jung@samsung.com>
gst/tensor_converter/tensor_converter.c

index f2f6dce..95e8bb6 100644 (file)
@@ -879,6 +879,29 @@ gst_tensor_converter_reset (GstTensorConverter * self)
 }
 
 /**
+ * @brief Get supported format list.
+ */
+static void
+gst_tensor_converter_get_format_list (GValue * list, ...)
+{
+  GValue item = G_VALUE_INIT;
+  gchar *str;
+  va_list args;
+
+  g_value_init (list, GST_TYPE_LIST);
+
+  va_start (args, list);
+  while ((str = va_arg (args, gchar *))) {
+    g_value_init (&item, G_TYPE_STRING);
+    g_value_set_string (&item, str);
+
+    gst_value_list_append_value (list, &item);
+    g_value_unset (&item);
+  }
+  va_end (args);
+}
+
+/**
  * @brief Get pad caps for caps negotiation.
  */
 static GstCaps *
@@ -887,16 +910,162 @@ gst_tensor_converter_query_caps (GstTensorConverter * self, GstPad * pad,
 {
   GstCaps *caps;
 
-  if (pad == self->sinkpad) {
+  caps = gst_pad_get_current_caps (pad);
+  if (!caps) {
     caps = gst_pad_get_pad_template_caps (pad);
-  } else {
-    caps = gst_tensor_caps_from_config (&self->tensor_config);
+  }
+
+  if (pad == self->sinkpad) {
+    GstCaps *peer_caps;
+
+    /* get possible caps from downstream element */
+    peer_caps = gst_pad_peer_query_caps (self->srcpad, NULL);
+
+    if (peer_caps) {
+      silent_debug_caps (peer_caps, "peer caps");
+
+      if (gst_caps_get_size (peer_caps) > 0) {
+        GstTensorConfig config;
+        GstStructure *st;
+        GstCaps *media_caps, *tmp;
+        guint i, caps_len;
+        media_type type;
+
+        /* get tensor info from peer caps */
+        st = gst_caps_get_structure (peer_caps, 0);
+        gst_tensor_config_from_structure (&config, st);
+
+        /* convert peer caps to possible media caps */
+        media_caps = gst_caps_from_string (GST_TENSOR_MEDIA_CAPS_STR);
+        media_caps = gst_caps_make_writable (media_caps);
+        caps_len = gst_caps_get_size (media_caps);
+
+        for (i = 0; i < caps_len; ++i) {
+          st = gst_caps_get_structure (media_caps, i);
+          type = gst_tensor_media_type_from_structure (st);
+
+          switch (type) {
+            case _NNS_VIDEO:
+              /* video caps from tensor info */
+              if (config.info.type == _NNS_UINT8) {
+                GValue supported_formats = G_VALUE_INIT;
+                gint colorspace, width, height;
+
+                colorspace = config.info.dimension[0];
+                switch (colorspace) {
+                  case 1:
+                    gst_tensor_converter_get_format_list (&supported_formats,
+                        "GRAY8", NULL);
+                    break;
+                  case 3:
+                    gst_tensor_converter_get_format_list (&supported_formats,
+                        "RGB", "BGR", NULL);
+                    break;
+                  case 4:
+                    gst_tensor_converter_get_format_list (&supported_formats,
+                        "RGBx", "BGRx", "xRGB", "xBGR", "RGBA", "BGRA", "ARGB",
+                        "ABGR", NULL);
+                    break;
+                  default:
+                    /* unsupported format, set default video formats */
+                    break;
+                }
+
+                if (gst_value_list_get_size (&supported_formats) > 0) {
+                  gst_structure_set_value (st, "format", &supported_formats);
+                }
+                g_value_unset (&supported_formats);
+
+                if ((width = config.info.dimension[1]) > 0) {
+                  gst_structure_set (st, "width", G_TYPE_INT, width, NULL);
+                }
+
+                if ((height = config.info.dimension[2]) > 0) {
+                  gst_structure_set (st, "height", G_TYPE_INT, height, NULL);
+                }
+
+                if (config.rate_n >= 0 && config.rate_d > 0) {
+                  gst_structure_set (st, "framerate", GST_TYPE_FRACTION,
+                      config.rate_n, config.rate_d, NULL);
+                }
+              }
+              break;
+            case _NNS_AUDIO:
+              /* audio caps from tensor info */
+              if (config.info.type != _NNS_END) {
+                gint channels, samplerate;
+                GstAudioFormat aformat;
+
+                switch (config.info.type) {
+                  case _NNS_INT8:
+                    aformat = GST_AUDIO_FORMAT_S8;
+                    break;
+                  case _NNS_UINT8:
+                    aformat = GST_AUDIO_FORMAT_U8;
+                    break;
+                  case _NNS_INT16:
+                    aformat = GST_AUDIO_FORMAT_S16;
+                    break;
+                  case _NNS_UINT16:
+                    aformat = GST_AUDIO_FORMAT_U16;
+                    break;
+                  case _NNS_INT32:
+                    aformat = GST_AUDIO_FORMAT_S32;
+                    break;
+                  case _NNS_UINT32:
+                    aformat = GST_AUDIO_FORMAT_U32;
+                    break;
+                  case _NNS_FLOAT32:
+                    aformat = GST_AUDIO_FORMAT_F32;
+                    break;
+                  case _NNS_FLOAT64:
+                    aformat = GST_AUDIO_FORMAT_F64;
+                    break;
+                  default:
+                    /* unsupported format */
+                    aformat = GST_AUDIO_FORMAT_UNKNOWN;
+                    break;
+                }
+
+                if (aformat != GST_AUDIO_FORMAT_UNKNOWN) {
+                  gst_structure_set (st, "format", G_TYPE_STRING,
+                      gst_audio_format_to_string (aformat), NULL);
+
+                  if ((channels = config.info.dimension[0]) > 0) {
+                    gst_structure_set (st, "channels", G_TYPE_INT, channels,
+                        NULL);
+                  }
+
+                  if ((samplerate = config.rate_n) > 0) {
+                    gst_structure_set (st, "rate", G_TYPE_INT, samplerate,
+                        NULL);
+                  }
+                }
+              }
+              break;
+            default:
+              /* do nothing for text and octet stream */
+              break;
+          }
+        }
+
+        /* intersect with pad caps */
+        tmp = gst_caps_intersect_full (media_caps, caps,
+            GST_CAPS_INTERSECT_FIRST);
+        gst_caps_unref (caps);
+        caps = tmp;
+
+        gst_caps_unref (media_caps);
+      }
+
+      gst_caps_unref (peer_caps);
+    }
   }
 
   silent_debug_caps (caps, "caps");
   silent_debug_caps (filter, "filter");
 
-  if (caps && filter) {
+  if (filter) {
     GstCaps *intersection;
 
     intersection =
@@ -906,6 +1075,7 @@ gst_tensor_converter_query_caps (GstTensorConverter * self, GstPad * pad,
     caps = intersection;
   }
 
+  silent_debug_caps (caps, "result");
   return caps;
 }