}
static void
-gst_rtp_h265_depay_reset (GstRtpH265Depay * rtph265depay)
+gst_rtp_h265_depay_reset (GstRtpH265Depay * rtph265depay, gboolean hard)
{
gst_adapter_clear (rtph265depay->adapter);
rtph265depay->wait_start = TRUE;
g_ptr_array_set_size (rtph265depay->vps, 0);
g_ptr_array_set_size (rtph265depay->sps, 0);
g_ptr_array_set_size (rtph265depay->pps, 0);
+
+ /* A hard reset additionally drops the allocator negotiated with
+ * downstream and restores default allocation params; a soft reset
+ * (hard == FALSE, e.g. on FLUSH_STOP) keeps the negotiated allocation
+ * so it can be reused after the flush. */
+ if (hard) {
+ if (rtph265depay->allocator != NULL) {
+ gst_object_unref (rtph265depay->allocator);
+ rtph265depay->allocator = NULL;
+ }
+ gst_allocation_params_init (&rtph265depay->params);
+ }
}
static void
return TRUE;
}
+/* Set @caps on the src pad and (re)negotiate output buffer allocation.
+ *
+ * After the caps are accepted downstream, an ALLOCATION query is run and
+ * the first proposed allocator/params pair, if any, replaces the pair
+ * stored on @rtph265depay for allocating output buffers.
+ *
+ * Returns: TRUE if the caps could be set on the src pad. */
+static gboolean
+gst_rtp_h265_depay_set_output_caps (GstRtpH265Depay * rtph265depay,
+ GstCaps * caps)
+{
+ GstAllocationParams params;
+ GstAllocator *allocator = NULL;
+ GstPad *srcpad;
+ gboolean res;
+
+ gst_allocation_params_init (&params);
+
+ srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph265depay);
+
+ res = gst_pad_set_caps (srcpad, caps);
+
+ if (res) {
+ GstQuery *query;
+
+ /* Ask downstream how it wants buffers allocated; a failed query is
+ * not fatal, we simply fall back to the default allocator. */
+ query = gst_query_new_allocation (caps, TRUE);
+ if (!gst_pad_peer_query (srcpad, query)) {
+ GST_DEBUG_OBJECT (rtph265depay, "downstream ALLOCATION query failed");
+ }
+
+ if (gst_query_get_n_allocation_params (query) > 0) {
+ gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+ }
+
+ gst_query_unref (query);
+ }
+
+ /* Replace any previously negotiated allocator; ownership of the ref
+ * returned by the query (if any) transfers to rtph265depay. */
+ if (rtph265depay->allocator)
+ gst_object_unref (rtph265depay->allocator);
+
+ rtph265depay->allocator = allocator;
+ rtph265depay->params = params;
+
+ return res;
+}
static gboolean
gst_rtp_h265_set_src_caps (GstRtpH265Depay * rtph265depay)
}
if (update_caps) {
- res = gst_pad_set_caps (srcpad, srccaps);
+ res = gst_rtp_h265_depay_set_output_caps (rtph265depay, srccaps);
} else {
res = TRUE;
}
}
+/* Allocate a @size-byte output buffer, preferring the allocator and
+ * params negotiated via the downstream ALLOCATION query (see
+ * gst_rtp_h265_depay_set_output_caps); falls back to the default
+ * system allocator when that fails. Returns NULL if @size is 0 (and,
+ * in theory, if even the fallback allocation fails). */
static GstBuffer *
+gst_rtp_h265_depay_allocate_output_buffer (GstRtpH265Depay * depay, gsize size)
+{
+ GstBuffer *buffer = NULL;
+
+ g_return_val_if_fail (size > 0, NULL);
+
+ GST_LOG_OBJECT (depay, "want output buffer of %u bytes", (guint) size);
+
+ /* Negotiated allocator first; NULL allocator here means the default. */
+ buffer = gst_buffer_new_allocate (depay->allocator, size, &depay->params);
+ if (buffer == NULL) {
+ GST_INFO_OBJECT (depay, "couldn't allocate output buffer");
+ buffer = gst_buffer_new_allocate (NULL, size, NULL);
+ }
+
+ return buffer;
+}
+
+static GstBuffer *
gst_rtp_h265_complete_au (GstRtpH265Depay * rtph265depay,
GstClockTime * out_timestamp, gboolean * out_keyframe)
{
- guint outsize;
+ GstBufferList *list;
+ GstMapInfo outmap;
GstBuffer *outbuf;
+ guint outsize, offset = 0;
+ gint b, n_bufs, m, n_mem;
/* we had a picture in the adapter and we completed it */
GST_DEBUG_OBJECT (rtph265depay, "taking completed AU");
outsize = gst_adapter_available (rtph265depay->picture_adapter);
- outbuf = gst_adapter_take_buffer (rtph265depay->picture_adapter, outsize);
+
+ outbuf = gst_rtp_h265_depay_allocate_output_buffer (rtph265depay, outsize);
+
+ if (outbuf == NULL)
+ return NULL;
+
+ if (!gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE))
+ return NULL;
+
+ list = gst_adapter_take_buffer_list (rtph265depay->picture_adapter, outsize);
+
+ n_bufs = gst_buffer_list_length (list);
+ for (b = 0; b < n_bufs; ++b) {
+ GstBuffer *buf = gst_buffer_list_get (list, b);
+
+ n_mem = gst_buffer_n_memory (buf);
+ for (m = 0; m < n_mem; ++m) {
+ GstMemory *mem = gst_buffer_peek_memory (buf, m);
+ gsize mem_size = gst_memory_get_sizes (mem, NULL, NULL);
+ GstMapInfo mem_map;
+
+ if (gst_memory_map (mem, &mem_map, GST_MAP_READ)) {
+ memcpy (outmap.data + offset, mem_map.data, mem_size);
+ gst_memory_unmap (mem, &mem_map);
+ } else {
+ memset (outmap.data + offset, 0, mem_size);
+ }
+ offset += mem_size;
+ }
+
+ gst_rtp_copy_video_meta (rtph265depay, outbuf, buf);
+ }
+ gst_buffer_list_unref (list);
+ gst_buffer_unmap (outbuf, &outmap);
*out_timestamp = rtph265depay->last_ts;
*out_keyframe = rtph265depay->last_keyframe;
#define NAL_TYPE_IS_KEY(nt) (NAL_TYPE_IS_PARAMETER_SET(nt) || NAL_TYPE_IS_IRAP(nt))
-static GstBuffer *
+static void
gst_rtp_h265_depay_handle_nal (GstRtpH265Depay * rtph265depay, GstBuffer * nal,
GstClockTime in_timestamp, gboolean marker)
{
4, gst_buffer_get_size (nal) - 4));
gst_buffer_unmap (nal, &map);
gst_buffer_unref (nal);
- return NULL;
+ return;
} else if (rtph265depay->sps->len == 0 || rtph265depay->pps->len == 0) {
/* Down push down any buffer in non-bytestream mode if the SPS/PPS haven't
* go through yet
"all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
gst_buffer_unmap (nal, &map);
gst_buffer_unref (nal);
- return NULL;
+ return;
}
if (rtph265depay->new_codec_data &&
GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
else
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ gst_rtp_base_depayload_push (depayload, outbuf);
}
- return outbuf;
+ return;
/* ERRORS */
short_nal:
GST_WARNING_OBJECT (depayload, "dropping short NAL");
gst_buffer_unmap (nal, &map);
gst_buffer_unref (nal);
- return NULL;
+ return;
}
}
-static GstBuffer *
-gst_rtp_h265_push_fragmentation_unit (GstRtpH265Depay * rtph265depay,
- gboolean send)
+static void
+gst_rtp_h265_finish_fragmentation_unit (GstRtpH265Depay * rtph265depay)
{
guint outsize;
GstMapInfo map;
rtph265depay->current_fu_type = 0;
- outbuf = gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf,
+ gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf,
rtph265depay->fu_timestamp, rtph265depay->fu_marker);
- if (send && outbuf) {
- gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtph265depay), outbuf);
- outbuf = NULL;
- }
- return outbuf;
+ return;
not_implemented:
{
GST_ERROR_OBJECT (rtph265depay,
("Only bytestream format is currently supported."));
gst_buffer_unmap (outbuf, &map);
- return NULL;
+ return;
}
}
* when the FU ended) and send out what we gathered thusfar */
if (G_UNLIKELY (rtph265depay->current_fu_type != 0 &&
nal_unit_type != rtph265depay->current_fu_type))
- gst_rtp_h265_push_fragmentation_unit (rtph265depay, TRUE);
+ gst_rtp_h265_finish_fragmentation_unit (rtph265depay);
switch (nal_unit_type) {
case 48:
gst_rtp_copy_video_meta (rtph265depay, outbuf, rtp->buffer);
- outbuf =
- gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf, timestamp,
+ gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf, timestamp,
marker);
- if (outbuf)
- gst_adapter_push (rtph265depay->adapter, outbuf);
payload += nalu_size;
payload_len -= nalu_size;
}
-
- outsize = gst_adapter_available (rtph265depay->adapter);
- if (outsize > 0)
- outbuf = gst_adapter_take_buffer (rtph265depay->adapter, outsize);
break;
}
case 49:
* Assume that the remote payloader is buggy (doesn't set the end
* bit) and send out what we've gathered thusfar */
if (G_UNLIKELY (rtph265depay->current_fu_type != 0))
- gst_rtp_h265_push_fragmentation_unit (rtph265depay, TRUE);
+ gst_rtp_h265_finish_fragmentation_unit (rtph265depay);
rtph265depay->current_fu_type = nal_unit_type;
rtph265depay->fu_timestamp = timestamp;
/* if NAL unit ends, flush the adapter */
if (E) {
- outbuf = gst_rtp_h265_push_fragmentation_unit (rtph265depay, FALSE);
+ gst_rtp_h265_finish_fragmentation_unit (rtph265depay);
GST_DEBUG_OBJECT (rtph265depay, "End of Fragmentation Unit");
}
break;
gst_rtp_copy_video_meta (rtph265depay, outbuf, rtp->buffer);
- outbuf = gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf, timestamp,
- marker);
+ gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf, timestamp, marker);
break;
}
}
}
- return outbuf;
+ return NULL;
/* ERRORS */
empty_packet:
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
- gst_rtp_h265_depay_reset (rtph265depay);
+ gst_rtp_h265_depay_reset (rtph265depay, FALSE);
break;
default:
break;
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_rtp_h265_depay_reset (rtph265depay);
+ gst_rtp_h265_depay_reset (rtph265depay, TRUE);
break;
default:
break;
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_h265_depay_reset (rtph265depay, TRUE);
+ break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;
default: