The following functions are added:
- _set_need_decoding_for_loopback()
- _destroy_loopback_render_pipeline()
[Version] 0.3.233
[Issue Type] Refactoring
Change-Id: I94b7ac49fb9be0d830e701d6000bbbbe1eeafbb1
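
For context, this change replaces the inline teardown of the loopback render pipeline and the direct writes to the need_decoding flag with the two new helpers. The fragment below is only a minimal sketch restating the before/after pattern visible in the hunks of this change; it introduces no code beyond what the diff itself applies.

/* before: pipeline teardown duplicated at each call site */
if (source->av[av_idx].render.pipeline) {
	gst_element_set_state(source->av[av_idx].render.pipeline, GST_STATE_NULL);
	SAFE_GST_OBJECT_UNREF(source->av[av_idx].render.pipeline);
}

/* after: a single helper that returns early when no pipeline exists */
_destroy_loopback_render_pipeline(source, av_idx);

/* likewise, direct flag assignments become a setter call */
_set_need_decoding_for_loopback(source, av_idx, need_decoding);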
void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx);
void _unset_caps_for_render(webrtc_gst_slot_s *source, int av_idx);
int _update_caps_for_render_with_resolution(webrtc_gst_slot_s *source, int width, int height);
+void _set_need_decoding_for_loopback(webrtc_gst_slot_s *source, int av_idx, bool need_decoding);
+void _destroy_loopback_render_pipeline(webrtc_gst_slot_s *source, int av_idx);
/* source */
int _complete_sources(webrtc_s *webrtc);
Name: capi-media-webrtc
Summary: A WebRTC library in Tizen Native API
-Version: 0.3.232
+Version: 0.3.233
Release: 0
Group: Multimedia/API
License: Apache-2.0
if (source->av[i].pt > 0)
_return_payload_type(source->webrtc, source->av[i].pt);
- if (source->av[i].render.pipeline) {
- gst_element_set_state(source->av[i].render.pipeline, GST_STATE_NULL);
- SAFE_GST_OBJECT_UNREF(source->av[i].render.pipeline);
- }
+ _destroy_loopback_render_pipeline(source, i);
if (source->av[i].transceiver)
gst_object_unref(source->av[i].transceiver);
if (source->av[av_idx].pt > 0)
_return_payload_type(source->webrtc, source->av[av_idx].pt);
- if (source->av[av_idx].render.pipeline) {
- gst_element_set_state(source->av[av_idx].render.pipeline, GST_STATE_NULL);
- SAFE_GST_OBJECT_UNREF(source->av[av_idx].render.pipeline);
- }
+ _destroy_loopback_render_pipeline(source, av_idx);
if ((appsrc = gst_bin_get_by_name(source->bin, _get_element_name(av_idx, ELEMENT_APPSRC))))
APPEND_ELEMENT(element_list, appsrc);
goto exit_with_remove_from_bin;
}
- source->av[av_idx].render.need_decoding = need_decoding;
+ _set_need_decoding_for_loopback(source, av_idx, need_decoding);
GET_CAPS_INFO_FROM_PAD(pad, source->av[av_idx].render.appsrc_caps);
_add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), _source_data_probe_cb);
source->sound_stream_info.type = NULL;
}
- gst_element_set_state(source->av[AV_IDX_AUDIO].render.pipeline, GST_STATE_NULL);
- SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
+ _destroy_loopback_render_pipeline(source, AV_IDX_AUDIO);
source->av[AV_IDX_AUDIO].render.appsrc = NULL;
source->display = NULL;
}
- gst_element_set_state(source->av[AV_IDX_VIDEO].render.pipeline, GST_STATE_NULL);
- SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
+ _destroy_loopback_render_pipeline(source, AV_IDX_VIDEO);
source->av[AV_IDX_VIDEO].render.appsrc = NULL;
return WEBRTC_ERROR_NONE;
}
+
+void _set_need_decoding_for_loopback(webrtc_gst_slot_s *source, int av_idx, bool need_decoding)
+{
+ RET_IF(source == NULL, "source is NULL");
+
+ source->av[av_idx].render.need_decoding = need_decoding;
+}
+
+void _destroy_loopback_render_pipeline(webrtc_gst_slot_s *source, int av_idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+
+ if (!source->av[av_idx].render.pipeline)
+ return;
+
+ gst_element_set_state(source->av[av_idx].render.pipeline, GST_STATE_NULL);
+ SAFE_GST_OBJECT_UNREF(source->av[av_idx].render.pipeline);
+}
caps_for_render = sink_caps;
}
- source->av[idx].render.need_decoding = true;
+ _set_need_decoding_for_loopback(source, idx, true);
pad_for_render = gst_element_get_static_pad(capsfilter, "src");
goto skip_encoder;
APPEND_ELEMENT(*element_list, encoder);
- source->av[idx].render.need_decoding = false;
-
+ _set_need_decoding_for_loopback(source, idx, false);
skip_encoder:
CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
__make_default_encoded_caps(source, &webrtc->ini, &media_type),