{
if (!SourceId.HasValue)
{
- throw new InvalidOperationException("MediaSource is not attached yet. Call SetSource() first.");
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
}
NativeWebRTC.GetTransceiverDirection(WebRtc.Handle, SourceId.Value, MediaType, out TransceiverDirection mode).
{
if (!SourceId.HasValue)
{
- throw new InvalidOperationException("MediaSource is not attached yet. Call SetSource() first.");
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
}
NativeWebRTC.SetTransceiverDirection(WebRtc.Handle, SourceId.Value, MediaType, value).
{
get
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
+
NativeWebRTC.GetPause(WebRtc.Handle, SourceId.Value, MediaType, out bool isPaused).
ThrowIfFailed("Failed to get pause");
}
set
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
+
NativeWebRTC.SetPause(WebRtc.Handle, SourceId.Value, MediaType, value).
ThrowIfFailed("Failed to set pause");
}
{
get
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
+
NativeWebRTC.GetMute(WebRtc.Handle, SourceId.Value, MediaType, out bool isMuted).
ThrowIfFailed("Failed to get mute");
}
set
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
+
NativeWebRTC.SetMute(WebRtc.Handle, SourceId.Value, MediaType, value).
ThrowIfFailed("Failed to set mute");
}
/// Gets or sets the video resolution of the current media source.
/// </summary>
/// <value>A value that specifies the video resolution.</value>
- /// <exception cref="ArgumentException">This source is not video source.</exception>
- /// <exception cref="InvalidOperationException">MediaSource is not attached yet.</exception>
+ /// <exception cref="InvalidOperationException">
+ /// MediaSource is not attached yet.<br/>
+ /// -or-<br/>
+ /// This MediaSource is not Video
+ /// </exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <since_tizen> 9 </since_tizen>
public Size VideoResolution
{
get
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
if (MediaType != MediaType.Video)
{
- throw new ArgumentException("This property is only for video.");
+ throw new InvalidOperationException("This property is only for video.");
}
NativeWebRTC.GetVideoResolution(WebRtc.Handle, SourceId.Value, out int width, out int height).
}
set
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
if (MediaType != MediaType.Video)
{
- throw new ArgumentException("This property is only for video.");
+ throw new InvalidOperationException("This property is only for video.");
}
NativeWebRTC.SetVideoResolution(WebRtc.Handle, SourceId.Value, value.Width, value.Height).
/// <see cref="AudioStreamType.MediaExternalOnly"/>.<br/>
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="policy"/> is null.</exception>
- /// <exception cref="InvalidOperationException">This MediaSource is not Audio</exception>
+ /// <exception cref="InvalidOperationException">
+ /// MediaSource is not attached yet.<br/>
+ /// -or-<br/>
+ /// This MediaSource is not Audio
+ /// </exception>
/// <exception cref="NotSupportedException">
/// <see cref="AudioStreamType"/> of <paramref name="policy"/> is not supported on the current platform.
/// </exception>
/// <returns><see cref="MediaStreamTrack"/></returns>
public MediaStreamTrack EnableAudioLoopback(AudioStreamPolicy policy)
{
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
if (policy == null)
{
throw new ArgumentNullException(nameof(policy));
}
-
if (MediaType != MediaType.Audio)
{
throw new InvalidOperationException("AudioLoopback is only for Audio MediaSource");
/// <param name="display">The <see cref="Display"/> to apply.</param>
/// <exception cref="ArgumentException">The display has already been assigned to another.</exception>
/// <exception cref="ArgumentNullException"><paramref name="display"/> is null.</exception>
- /// <exception cref="InvalidOperationException">This MediaSource is not Video</exception>
+ /// <exception cref="InvalidOperationException">
+ /// MediaSource is not attached yet.<br/>
+ /// -or-<br/>
+ /// This MediaSource is not Video
+ /// </exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <returns><see cref="MediaStreamTrack"/></returns>
public MediaStreamTrack EnableVideoLoopback(Display display)
{
uint trackId = 0;
+ if (!SourceId.HasValue)
+ {
+ throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
+ }
if (display == null)
{
throw new ArgumentNullException(nameof(display), "Display cannot be null.");
}
-
if (MediaType != MediaType.Video)
{
throw new InvalidOperationException("VideoLoopback is only for Video MediaSource");
{
get
{
+ if (Type != MediaType.Video)
+ {
+ throw new InvalidOperationException("This property is only for video.");
+ }
+
NativeWebRTC.GetDisplayMode(_webRtc.Handle, _trackId, out var val).
ThrowIfFailed("Failed to get WebRTC display mode");
}
set
{
+ if (Type != MediaType.Video)
+ {
+ throw new InvalidOperationException("This property is only for video.");
+ }
+
ValidationUtil.ValidateEnum(typeof(WebRTCDisplayMode), value, nameof(value));
NativeWebRTC.SetDisplayMode(_webRtc.Handle, _trackId, value).
{
get
{
+ if (Type != MediaType.Video)
+ {
+ throw new InvalidOperationException("This property is only for video.");
+ }
+
NativeWebRTC.GetDisplayVisible(_webRtc.Handle, _trackId, out bool val).
ThrowIfFailed("Failed to get visible status");
}
set
{
+ if (Type != MediaType.Video)
+ {
+ throw new InvalidOperationException("This property is only for video.");
+ }
+
NativeWebRTC.SetDisplayVisible(_webRtc.Handle, _trackId, value).
ThrowIfFailed("Failed to set display status.");
}
/// Gets or sets the STUN server url.
/// </summary>
/// <value>The STUN server url</value>
+ /// <exception cref="ArgumentNullException">STUN server URI is null.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <since_tizen> 9 </since_tizen>
public string StunServer
if (_disposed || !disposing)
return;
- if (_source != null && _source.Count > 0)
- {
- try
- {
- Log.Info(WebRTCLog.Tag, "Detach sources");
- foreach (var source in _source)
- {
- source.ReplaceDisplay(null);
- source.DetachFrom(this);
- }
- _source.Clear();
- _source = null;
- }
- catch (Exception ex)
- {
- Log.Error(WebRTCLog.Tag, ex.ToString());
- }
- }
-
if (_handle != null)
{
_handle.Dispose();
+using System.ComponentModel;
/*
* Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
*
private NativeDataChannel.MessageReceivedCallback _webRtcDataChannelMsgRecvCallback;
private NativeDataChannel.ErrorOccurredCallback _webRtcDataChannelErrorOccurredCallback;
+ private event EventHandler<EventArgs> _opened;
+ private event EventHandler<EventArgs> _closed;
+ private event EventHandler<WebRTCDataChannelMessageReceivedEventArgs> _messageReceived;
+ private event EventHandler<WebRTCDataChannelErrorOccurredEventArgs> _errorOccurred;
+
/// <summary>
/// Occurs when the data channel's underlying data transport is established.
/// </summary>
/// <since_tizen> 9 </since_tizen>
- public event EventHandler<EventArgs> Opened;
+ public event EventHandler<EventArgs> Opened
+ {
+ add
+ {
+ if (_opened == null)
+ {
+ RegisterDataChannelOpenedCallback();
+ }
+ _opened += value;
+ }
+ remove
+ {
+ _opened -= value;
+ if (_opened == null)
+ {
+ UnregisterDataChannelOpenedCallback();
+ }
+ }
+ }
/// <summary>
/// Occurs when the data channel has closed down.
/// </summary>
/// <since_tizen> 9 </since_tizen>
- public event EventHandler<EventArgs> Closed;
+ public event EventHandler<EventArgs> Closed
+ {
+ add
+ {
+ if (_closed == null)
+ {
+ RegisterDataChannelClosedCallback();
+ }
+ _closed += value;
+ }
+ remove
+ {
+ _closed -= value;
+ if (_closed == null)
+ {
+ UnregisterDataChannelClosedCallback();
+ }
+ }
+ }
/// <summary>
/// Occurs when a message is received from the remote peer.
/// </summary>
/// <since_tizen> 9 </since_tizen>
- public event EventHandler<WebRTCDataChannelMessageReceivedEventArgs> MessageReceived;
+ public event EventHandler<WebRTCDataChannelMessageReceivedEventArgs> MessageReceived
+ {
+ add
+ {
+ if (_messageReceived == null)
+ {
+ RegisterDataChannelMessageReceivedCallback();
+ }
+ _messageReceived += value;
+ }
+ remove
+ {
+ _messageReceived -= value;
+ if (_messageReceived == null)
+ {
+ UnregisterDataChannelMessageReceivedCallback();
+ }
+ }
+ }
/// <summary>
/// Occurs when an error occurs on the data channel.
/// </summary>
/// <since_tizen> 9 </since_tizen>
- public event EventHandler<WebRTCDataChannelErrorOccurredEventArgs> ErrorOccurred;
-
- private void RegisterEvents()
+ public event EventHandler<WebRTCDataChannelErrorOccurredEventArgs> ErrorOccurred
{
- RegisterDataChannelOpenedCallback();
- RegisterDataChannelClosedCallback();
- RegisterDataChannelMsgRecvCallback();
- RegisterDataChannelErrorOccurredCallback();
+ add
+ {
+ if (_errorOccurred == null)
+ {
+ RegisterDataChannelErrorOccurredCallback();
+ }
+ _errorOccurred += value;
+ }
+ remove
+ {
+ _errorOccurred -= value;
+ if (_errorOccurred == null)
+ {
+ UnregisterDataChannelErrorOccurredCallback();
+ }
+ }
}
private void RegisterDataChannelOpenedCallback()
{
_webRtcDataChannelOpenedCallback = (dataChannelHandle, _) =>
{
- Opened?.Invoke(this, new EventArgs());
+ _opened?.Invoke(this, new EventArgs());
};
NativeDataChannel.SetOpenedCb(_handle, _webRtcDataChannelOpenedCallback).
ThrowIfFailed("Failed to set data channel opened callback.");
}
- private void RegisterDataChannelMsgRecvCallback()
+ private void UnregisterDataChannelOpenedCallback()
+ {
+ NativeDataChannel.UnsetOpenedCb(_handle).
+ ThrowIfFailed("Failed to unset data channel opened callback.");
+ }
+
+ private void RegisterDataChannelClosedCallback()
+ {
+ _webRtcDataChannelClosedCallback = (dataChannelHandle, _) =>
+ {
+ _closed?.Invoke(this, new EventArgs());
+ };
+
+ NativeDataChannel.SetClosedCb(_handle, _webRtcDataChannelClosedCallback).
+ ThrowIfFailed("Failed to set data channel closed callback.");
+ }
+
+ private void UnregisterDataChannelClosedCallback()
+ {
+ NativeDataChannel.UnsetClosedCb(_handle).
+ ThrowIfFailed("Failed to unset data channel closed callback.");
+ }
+
+ private void RegisterDataChannelMessageReceivedCallback()
{
_webRtcDataChannelMsgRecvCallback = (dataChannelHandle, type, message, _) =>
{
- MessageReceived?.Invoke(this, new WebRTCDataChannelMessageReceivedEventArgs(type, message));
+ _messageReceived?.Invoke(this, new WebRTCDataChannelMessageReceivedEventArgs(type, message));
};
NativeDataChannel.SetMessageReceivedCb(_handle, _webRtcDataChannelMsgRecvCallback).
ThrowIfFailed("Failed to set data channel message received callback.");
}
+ private void UnregisterDataChannelMessageReceivedCallback()
+ {
+ NativeDataChannel.UnsetMessageReceivedCb(_handle).
+ ThrowIfFailed("Failed to unset data channel message received callback.");
+ }
+
private void RegisterDataChannelErrorOccurredCallback()
{
_webRtcDataChannelErrorOccurredCallback = (dataChannelHandle, error, _) =>
{
- ErrorOccurred?.Invoke(this, new WebRTCDataChannelErrorOccurredEventArgs((WebRTCError)error));
+ _errorOccurred?.Invoke(this, new WebRTCDataChannelErrorOccurredEventArgs((WebRTCError)error));
};
NativeDataChannel.SetErrorOccurredCb(_handle, _webRtcDataChannelErrorOccurredCallback).
ThrowIfFailed("Failed to set data channel error callback.");
}
- private void RegisterDataChannelClosedCallback()
+ private void UnregisterDataChannelErrorOccurredCallback()
{
- _webRtcDataChannelClosedCallback = (dataChannelHandle, _) =>
- {
- Closed?.Invoke(this, new EventArgs());
- };
-
- NativeDataChannel.SetClosedCb(_handle, _webRtcDataChannelClosedCallback).
- ThrowIfFailed("Failed to set data channel closed callback.");
+ NativeDataChannel.UnsetErrorOccurredCb(_handle).
+ ThrowIfFailed("Failed to unset data channel error callback.");
}
}
}
\ No newline at end of file
Debug.Assert(_handle != null);
Label = label;
-
- RegisterEvents();
}
internal WebRTCDataChannel(IntPtr dataChannelHandle)
ThrowIfFailed("Failed to get label");
Label = label;
-
- Log.Info(WebRTCLog.Tag, "Register event");
- RegisterEvents();
}
private IntPtr Handle
return;
}
- if (true)
+ if (_handle != null)
{
NativeDataChannel.Destroy(_handle);
_disposed = true;
/// <remarks>This state is related in SDP offer/answer.</remarks>
/// <seealso cref="WebRTC.SetLocalDescription"/>
/// <seealso cref="WebRTC.SetRemoteDescription"/>
- /// <seealso cref="WebRTC.CreateAnswer()"/>
+ /// <seealso cref="WebRTC.CreateAnswerAsync()"/>
/// <since_tizen> 9 </since_tizen>
public enum WebRTCSignalingState
{