/// <summary>
/// Detaches this media test source from the given WebRTC instance.
/// </summary>
/// <param name="webRtc">The WebRTC this source is currently attached to.</param>
internal override void OnDetached(WebRTC webRtc)
{
    // `{MediaType}` interpolates via ToString() already; the explicit call was redundant.
    NativeWebRTC.RemoveMediaSource(webRtc.Handle, SourceId.Value).
        ThrowIfFailed($"Failed to remove {MediaType} MediaTestSource.");

    // Only clear the managed back-reference after the native removal succeeded.
    WebRtc = null;
}
/// <summary>
/// Starts the WebRTC asynchronously.
/// </summary>
/// <remarks>
/// The WebRTC must be in the <see cref="WebRTCState.Idle"/> state.<br/>
/// The WebRTC state will be <see cref="WebRTCState.Negotiating"/> state.<br/>
/// This ensures that <see cref="State" /> is changed to <see cref="WebRTCState.Negotiating"/> state.
/// </remarks>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <seealso cref="WebRTCState"/>
/// <seealso cref="CreateOfferAsync()"/>
/// <since_tizen> 9 </since_tizen>
public async Task StartAsync()
{
    ValidateWebRTCState(WebRTCState.Idle);

    // Run continuations asynchronously so the awaiting continuation does not
    // execute inline on the native state-changed callback thread.
    var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);

    EventHandler<WebRTCStateChangedEventArgs> stateChangedEventHandler = (s, e) =>
    {
        if (e.Current == WebRTCState.Negotiating)
        {
            tcs.TrySetResult(true);
        }
    };

    try
    {
        StateChanged += stateChangedEventHandler;

        NativeWebRTC.Start(Handle).ThrowIfFailed("Failed to start the WebRTC");

        await tcs.Task.ConfigureAwait(false);
        await Task.Yield();
    }
    finally
    {
        // Always unsubscribe, even when Start fails, to avoid leaking the handler.
        StateChanged -= stateChangedEventHandler;
    }
}
/// <summary>
/// Routes the local video of this source to the given Ecore window for loopback display.
/// </summary>
/// <param name="windowHandle">The native Ecore window handle.</param>
/// <returns>The identifier of the loopback track.</returns>
uint IDisplayable<uint>.ApplyEcoreWindow(IntPtr windowHandle)
{
    var result = NativeWebRTC.SetEcoreVideoLoopback(WebRtc.Handle, SourceId.Value,
        windowHandle, out uint trackId);

    result.ThrowIfFailed("Failed to set ecore video loopback");

    return trackId;
}
/// <summary>
/// Enables the audio loopback. The local audio will be played with <paramref name="policy"/>.
/// </summary>
/// <param name="policy">The <see cref="AudioStreamPolicy"/> to apply.</param>
/// <remarks>
/// <see cref="MediaSource"/> does not support all <see cref="AudioStreamType"/>.<br/>
/// Supported types are <see cref="AudioStreamType.Media"/>, <see cref="AudioStreamType.Voip"/>,
/// <see cref="AudioStreamType.MediaExternalOnly"/>.<br/>
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="policy"/> is null.</exception>
/// <exception cref="InvalidOperationException">
/// MediaSource is not attached yet.<br/>
/// -or-<br/>
/// This MediaSource is not Audio
/// </exception>
/// <exception cref="NotSupportedException">
/// <see cref="AudioStreamType"/> of <paramref name="policy"/> is not supported on the current platform.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// <paramref name="policy"/> or WebRTC has already been disposed.
/// </exception>
/// <returns><see cref="MediaStreamTrack"/></returns>
public MediaStreamTrack EnableAudioLoopback(AudioStreamPolicy policy)
{
    // Guard order matters: attachment, then null argument, then media type.
    if (!SourceId.HasValue)
    {
        throw new InvalidOperationException("MediaSource is not attached yet. Call AddSource() first.");
    }

    if (policy == null)
    {
        throw new ArgumentNullException(nameof(policy));
    }

    if (MediaType != MediaType.Audio)
    {
        throw new InvalidOperationException("AudioLoopback is only for Audio MediaSource");
    }

    var errorCode = NativeWebRTC.SetAudioLoopback(WebRtc.Handle, SourceId.Value,
        policy.Handle, out uint trackId);

    // The native layer reports an unsupported stream type as InvalidArgument;
    // surface it to callers as NotSupportedException per the documented contract.
    if (errorCode == WebRTCErrorCode.InvalidArgument)
    {
        throw new NotSupportedException("The specified policy is not supported on the current system.");
    }

    errorCode.ThrowIfFailed("Failed to set the audio stream policy to the WebRTC");

    return new MediaStreamTrack(WebRtc, MediaType, trackId);
}
/// <summary>
/// Creates SDP answer asynchronously with option to an offer received from a remote peer.
/// </summary>
/// <remarks>The WebRTC must be in the <see cref="WebRTCState.Negotiating"/></remarks>
/// <returns>The SDP answer.</returns>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <seealso cref="CreateOfferAsync()"/>
/// <since_tizen> 9 </since_tizen>
public async Task<string> CreateAnswerAsync()
{
    ValidateWebRTCState(WebRTCState.Negotiating);

    // Run continuations asynchronously so the awaiting continuation does not
    // execute inline on the native SDP callback thread.
    var tcsSdpCreated = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);

    NativeWebRTC.SdpCreatedCallback cb = (handle, sdp, _) =>
    {
        tcsSdpCreated.TrySetResult(sdp);
    };

    string answer = null;

    // Keep the delegate alive until the native callback has fired;
    // otherwise the GC may collect it while the native side still holds it.
    using (var cbKeeper = ObjectKeeper.Get(cb))
    {
        NativeWebRTC.CreateSDPAnswerAsync(Handle, new SafeBundleHandle(), cb, IntPtr.Zero).
            ThrowIfFailed("Failed to create answer asynchronously");

        answer = await tcsSdpCreated.Task.ConfigureAwait(false);
        await Task.Yield();
    }

    return answer;
}
/// <summary>
/// Sets the session description of the remote peer's current offer or answer.
/// </summary>
/// <remarks>The WebRTC must be in the <see cref="WebRTCState.Negotiating"/>.</remarks>
/// <param name="description">The remote session description.</param>
/// <exception cref="ArgumentException">The description is empty string.</exception>
/// <exception cref="ArgumentNullException">The description is null.</exception>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <seealso cref="CreateOfferAsync()"/>
/// <seealso cref="CreateAnswerAsync()"/>
/// <since_tizen> 9 </since_tizen>
public void SetRemoteDescription(string description)
{
    // State check first, then argument validation, matching the documented exceptions.
    ValidateWebRTCState(WebRTCState.Negotiating);

    ValidationUtil.ValidateIsNullOrEmpty(description, nameof(description));

    NativeWebRTC.SetRemoteDescription(Handle, description)
        .ThrowIfFailed("Failed to set description.");
}
/// <summary>
/// Adds a new ICE candidate from the remote peer over its signaling channel.
/// </summary>
/// <remarks>The WebRTC must be in the <see cref="WebRTCState.Negotiating"/>.</remarks>
/// <param name="iceCandidate">The ICE candidate.</param>
/// <exception cref="ArgumentException">The ICE candidate is empty string.</exception>
/// <exception cref="ArgumentNullException">The ICE candidate is null.</exception>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <since_tizen> 9 </since_tizen>
public void AddIceCandidate(string iceCandidate)
{
    // State check first, then argument validation, matching the documented exceptions.
    ValidateWebRTCState(WebRTCState.Negotiating);

    ValidationUtil.ValidateIsNullOrEmpty(iceCandidate, nameof(iceCandidate));

    NativeWebRTC.AddIceCandidate(Handle, iceCandidate)
        .ThrowIfFailed("Failed to set ICE candidate.");
}
/// <summary>
/// Creates SDP answer with option to an offer received from a remote peer.
/// </summary>
/// <remarks>
/// The WebRTC must be in the <see cref="WebRTCState.Negotiating"/>.<br/>
/// The SDP offer must be set by <see cref="SetRemoteDescription"/> before creating answer.
/// </remarks>
/// <param name="bundle">Configuration options for the answer.</param>
/// <returns>The SDP answer.</returns>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <seealso cref="CreateAnswer()"/>
/// <seealso cref="SetRemoteDescription(string)"/>
/// <since_tizen> 9 </since_tizen>
public string CreateAnswer(Bundle bundle)
{
    ValidateWebRTCState(WebRTCState.Negotiating);

    // A null bundle is valid: the native API accepts an empty handle as "no options".
    var nativeBundle = bundle?.SafeBundleHandle ?? new SafeBundleHandle();

    NativeWebRTC.CreateSDPAnswer(Handle, nativeBundle, out string answer)
        .ThrowIfFailed("Failed to create answer");

    return answer;
}
/// <summary>
/// Detaches this media packet source from the given WebRTC instance.
/// </summary>
/// <param name="webRtc">The WebRTC this source is currently attached to.</param>
internal override void OnDetached(WebRTC webRtc)
{
    NativeWebRTC.RemoveMediaSource(webRtc.Handle, SourceId.Value)
        .ThrowIfFailed("Failed to remove MediaPacketSource.");

    // Tear down the per-media configurations before dropping the back-reference.
    AudioConfiguration?.OnWebRTCUnset();
    VideoConfiguration?.OnWebRTCUnset();

    WebRtc = null;
}
/// <summary>
/// Registers the native ICE candidate callback that raises <see cref="IceCandidate"/>.
/// </summary>
private void RegisterIceCandidateCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcIceCandidateCallback = (handle, candidate, _) =>
        IceCandidate?.Invoke(this, new WebRTCIceCandidateEventArgs(candidate));

    NativeWebRTC.SetIceCandidateCb(Handle, _webRtcIceCandidateCallback)
        .ThrowIfFailed("Failed to set ice candidate callback.");
}
/// <summary>
/// Registers the native negotiation-needed callback that raises <see cref="NegotiationNeeded"/>.
/// </summary>
private void RegisterNegotiationNeededCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcNegotiationNeededCallback = (handle, _) =>
    {
        // EventArgs.Empty avoids allocating a fresh EventArgs on every invocation.
        NegotiationNeeded?.Invoke(this, EventArgs.Empty);
    };

    NativeWebRTC.SetNegotiationNeededCb(Handle, _webRtcNegotiationNeededCallback).
        ThrowIfFailed("Failed to set negotiation needed callback.");
}
/// <summary>
/// Routes the local video of this source to the given Evas object for loopback display.
/// </summary>
/// <param name="type">The display type; must be a defined value other than <see cref="DisplayType.None"/>.</param>
/// <param name="evasObject">The target Evas object.</param>
/// <returns>The identifier of the loopback track.</returns>
uint IDisplayable<uint>.ApplyEvasDisplay(DisplayType type, EvasObject evasObject)
{
    Debug.Assert(Enum.IsDefined(typeof(DisplayType), type));
    Debug.Assert(type != DisplayType.None);

    // Map the public display type onto the native loopback display type.
    var displayType = type == DisplayType.Overlay
        ? WebRTCDisplayType.Overlay
        : WebRTCDisplayType.Evas;

    NativeWebRTC.SetVideoLoopback(WebRtc.Handle, SourceId.Value, displayType,
        evasObject, out uint trackId).ThrowIfFailed("Failed to set video loopback");

    return trackId;
}
/// <summary>
/// Registers the native ICE connection state callback that raises <see cref="IceConnectionStateChanged"/>.
/// </summary>
private void RegisterIceConnectionStateChangedCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcIceConnectionStateChangedCallback = (handle, state, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"Ice connection state : {state}");

        IceConnectionStateChanged?.Invoke(this,
            new WebRTCIceConnectionStateChangedEventArgs(state));
    };

    NativeWebRTC.SetIceConnectionStateChangedCb(Handle, _webRtcIceConnectionStateChangedCallback)
        .ThrowIfFailed("Failed to set ICE connection state changed callback.");
}
/// <summary>
/// Registers the native error callback that raises <see cref="ErrorOccurred"/>.
/// </summary>
private void RegisterErrorOccurredCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcErrorOccurredCallback = (handle, error, state, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"{error}, {state}");

        ErrorOccurred?.Invoke(this,
            new WebRTCErrorOccurredEventArgs((WebRTCError)error, state));
    };

    NativeWebRTC.SetErrorOccurredCb(Handle, _webRtcErrorOccurredCallback)
        .ThrowIfFailed("Failed to set error occurred callback.");
}
/// <summary>
/// Registers the native track-added callback that raises <see cref="TrackAdded"/>.
/// </summary>
private void RegisterTrackAddedCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcTrackAddedCallback = (handle, type, id, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"Track type[{type}], id[{id}]");

        var track = new MediaStreamTrack(this, type, id);
        TrackAdded?.Invoke(this, new WebRTCTrackAddedEventArgs(track));
    };

    NativeWebRTC.SetTrackAddedCb(Handle, _webRtcTrackAddedCallback)
        .ThrowIfFailed("Failed to set track added callback.");
}
/// <summary>
/// Registers the native signaling state callback that raises <see cref="SignalingStateChanged"/>.
/// </summary>
private void RegisterSignalingStateChangedCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcSignalingStateChangedCallback = (handle, state, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"Signaling state : {state}");

        SignalingStateChanged?.Invoke(this,
            new WebRTCSignalingStateChangedEventArgs(state));
    };

    NativeWebRTC.SetSignalingStateChangedCb(Handle, _webRtcSignalingStateChangedCallback)
        .ThrowIfFailed("Failed to set signaling state changed callback.");
}
/// <summary>
/// Registers the native state callback that raises <see cref="StateChanged"/>.
/// </summary>
private void RegisterStateChangedCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcStateChangedCallback = (handle, previous, current, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"{previous}, {current}");

        StateChanged?.Invoke(this,
            new WebRTCStateChangedEventArgs(previous, current));
    };

    NativeWebRTC.SetStateChangedCb(Handle, _webRtcStateChangedCallback)
        .ThrowIfFailed("Failed to set state changed callback.");
}
/// <summary>
/// Destroys the underlying native handle.
/// </summary>
/// <returns>true when the native destroy call succeeded; otherwise false.</returns>
protected override bool ReleaseHandle()
{
    // SafeHandle contract: report failure instead of throwing.
    if (NativeWebRTC.Destroy(handle) == WebRTCErrorCode.None)
    {
        return true;
    }

    Log.Debug(GetType().FullName, $"Failed to release native {GetType().Name}");
    return false;
}
/// <summary>
/// Registers the native buffer status callback that raises <see cref="BufferStatusChanged"/>.
/// </summary>
private void RegisterBufferStatusChangedCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _mediaPacketBufferStatusChangedCallback = (sourceId_, state, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"sourceId:{sourceId_}, state:{state}");

        BufferStatusChanged?.Invoke(this,
            new MediaPacketBufferStatusChangedEventArgs(sourceId_, state));
    };

    NativeWebRTC.SetBufferStateChangedCb(Handle, SourceId,
        _mediaPacketBufferStatusChangedCallback, IntPtr.Zero)
        .ThrowIfFailed("Failed to set buffer status changed callback.");
}
/// <summary>
/// Sets a turn server.
/// </summary>
/// <exception cref="ArgumentNullException">The <paramref name="turnServer"/> is null.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <since_tizen> 9 </since_tizen>
public void SetTurnServer(string turnServer)
{
    ValidateNotDisposed();

    if (turnServer == null)
    {
        throw new ArgumentNullException(nameof(turnServer), "Turn server name is null.");
    }

    NativeWebRTC.AddTurnServer(Handle, turnServer)
        .ThrowIfFailed("Failed to add turn server");
}
/// <summary>
/// Registers the native encoded-video-frame callback that raises the internal event.
/// </summary>
private void RegisterVideoFrameEncodedCallback()
{
    // The delegate is stored in a field so the GC cannot collect it
    // while the native side still holds the pointer.
    _webRtcVideoFrameEncodedCallback = (handle, type, id, packet, _) =>
    {
        Log.Info(WebRTCLog.Tag, $"Track type[{type}], id[{id}]");

        var track = new MediaStreamTrack(this, type, id);
        _videoFrameEncoded?.Invoke(this,
            new WebRTCFrameEncodedEventArgs(track, MediaPacket.From(packet)));
    };

    NativeWebRTC.SetVideoFrameEncodedCb(Handle, _webRtcVideoFrameEncodedCallback)
        .ThrowIfFailed("Failed to set video frame encoded callback.");
}
/// <summary>
/// Pushes elementary stream to decode audio or video.
/// </summary>
/// <remarks>
/// This source must be set as a source to a WebRTC and the WebRTC must be in the
/// <see cref="WebRTCState.Negotiating"/> or <see cref="WebRTCState.Playing"/> state
/// </remarks>
/// <param name="packet">The <see cref="MediaPacket"/> to decode.</param>
/// <exception cref="InvalidOperationException">
/// This source is not set as a source to a WebRTC.<br/>
/// -or-<br/>
/// The WebRTC is not in the valid state.
/// </exception>
/// <exception cref="ArgumentNullException"><paramref name="packet"/> is null.</exception>
/// <exception cref="ObjectDisposedException"><paramref name="packet"/> has been disposed.</exception>
/// <exception cref="ArgumentException">
/// <paramref name="packet"/> is neither video nor audio type.<br/>
/// -or-<br/>
/// The format of packet is not matched with the specified format in the constructor.
/// </exception>
/// <seealso cref="WebRTC.AddSource"/>
/// <seealso cref="WebRTC.AddSources"/>
/// <seealso cref="MediaPacket"/>
/// <since_tizen> 9 </since_tizen>
public void Push(MediaPacket packet)
{
    if (WebRtc == null)
    {
        Log.Error(WebRTCLog.Tag, "The source is not set as a source to a WebRTC yet.");
        throw new InvalidOperationException("The source is not set as a source to a WebRTC yet.");
    }

    if (packet == null)
    {
        Log.Error(WebRTCLog.Tag, "packet is null");
        throw new ArgumentNullException(nameof(packet));
    }

    if (packet.IsDisposed)
    {
        Log.Error(WebRTCLog.Tag, "packet is disposed");
        throw new ObjectDisposedException(nameof(packet));
    }

    if (packet.Format.Type == MediaFormatType.Text || packet.Format.Type == MediaFormatType.Container)
    {
        Log.Error(WebRTCLog.Tag, "The format of the packet is invalid : " + packet.Format.Type);
        throw new ArgumentException($"The format of the packet is invalid : {packet.Format.Type}.");
    }

    // Check the per-media configuration BEFORE the generic format match below;
    // otherwise the "Unmatched format" branch always fires first and these
    // more specific messages are unreachable.
    if (packet.Format.Type == MediaFormatType.Video && _videoMediaFormat == null)
    {
        Log.Error(WebRTCLog.Tag, "Video is not configured with the current source.");
        throw new ArgumentException("Video is not configured with the current source.");
    }

    if (packet.Format.Type == MediaFormatType.Audio && _audioMediaFormat == null)
    {
        Log.Error(WebRTCLog.Tag, "Audio is not configured with the current source.");
        throw new ArgumentException("Audio is not configured with the current source.");
    }

    if (!packet.Format.Equals(_audioMediaFormat) && !packet.Format.Equals(_videoMediaFormat))
    {
        Log.Error(WebRTCLog.Tag, "The format of the packet is invalid : Unmatched format.");
        throw new ArgumentException("The format of the packet is invalid : Unmatched format.");
    }

    WebRtc.ValidateWebRTCState(WebRTCState.Negotiating, WebRTCState.Playing);

    NativeWebRTC.PushMediaPacket(WebRtc.Handle, SourceId.Value, packet.GetHandle()).
        ThrowIfFailed("Failed to push the packet to the WebRTC");
}
/// <summary>
/// Attaches this media test source to the given WebRTC instance.
/// </summary>
/// <param name="webRtc">The WebRTC to attach to; must not be null.</param>
/// <exception cref="InvalidOperationException">The source is already attached to a WebRTC.</exception>
internal override void OnAttached(WebRTC webRtc)
{
    Debug.Assert(webRtc != null);

    if (WebRtc != null)
    {
        // Fixed message grammar: was "The source is has already been assigned ...".
        throw new InvalidOperationException("The source has already been assigned to another WebRTC.");
    }

    var type = MediaType == MediaType.Video ? MediaSourceType.VideoTest : MediaSourceType.AudioTest;

    // `{MediaType}` interpolates via ToString() already; the explicit call was redundant.
    NativeWebRTC.AddMediaSource(webRtc.Handle, type, out uint sourceId).
        ThrowIfFailed($"Failed to add {MediaType} MediaTestSource.");

    WebRtc = webRtc;
    SourceId = sourceId;
}
/// <summary>
/// Initializes a new instance of the <see cref="WebRTC"/> class.
/// </summary>
/// <feature>http://tizen.org/feature/network.wifi</feature>
/// <feature>http://tizen.org/feature/network.telephony</feature>
/// <feature>http://tizen.org/feature/network.ethernet</feature>
/// <privilege>http://tizen.org/privilege/internet</privilege>
/// <exception cref="UnauthorizedAccessException">Thrown when the permission is denied.</exception>
/// <exception cref="NotSupportedException">The required feature is not supported.</exception>
/// <since_tizen> 9 </since_tizen>
public WebRTC()
{
    // At least one network feature (Wi-Fi, telephony, or ethernet) must be available.
    bool anyNetworkFeature = Features.IsSupported(WebRTCFeatures.Wifi) ||
        Features.IsSupported(WebRTCFeatures.Telephony) ||
        Features.IsSupported(WebRTCFeatures.Ethernet);

    if (!anyNetworkFeature)
    {
        throw new NotSupportedException("Network features are not supported.");
    }

    NativeWebRTC.Create(out _handle).ThrowIfFailed("Failed to create webrtc");

    Debug.Assert(_handle != null);

    RegisterEvents();

    _source = new List<MediaSource>();
}
/// <summary>
/// Creates SDP offer asynchronously to start a new WebRTC connection to a remote peer.
/// </summary>
/// <remarks>The WebRTC must be in the <see cref="WebRTCState.Negotiating"/></remarks>
/// <returns>The SDP offer.</returns>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <seealso cref="CreateAnswerAsync()"/>
/// <since_tizen> 9 </since_tizen>
public async Task<string> CreateOfferAsync()
{
    ValidateWebRTCState(WebRTCState.Negotiating);

    // Run continuations asynchronously so the awaiting continuation does not
    // execute inline on the native SDP callback thread.
    var tcsSdpCreated = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);

    NativeWebRTC.SdpCreatedCallback cb = (handle, sdp, _) =>
    {
        tcsSdpCreated.TrySetResult(sdp);
    };

    string offer = null;

    // Keep the delegate alive until the native callback has fired; without this
    // the GC may collect it while the native side still holds the pointer.
    using (var cbKeeper = ObjectKeeper.Get(cb))
    {
        NativeWebRTC.CreateSDPOfferAsync(Handle, new SafeBundleHandle(), cb, IntPtr.Zero).
            ThrowIfFailed("Failed to create offer asynchronously");

        offer = await tcsSdpCreated.Task.ConfigureAwait(false);
        await Task.Yield();
    }

    return offer;
}
/// <summary>
/// Applies the given media format to the native media packet source.
/// </summary>
/// <param name="mediaFormat">The format to apply; a null value is ignored.</param>
private void SetMediaStreamInfo(MediaFormat mediaFormat)
{
    if (mediaFormat == null)
    {
        return;
    }

    IntPtr ptr = IntPtr.Zero;

    try
    {
        ptr = mediaFormat.AsNativeHandle();

        NativeWebRTC.SetMediaPacketSourceInfo(WebRtc.Handle, SourceId.Value, ptr)
            .ThrowIfFailed("Failed to set the media stream info");
    }
    finally
    {
        // The native handle must be released even when the native call throws.
        MediaFormat.ReleaseNativeHandle(ptr);
    }
}
/// <summary>
/// Gets all turn servers.
/// </summary>
/// <returns>The turn server list.</returns>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <since_tizen> 9 </since_tizen>
public ReadOnlyCollection<string> GetTurnServer()
{
    ValidateNotDisposed();

    var servers = new List<string>();

    // Returning true from the callback keeps the native iteration going.
    NativeWebRTC.RetrieveTurnServerCallback callback = (server, _) =>
    {
        if (!string.IsNullOrWhiteSpace(server))
        {
            servers.Add(server);
        }

        return true;
    };

    NativeWebRTC.ForeachTurnServer(Handle, callback)
        .ThrowIfFailed("Failed to retrieve turn server");

    return servers.AsReadOnly();
}
/// <summary>
/// Attaches this media packet source to the given WebRTC instance and applies
/// its configured audio/video stream formats.
/// </summary>
/// <param name="webRtc">The WebRTC to attach to; must not be null.</param>
/// <exception cref="InvalidOperationException">The source is already attached to a WebRTC.</exception>
internal override void OnAttached(WebRTC webRtc)
{
    Debug.Assert(webRtc != null);

    if (WebRtc != null)
    {
        // Fixed message grammar: was "The source is has already been assigned ...".
        Log.Error(WebRTCLog.Tag, "The source has already been assigned to another WebRTC.");
        throw new InvalidOperationException("The source has already been assigned to another WebRTC.");
    }

    NativeWebRTC.AddMediaSource(webRtc.Handle, MediaSourceType.MediaPacket, out uint sourceId).
        ThrowIfFailed("Failed to add MediaPacketSource.");

    WebRtc = webRtc;
    SourceId = sourceId;

    AudioConfiguration?.OnWebRTCSet();
    VideoConfiguration?.OnWebRTCSet();

    // Push the configured formats down to the native source (null formats are skipped).
    SetMediaStreamInfo(_audioMediaFormat);
    SetMediaStreamInfo(_videoMediaFormat);
}
/// <summary>
/// Stops the WebRTC.
/// </summary>
/// <remarks>
/// The WebRTC must be in the <see cref="WebRTCState.Negotiating"/> or <see cref="WebRTCState.Playing"/> state.<br/>
/// The WebRTC state will be <see cref="WebRTCState.Idle"/> state.<br/>
/// The user should check whether <see cref="State" /> is changed to <see cref="WebRTCState.Idle"/> state or not.
/// </remarks>
/// <exception cref="InvalidOperationException">The WebRTC is not in the valid state.</exception>
/// <exception cref="ObjectDisposedException">The WebRTC has already been disposed.</exception>
/// <since_tizen> 9 </since_tizen>
public void Stop()
{
    ValidateWebRTCState(WebRTCState.Negotiating, WebRTCState.Playing);

    NativeWebRTC.Stop(Handle)
        .ThrowIfFailed("Failed to stop the WebRTC");
}
/// <summary>
/// Unregisters the native buffer status callback previously set for this source.
/// </summary>
private void UnregisterBufferStatusChangedCallback()
{
    var result = NativeWebRTC.UnsetBufferStateChangedCb(Handle, SourceId);

    result.ThrowIfFailed("Failed to unset buffer status changed callback.");
}