/// <summary>
/// Check that adding a data channel after the connection was established without SCTP
/// being negotiated throws <see cref="SctpNotNegotiatedException"/>.
/// </summary>
public async Task SctpError()
{
    // Setup
    var config = new PeerConnectionConfiguration();
    var pc1 = new PeerConnection();
    var pc2 = new PeerConnection();
    await pc1.InitializeAsync(config);
    await pc2.InitializeAsync(config);
    pc1.LocalSdpReadytoSend += async (string type, string sdp) =>
    {
        await pc2.SetRemoteDescriptionAsync(type, sdp);
        if (type == "offer")
        {
            pc2.CreateAnswer();
        }
    };
    pc2.LocalSdpReadytoSend += async (string type, string sdp) =>
    {
        await pc1.SetRemoteDescriptionAsync(type, sdp);
        if (type == "offer")
        {
            pc1.CreateAnswer();
        }
    };
    pc1.IceCandidateReadytoSend += (string candidate, int sdpMlineindex, string sdpMid)
        => pc2.AddIceCandidate(sdpMid, sdpMlineindex, candidate);
    pc2.IceCandidateReadytoSend += (string candidate, int sdpMlineindex, string sdpMid)
        => pc1.AddIceCandidate(sdpMid, sdpMlineindex, candidate);

    // Connect
    {
        var c1 = new ManualResetEventSlim(false);
        var c2 = new ManualResetEventSlim(false);
        pc1.Connected += () => c1.Set();
        pc2.Connected += () => c2.Set();
        Assert.True(pc1.CreateOffer());
        Assert.True(c1.Wait(TimeSpan.FromSeconds(60.0)));
        Assert.True(c2.Wait(TimeSpan.FromSeconds(60.0)));
        Assert.True(pc1.IsConnected);
        // BUGFIX: was asserting pc1.IsConnected twice; the second check must be on pc2.
        Assert.True(pc2.IsConnected);
    }

    // Try to add a data channel. This should fail because SCTP was not negotiated.
    Assert.ThrowsAsync<SctpNotNegotiatedException>(async () => await pc1.AddDataChannelAsync("dummy", false, false));
    Assert.ThrowsAsync<SctpNotNegotiatedException>(async () => await pc1.AddDataChannelAsync(42, "dummy", false, false));

    // Clean-up
    pc1.Close();
    pc1.Dispose();
    pc2.Close();
    pc2.Dispose();
}
/// <summary>
/// Initialize the peer connection.
/// </summary>
/// <returns>A task that completes once the peer connection is ready to be used.</returns>
public async Task InitializePeerConnectionAsync()
{
    Logger.Log("Initializing the peer connection...");

    // Cannot run in UI thread on UWP because this will initialize the global factory
    // (first library call) which needs to be done on a background thread.
    await ThreadHelper.RunOnWorkerThread(() => Library.ShutdownOptions = Library.ShutdownOptionsFlags.LogLiveObjects);

    // Initialize the native peer connection object
    try
    {
        var config = new PeerConnectionConfiguration
        {
            SdpSemantic = _sdpSemantic,
            IceServers = new List<IceServer> { _iceServer }
        };
        await _peerConnection.InitializeAsync(config);
        RaisePropertyChanged("IsPeerInitialized");
    }
    catch (Exception ex)
    {
        Logger.Log($"WebRTC native plugin init failed: {ex.Message}");
        // BUGFIX: 'throw ex;' resets the stack trace; bare 'throw;' preserves it.
        throw;
    }

    Logger.Log("Peer connection initialized.");
    OnPeerInitialized();

    //using (_sessionViewModel.GetNegotiationDeferral())
    //{
    //// As a convenience, add 1 audio and 1 video transceivers
    //// TODO - make that more flexible
    //AddPendingTransceiver(MediaKind.Audio, "audio_transceiver_0");
    //AddPendingTransceiver(MediaKind.Video, "video_transceiver_1");

    // It is CRUCIAL to add any data channel BEFORE the SDP offer is sent, if data channels are
    // to be used at all. Otherwise the SCTP will not be negotiated, and then all channels will
    // stay forever in the kConnecting state.
    // https://stackoverflow.com/questions/43788872/how-are-data-channels-negotiated-between-two-peers-with-webrtc
    await _peerConnection.AddDataChannelAsync(ChatChannelID, "chat", true, true);
    // BUGFIX: the closing brace of the commented-out 'using' block above had been left
    // uncommented, which closed the method prematurely and unbalanced the braces.
    //}

    //_videoPlayer.CurrentStateChanged += OnMediaStateChanged;
    //_videoPlayer.MediaOpened += OnMediaOpened;
    //_videoPlayer.MediaFailed += OnMediaFailed;
    //_videoPlayer.MediaEnded += OnMediaEnded;
    //_videoPlayer.RealTimePlayback = true;
    //_videoPlayer.AutoPlay = false;

    // Bind the XAML UI control (videoPlayerElement) to the MediaFoundation rendering pipeline (_videoPlayer)
    // so that the former can render in the UI the video frames produced in the background by the latter.
    //videoPlayerElement.SetMediaPlayer(_videoPlayer);
}
/// <summary>
/// Main UI thread callback on WebRTC native plugin initialized.
/// </summary>
/// <remarks>
/// Creates the chat data channel (which MUST exist before the SDP offer is created),
/// then wires up the local/remote video players and binds them to the XAML controls.
/// </remarks>
private void OnPluginPostInit()
{
    PluginInitialized = true;
    LogMessage("WebRTC native plugin initialized.");

    // It is CRUCIAL to add any data channel BEFORE the SDP offer is sent, if data channels are
    // to be used at all. Otherwise the SCTP will not be negotiated, and then all channels will
    // stay forever in the kConnecting state.
    // https://stackoverflow.com/questions/43788872/how-are-data-channels-negotiated-between-two-peers-with-webrtc
    _peerConnection.AddDataChannelAsync(ChatChannelID, "chat", true, true).ContinueWith((prevTask) =>
    {
        // NOTE(review): throwing inside a ContinueWith continuation produces a faulted
        // task that nothing awaits, so this exception is effectively unobserved (it only
        // surfaces via TaskScheduler.UnobservedTaskException). Consider logging instead.
        if (prevTask.Exception != null)
        {
            throw prevTask.Exception;
        }
        var newDataChannel = prevTask.Result;
        // Marshal back to the UI thread; only enable the chat controls once the
        // data channel object actually exists.
        RunOnMainThread(() =>
        {
            _chatDataChannel = newDataChannel;
            _chatDataChannel.MessageReceived += ChatMessageReceived;
            chatInputBox.IsEnabled = true;
            chatSendButton.IsEnabled = true;
        });
    });
    createOfferButton.IsEnabled = true;

    // Do not allow starting the local video before the MediaElement told us it was
    // safe to do so (see OnMediaOpened). Otherwise Play() will silently fail.
    startLocalVideo.IsEnabled = false;

    // Wire up the local video player and give it a 640x480 stream source.
    localVideoPlayer.CurrentStateChanged += OnMediaStateChanged;
    localVideoPlayer.MediaOpened += OnMediaOpened;
    localVideoPlayer.MediaFailed += OnMediaFailed;
    localVideoPlayer.MediaEnded += OnMediaEnded;
    localVideoPlayer.RealTimePlayback = true;
    localVideoPlayer.AutoPlay = false;
    localVideoSource = CreateVideoStreamSource(640, 480); //< TODO width,height
    localVideoPlayer.Source = MediaSource.CreateFromMediaStreamSource(localVideoSource);

    // Wire up the remote video player the same way.
    remoteVideoPlayer.CurrentStateChanged += OnMediaStateChanged;
    remoteVideoPlayer.MediaOpened += OnMediaOpened;
    remoteVideoPlayer.MediaFailed += OnMediaFailed;
    remoteVideoPlayer.MediaEnded += OnMediaEnded;
    remoteVideoSource = CreateVideoStreamSource(640, 480); //< TODO width,height
    remoteVideoPlayer.Source = MediaSource.CreateFromMediaStreamSource(remoteVideoSource);

    // Bind the XAML UI control (localVideo) to the MediaFoundation rendering pipeline (localVideoPlayer)
    // so that the former can render in the UI the video frames produced in the background by the later.
    localVideo.SetMediaPlayer(localVideoPlayer);
    remoteVideo.SetMediaPlayer(remoteVideoPlayer);
}
/// <summary>
/// Initialize the WebRTC peer session: configure ICE servers, subscribe to connection
/// events, create the screen-capture data channel and video transceiver, then create
/// the SDP offer.
/// </summary>
/// <param name="iceServers">STUN/TURN servers to use for ICE connectivity.</param>
public async Task Init(IceServerModel[] iceServers)
{
    Logger.Debug("Starting WebRTC connection.");

    IceServers = iceServers;

    PeerSession = new PeerConnection();

    // Map the app's ICE server model onto the WebRTC library's IceServer type;
    // TURN credentials default to empty strings when absent.
    var iceList = IceServers.Select(x => new IceServer()
    {
        Urls = { x.Url },
        TurnPassword = x.TurnPassword ?? string.Empty,
        TurnUserName = x.TurnUsername ?? string.Empty
    }).ToList();

    var config = new PeerConnectionConfiguration()
    {
        IceServers = iceList
    };
    await PeerSession.InitializeAsync(config);

    // FIX: removed stray empty statements (doubled semicolons) that followed two of
    // these event subscriptions.
    PeerSession.LocalSdpReadytoSend += PeerSession_LocalSdpReadytoSend;
    PeerSession.Connected += PeerConnection_Connected;
    PeerSession.IceStateChanged += PeerConnection_IceStateChanged;
    PeerSession.IceCandidateReadytoSend += PeerSession_IceCandidateReadytoSend;

    // The data channel must be added before CreateOffer() so SCTP gets negotiated.
    CaptureChannel = await PeerSession.AddDataChannelAsync("ScreenCapture", true, true);
    CaptureChannel.BufferingChanged += DataChannel_BufferingChanged;
    CaptureChannel.MessageReceived += CaptureChannel_MessageReceived;
    CaptureChannel.StateChanged += CaptureChannel_StateChanged;

    VideoSource = ExternalVideoTrackSource.CreateFromArgb32Callback(GetCaptureFrame);
    Transceiver = PeerSession.AddTransceiver(MediaKind.Video);
    PeerSession.CreateOffer();
}
/// <summary>
/// Initialize the peer connection.
/// </summary>
/// <returns>A task that completes once the peer connection is ready to be used.</returns>
public async Task InitializePeerConnectionAsync()
{
    Logger.Log("Initializing the peer connection...");

    // Cannot run in UI thread on UWP because this will initialize the global factory
    // (first library call) which needs to be done on a background thread.
    await ThreadHelper.RunOnWorkerThread(() => Library.ShutdownOptions = Library.ShutdownOptionsFlags.LogLiveObjects);

    // Initialize the native peer connection object
    try
    {
        var config = new PeerConnectionConfiguration
        {
            SdpSemantic = _sdpSemantic,
            IceServers = new List<IceServer> { _iceServer }
        };
        await _peerConnection.InitializeAsync(config);
        RaisePropertyChanged("IsPeerInitialized");
    }
    catch (Exception ex)
    {
        Logger.Log($"WebRTC native plugin init failed: {ex.Message}");
        // BUGFIX: 'throw ex;' resets the stack trace; bare 'throw;' preserves it.
        throw;
    }

    Logger.Log("Peer connection initialized.");
    OnPeerInitialized();

    // It is CRUCIAL to add any data channel BEFORE the SDP offer is sent, if data channels are
    // to be used at all. Otherwise the SCTP will not be negotiated, and then all channels will
    // stay forever in the kConnecting state.
    // https://stackoverflow.com/questions/43788872/how-are-data-channels-negotiated-between-two-peers-with-webrtc
    await _peerConnection.AddDataChannelAsync(ChatChannelID, "chat", true, true);

    //_videoPlayer.CurrentStateChanged += OnMediaStateChanged;
    //_videoPlayer.MediaOpened += OnMediaOpened;
    //_videoPlayer.MediaFailed += OnMediaFailed;
    //_videoPlayer.MediaEnded += OnMediaEnded;
    //_videoPlayer.RealTimePlayback = true;
    //_videoPlayer.AutoPlay = false;

    // Bind the XAML UI control (videoPlayerElement) to the MediaFoundation rendering pipeline (_videoPlayer)
    // so that the former can render in the UI the video frames produced in the background by the latter.
    //videoPlayerElement.SetMediaPlayer(_videoPlayer);

    //// Uncomment to initialize local transceivers and tracks.
    //if (Utils.IsFirstInstance())
    //{
    //    // Add transceivers
    //    var transceiverA = AddTransceiver(MediaKind.Audio,
    //        new TransceiverInitSettings { Name = "audio_transceiver" });
    //    var transceiverV = AddTransceiver(MediaKind.Video,
    //        new TransceiverInitSettings { Name = "video_transceiver", });

    //    // Add audio track
    //    var sourceA = await DeviceAudioTrackSource.CreateAsync(new LocalAudioDeviceInitConfig());
    //    var trackA = LocalAudioTrack.CreateFromSource(sourceA,
    //        new LocalAudioTrackInitConfig { trackName = "local_audio" });
    //    AddAudioTrack(trackA, "Audio Device");

    //    // Add the track to the transceiver.
    //    {
    //        var transceiverVM = Transceivers.First(t => t.Transceiver == transceiverA);
    //        var trackVM = transceiverVM.AvailableSenders.Last();
    //        transceiverVM.Sender = trackVM;
    //    }

    //    // Add video track
    //    var sourceV = await DeviceVideoTrackSource.CreateAsync(
    //        new LocalVideoDeviceInitConfig
    //        {
    //            videoDevice = new VideoCaptureDevice
    //            {
    //                id = @"<insert_device_id>"
    //            },
    //            videoProfileId = string.Empty,
    //            width = 640,
    //            height = 480,
    //            framerate = 30
    //        });

    //    // Crate the track
    //    var trackV = LocalVideoTrack.CreateFromSource(sourceV,
    //        new LocalVideoTrackInitConfig { trackName = "local_video" });
    //    AddVideoTrack(trackV, "Video Device");

    //    // Add the track to the transceiver.
    //    {
    //        var transceiverVM = Transceivers.First(t => t.Transceiver == transceiverV);
    //        var trackVM = transceiverVM.AvailableSenders.Last();
    //        transceiverVM.Sender = trackVM;
    //    }
    //}
}
/// <summary>
/// Check in-band data channel negotiation: once SCTP has been negotiated (forced here
/// via an out-of-band dummy channel), adding a channel on one peer must announce it on
/// the other peer, and messages must flow between the two.
/// </summary>
public async Task InBand()
{
    // Setup
    var config = new PeerConnectionConfiguration();
    var pc1 = new PeerConnection();
    var pc2 = new PeerConnection();
    await pc1.InitializeAsync(config);
    await pc2.InitializeAsync(config);
    pc1.LocalSdpReadytoSend += (string type, string sdp) =>
    {
        pc2.SetRemoteDescription(type, sdp);
        if (type == "offer")
        {
            pc2.CreateAnswer();
        }
    };
    pc2.LocalSdpReadytoSend += (string type, string sdp) =>
    {
        pc1.SetRemoteDescription(type, sdp);
        if (type == "offer")
        {
            pc1.CreateAnswer();
        }
    };
    pc1.IceCandidateReadytoSend += (string candidate, int sdpMlineindex, string sdpMid)
        => pc2.AddIceCandidate(sdpMid, sdpMlineindex, candidate);
    pc2.IceCandidateReadytoSend += (string candidate, int sdpMlineindex, string sdpMid)
        => pc1.AddIceCandidate(sdpMid, sdpMlineindex, candidate);

    // Add dummy out-of-band data channel to force SCTP negotiating.
    // Otherwise after connecting AddDataChannelAsync() will fail.
    await pc1.AddDataChannelAsync(42, "dummy", false, false);
    await pc2.AddDataChannelAsync(42, "dummy", false, false);

    // Connect
    {
        var c1 = new ManualResetEventSlim(false);
        var c2 = new ManualResetEventSlim(false);
        pc1.Connected += () => c1.Set();
        pc2.Connected += () => c2.Set();
        Assert.True(pc1.CreateOffer());
        Assert.True(c1.Wait(TimeSpan.FromSeconds(60.0)));
        Assert.True(c2.Wait(TimeSpan.FromSeconds(60.0)));
        Assert.True(pc1.IsConnected);
        // BUGFIX: was asserting pc1.IsConnected twice; the second check must be on pc2.
        Assert.True(pc2.IsConnected);
    }

    // Negotiate data channel in-band
    DataChannel data1 = null;
    DataChannel data2 = null;
    {
        var c2 = new ManualResetEventSlim(false);
        pc2.DataChannelAdded += (DataChannel channel) =>
        {
            data2 = channel;
            c2.Set();
        };
        data1 = await pc1.AddDataChannelAsync("test_data_channel", true, true);
        Assert.IsNotNull(data1);
        Assert.True(c2.Wait(TimeSpan.FromSeconds(60.0)));
        Assert.IsNotNull(data2);
        Assert.AreEqual(data1.Label, data2.Label);
        // Do not test DataChannel.ID; at this point for in-band channels the ID has not
        // been agreed upon with the remote peer yet.
    }

    // Send data
    {
        var c2 = new ManualResetEventSlim(false);
        string sentText = "Some sample text";
        byte[] msg = Encoding.UTF8.GetBytes(sentText);
        data2.MessageReceived += (byte[] _msg) =>
        {
            var receivedText = Encoding.UTF8.GetString(_msg);
            Assert.AreEqual(sentText, receivedText);
            c2.Set();
        };
        data1.SendMessage(msg);
        Assert.True(c2.Wait(TimeSpan.FromSeconds(60.0)));
    }

    // Clean-up
    pc1.Close();
    pc1.Dispose();
    pc2.Close();
    pc2.Dispose();
}
/// <summary>
/// Set up the peer connection, perform SDP/ICE signaling through <paramref name="signaler"/>,
/// and establish all data channels. The server waits for the client-created channels
/// (matched by label); the client creates the channels and sends the offer.
/// </summary>
/// <param name="signaler">Signaling transport used to exchange SDP and ICE messages.</param>
/// <param name="isServer">True when this side acts as the server (answers the offer).</param>
/// <param name="clientNodeId">On the server, the node ID of the client this instance talks to.</param>
public async Task SetupAsync(Signaler signaler, bool isServer, uint clientNodeId = 0)
{
    await pc.InitializeAsync(new PeerConnectionConfiguration
    {
        IceServers = new List<IceServer>
        {
            new IceServer { Urls = { signaler.IceServerUrl } }
        }
    });

    // Completed once the connection succeeds (client side) or faulted on ICE failure.
    // RunContinuationsAsynchronously keeps continuations off the event callback thread.
    var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);

    // Do signaling
    // https://microsoft.github.io/MixedReality-WebRTC/manual/cs/cs-signaling.html
    // https://microsoft.github.io/MixedReality-WebRTC/manual/cs/helloworld-cs-signaling-core3.html
    pc.LocalSdpReadytoSend += (SdpMessage sdpMessage) =>
    {
        // Must be a blocking Wait() here, not await — otherwise the socket gets cut off.
        // Possibly thread-safety related? (translated from the original Japanese comment)
        signaler.SendSdpAsync(sdpMessage.Type == SdpMessageType.Offer, sdpMessage.Content, clientNodeId).Wait();
    };
    pc.IceCandidateReadytoSend += (IceCandidate candidate) =>
    {
        signaler.SendIceAsync(candidate.SdpMid, candidate.SdpMlineIndex, candidate.Content, clientNodeId).Wait();
    };
    pc.IceStateChanged += (IceConnectionState state) =>
    {
        Logger.Debug("Connection", $"ICE state changed to {state}");
        // https://microsoft.github.io/MixedReality-WebRTC/versions/release/2.0/api/Microsoft.MixedReality.WebRTC.IceConnectionState.html
        if (state == IceConnectionState.Connected)
        {
            Connected = true;
        }
        if (state == IceConnectionState.Closed
            || state == IceConnectionState.Disconnected
            || state == IceConnectionState.Failed)
        {
            Connected = false;
            OnDisconnect();
        }
        if (!isServer && state == IceConnectionState.Failed)
        {
            // BUGFIX: use TrySetException instead of SetException. The ICE state may
            // reach Failed more than once, or after the connection already succeeded,
            // and SetException throws InvalidOperationException on a completed task.
            tcs.TrySetException(new ConnectionException("Failed to establish a WebRTC connection"));
        }
    };
    signaler.SdpReceived += async (bool isOffer, string sdp, uint cid) =>
    {
        if (isServer && cid != clientNodeId)
        {
            // ignore messages for other clients
            return;
        }
        await pc.SetRemoteDescriptionAsync(new SdpMessage
        {
            Type = isOffer ? SdpMessageType.Offer : SdpMessageType.Answer,
            Content = sdp
        });
        if (isOffer)
        {
            pc.CreateAnswer();
        }
    };
    signaler.IceReceived += (string sdpMid, int sdpMLineIndex, string candidate, uint cid) =>
    {
        if (isServer && cid != clientNodeId)
        {
            // ignore messages for other clients
            return;
        }
        pc.AddIceCandidate(new IceCandidate
        {
            SdpMid = sdpMid,
            SdpMlineIndex = sdpMLineIndex,
            Content = candidate
        });
        //Logger.Write((isServer ? "Server: " : "Client: ") + $"{sdpMid} {sdpMLineIndex} {candidate}");
    };

    if (isServer)
    {
        // The client creates the channels in-band; wait until every expected channel
        // shows up, matching each by its label.
        TaskCompletionSource<DataChannel>[] completionSources = channelTypes.Select(
            _ => new TaskCompletionSource<DataChannel>(TaskCreationOptions.RunContinuationsAsynchronously)
        ).ToArray();
        pc.DataChannelAdded += (dc) =>
        {
            foreach (var type in channelTypes)
            {
                if (dc.Label == channelLabels[(int)type])
                {
                    // BUGFIX: TrySetResult tolerates a duplicate notification for the
                    // same label instead of throwing.
                    completionSources[(int)type].TrySetResult(dc);
                }
            }
        };
        Logger.Debug("Connection", "Server is ready for signaling");
        await signaler.NotifyReadyAsync(clientNodeId);
        Logger.Debug("Connection", "Server: Waiting for DC");
        foreach (var type in channelTypes)
        {
            channels[(int)type] = await completionSources[(int)type].Task;
        }
    }
    else
    {
        // Define channels
        // Sync channel (unreliable)
        channels[(int)ChannelType.Sync] = await pc.AddDataChannelAsync(
            channelLabels[(int)ChannelType.Sync], ordered: false, reliable: false);
        // Message channel (reliable but order is not guaranteed)
        channels[(int)ChannelType.Control] = await pc.AddDataChannelAsync(
            channelLabels[(int)ChannelType.Control], ordered: false, reliable: true);
        // Blob channel (reliable and ordered)
        channels[(int)ChannelType.Blob] = await pc.AddDataChannelAsync(
            channelLabels[(int)ChannelType.Blob], ordered: true, reliable: true);
        // Audio channel (unreliable)
        channels[(int)ChannelType.Audio] = await pc.AddDataChannelAsync(
            channelLabels[(int)ChannelType.Audio], ordered: false, reliable: false);

        Logger.Debug("Connection", "Client: Waiting for server ready");
        await signaler.WaitReadyAsync();
        pc.CreateOffer();
    }

    foreach (var (dc, idx) in channels.Select((dc, idx) => (dc, idx)))
    {
        dc.MessageReceived += (data) =>
        {
            threadChannels[idx].Writer.TryWrite(data); // Always succeeds because the Channel is unbounded
        };
        dc.StateChanged += () =>
        {
            Logger.Debug("Connection", $"DC {(ChannelType)idx} state changed to {dc.State}");
            if (dc.State == DataChannel.ChannelState.Closing)
            {
                // Disconnect handling
                Connected = false;
            }
        };
    }

    //await Task.Delay(5000);
    if (!isServer)
    {
        // FIXME: Waiting pc.Connected not work in server (cannot establish a connection to client)
        // In server, should wait until all DataChannels are added?
        pc.Connected += () =>
        {
            // BUGFIX: TrySetResult — Connected could fire after the task was already
            // faulted by an ICE failure, and SetResult would then throw.
            tcs.TrySetResult(true);
        };
        await tcs.Task;
    }
}
/// <summary>
/// Window initialization handler: creates the peer connection (configured with a TURN
/// server), opens an out-of-band data channel, then connects to the SignalR signaling
/// hub and joins the configured room.
/// </summary>
/// <remarks>
/// 'async void' is acceptable here only because this is a top-level UI event handler.
/// </remarks>
private async void Window_Initialized(object sender, EventArgs e)
{
    _peerConnection = new PeerConnection();
    var config = new PeerConnectionConfiguration
    {
        IceServers = new List<IceServer>
        {
            new IceServer
            {
                // NOTE(review): the TURN credentials here look redacted ("******") —
                // real values must be supplied for this server to be usable.
                Urls = { "turn:35.193.0.31:3478" },
                TurnPassword = "******",
                TurnUserName = "******"
            }
        }
    };
    _peerConnection.Connected += () =>
    {
        Debugger.Log(0, "", "Peerconnection: DONE");
    };
    _peerConnection.IceStateChanged += (IceConnectionState newState) =>
    {
        Debugger.Log(0, "", $"ICE state: {newState}\n");
    };
    await _peerConnection.InitializeAsync(config);
    // Out-of-band (negotiated) channel with fixed ID 14; the remote peer is expected
    // to create a matching channel with the same ID.
    _dc = await _peerConnection.AddDataChannelAsync(14, "vzgo", true, true);
    Debugger.Log(0, "", "Peer connection initialized successfully.\n");

    _peerConnection.LocalSdpReadytoSend += Peer_LocalSdpReadytoSendAsync;
    _peerConnection.IceCandidateReadytoSend += Peer_IceCandidateReadytoSend;

    // Build the SignalR hub connection used as the signaling channel, with
    // automatic reconnect (immediately, immediately, then after 10 seconds).
    _hubConnection = new HubConnectionBuilder()
        .WithUrl(new Uri(SignallerConstants.SignallerUrl))
        .AddJsonProtocol()
        .WithAutomaticReconnect(new[] { TimeSpan.Zero, TimeSpan.Zero, TimeSpan.FromSeconds(10) })
        .Build();
    // Dispatch incoming signaling messages: offers get a remote description + answer,
    // answers only a remote description, ICE messages are parsed and added.
    _hubConnection.On<string>("Message", (message) =>
    {
        var msg = JsonConvert.DeserializeObject<SignallingMessage>(message);
        switch (msg.MessageType)
        {
            case SignallingMessage.WireMessageType.Offer:
                _peerConnection.SetRemoteDescription("offer", msg.Data);
                _peerConnection.CreateAnswer();
                break;
            case SignallingMessage.WireMessageType.Answer:
                _peerConnection.SetRemoteDescription("answer", msg.Data);
                break;
            case SignallingMessage.WireMessageType.Ice:
                var parts = msg.Data.Split(new string[] { msg.IceDataSeparator }, StringSplitOptions.RemoveEmptyEntries);
                // Note the inverted arguments for historical reasons.
                // 'candidate' is last in AddIceCandidate(), but first in the message.
                string sdpMid = parts[2];
                int sdpMlineindex = int.Parse(parts[1]);
                string candidate = parts[0];
                _peerConnection.AddIceCandidate(sdpMid, sdpMlineindex, candidate);
                break;
        }
    });
    await _hubConnection.StartAsync();
    await _hubConnection.InvokeAsync("JoinRoom", SignallerConstants.RoomName);
}
/// <summary>
/// Console entry point: creates a peer connection with a public STUN server, opens a
/// data channel, performs signaling over a named pipe, then lets the user send
/// 3000-byte messages with 'S' until 'Esc' is pressed.
/// </summary>
/// <param name="args">Command-line arguments (unused).</param>
static async Task Main(string[] args)
{
    DataChannel dataChannel = null;
    try
    {
        // Create a new peer connection automatically disposed at the end of the program
        using var pc = new PeerConnection();

        // Initialize the connection with a STUN server to allow remote access
        var config = new PeerConnectionConfiguration
        {
            IceServers = new List<IceServer>
            {
                new IceServer { Urls = { "stun:stun.l.google.com:19302" } }
            }
        };
        await pc.InitializeAsync(config);
        Console.WriteLine("Peer connection initialized.");

        // The channel must be created before the offer so SCTP gets negotiated.
        Console.WriteLine("Opening data channel");
        dataChannel = await pc.AddDataChannelAsync("data", true, true);

        // Setup signaling
        Console.WriteLine("Starting signaling...");
        var signaler = new NamedPipeSignaler.NamedPipeSignaler(pc, "testpipe");
        signaler.SdpMessageReceived += (string type, string sdp) =>
        {
            pc.SetRemoteDescription(type, sdp);
            if (type == "offer")
            {
                pc.CreateAnswer();
            }
        };
        signaler.IceCandidateReceived += (string sdpMid, int sdpMlineindex, string candidate) =>
        {
            pc.AddIceCandidate(sdpMid, sdpMlineindex, candidate);
        };
        await signaler.StartAsync();

        // Start peer connection
        pc.Connected += () => { Console.WriteLine("PeerConnection: connected."); };
        pc.IceStateChanged += (IceConnectionState newState) => { Console.WriteLine($"ICE state: {newState}"); };
        pc.DataChannelAdded += (DataChannel c) =>
        {
            Console.WriteLine("DataChannel added");
            c.MessageReceived += (byte[] _msg) => { Console.WriteLine("received {0} bytes", _msg.Length); };
        };

        if (signaler.IsClient)
        {
            Console.WriteLine("Connecting to remote peer...");
            pc.CreateOffer();
        }
        else
        {
            Console.WriteLine("Waiting for offer from remote peer...");
        }

        // BUGFIX: corrected typos in the user-facing prompt ("Press a 'S'") and in
        // the final message ("termined") below.
        Console.WriteLine("Press 'S' to send data, 'Esc' to exit ...");
        ConsoleKeyInfo key;
        while ((key = Console.ReadKey(true)).Key != ConsoleKey.Escape)
        {
            if (key.Key == ConsoleKey.S)
            {
                Console.WriteLine("Sending data");
                dataChannel.SendMessage(new byte[3000]);
            }
        }
        signaler.Stop();
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
    Console.WriteLine("Program terminated.");
}
/// <summary>
/// Console entry point: connects to a WebRTC media service over a WebSocket signaling
/// channel, receives a remote audio track, and records the loopback-captured playback
/// to "audio.mp3" until the user presses Enter.
/// </summary>
static async Task Main()
{
    // Keep the writer at method scope so it can be disposed on shutdown.
    // BUGFIX: the original never disposed the WaveWriter, so the output file
    // header was never finalized and the recording could be left corrupt.
    WaveWriter waveWriter = null;
    try
    {
        Console.WriteLine("Starting...");

        //create and Initialize capture object to record audio
        var waveFormat = new WaveFormat(44100, 32, 2, AudioEncoding.MpegLayer3);
        // BUGFIX: dispose the capture device on exit (it was never disposed).
        using WasapiCapture capture = new WasapiCapture(true, AudioClientShareMode.Shared, 100, waveFormat);
        //initialize the selected device for recording
        capture.Initialize();

        //fill ice servers here
        List<string> urls = new List<string>();

        using var pc = new PeerConnection();
        var config = new PeerConnectionConfiguration
        {
            IceServers = new List<IceServer>
            {
                new IceServer { Urls = urls, }
            },
            BundlePolicy = BundlePolicy.MaxBundle
        };
        await pc.InitializeAsync(config);
        Console.WriteLine("Peer connection initialized.");

        //create audio transceiver
        Transceiver transceiver = pc.AddTransceiver(MediaKind.Audio);
        transceiver.DesiredDirection = Transceiver.Direction.ReceiveOnly;
        Console.WriteLine("Create audio transceiver ...");

        DataChannel channel = await pc.AddDataChannelAsync("Data", true, true, cancellationToken: default);

        string url = "";
        WebSocketSharp.WebSocket signaling = new WebSocket(url);
        signaling.SslConfiguration.EnabledSslProtocols = System.Security.Authentication.SslProtocols.Tls12;
        signaling.OnMessage += async (sender, message) =>
        {
            try
            {
                //response messages may differ from service provider to another, adjust WebsocketResponse object accordingly
                var messageObject = JsonConvert.DeserializeObject<WebsocketResponse>(message.Data);
                var mess = new SdpMessage
                {
                    Content = messageObject.Data.Sdp,
                    Type = SdpMessage.StringToType("answer")
                };
                if (!string.IsNullOrEmpty(mess.Content))
                {
                    Console.WriteLine("Sdpmessage: {0}, Type: {1}", mess.Content, mess.Type);
                    await pc.SetRemoteDescriptionAsync(mess);
                    // NOTE(review): creating an answer in response to an *answer* looks
                    // suspicious (answers are normally created for offers) — kept as-is.
                    if (mess.Type == SdpMessageType.Answer)
                    {
                        bool res = pc.CreateAnswer();
                        Console.WriteLine("Answer created? {0}", res);
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        };
        signaling.OnError += (sender, e) =>
        {
            Console.WriteLine(e.Message, e.Exception);
        };
        signaling.OnOpen += (sender, e) =>
        {
            pc.CreateOffer();
            Console.WriteLine("open");
        };
        signaling.Connect();

        transceiver.Associated += (tranciever) =>
        {
            Console.WriteLine("Transivier: {0}, {1}", tranciever.Name, tranciever.StreamIDs);
        };

        pc.LocalSdpReadytoSend += (SdpMessage message) =>
        {
            Console.WriteLine(message.Content);
            //modify the offer message according to your need
            var data = new
            {
                streamId = "",
                sdp = message.Content
            };
            var payload = JsonConvert.SerializeObject(new
            {
                type = "cmd",
                transId = 0,
                name = "view",
                data = data
            });
            Console.WriteLine("Sdp offer to send: " + payload);
            signaling.Send(payload);
        };
        pc.RenegotiationNeeded += () =>
        {
            Console.WriteLine("Regotiation needed");
        };

        //when a remote audio track is added, start recording
        pc.AudioTrackAdded += (RemoteAudioTrack track) =>
        {
            //create a wavewriter to write the data to
            waveWriter = new WaveWriter("audio.mp3", capture.WaveFormat);
            //setup an eventhandler to receive the recorded data
            capture.DataAvailable += (s, e) =>
            {
                //save the recorded audio
                waveWriter.Write(e.Data, e.Offset, e.ByteCount);
            };
            //start recording
            capture.Start();
            //this should output the sound
            track.OutputToDevice(true);
        };
        pc.Connected += () =>
        {
            Console.WriteLine("Connected");
            Console.WriteLine(pc.DataChannels.Count);
        };
        pc.IceStateChanged += (IceConnectionState newState) =>
        {
            Console.WriteLine($"ICE state: {newState}");
        };

        Console.WriteLine("Press enter to stop");
        Console.ReadLine();

        //stop recording
        capture.Stop();
        pc.Close();
        signaling.Close();
        // Finalize the recorded file (flushes and closes the writer).
        waveWriter?.Dispose();
        waveWriter = null;
        // BUGFIX: fixed "Program termined." typo.
        Console.WriteLine("Program terminated.");
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        // Make sure a partially-written recording is still closed on error.
        waveWriter?.Dispose();
    }
}