/// <summary>
/// Creates a WebRTC peer connection for a newly connected web socket client and sends it an
/// SDP offer. The connection is receive-only for audio; incoming RTP audio packets are handed
/// to a Windows audio end point for playback on the default speaker device.
/// </summary>
/// <param name="context">The web socket context of the remote client the offer is sent to.</param>
/// <returns>A task resolving to the newly created peer connection.</returns>
private static async Task<RTCPeerConnection> SendSDPOffer(WebSocketContext context)
{
    logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

    var pc = new RTCPeerConnection(null);

    // Sink (speaker) only audio end point: no capture device, playback enabled.
    var speakerSink = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

    var audioTrack = new MediaStreamTrack(speakerSink.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    pc.addTrack(audioTrack);

    pc.OnAudioFormatsNegotiated += (audioFormats) => speakerSink.SetAudioSinkFormat(audioFormats.First());

    // RTCP diagnostics plus ICE/connection state logging.
    pc.OnReceiveReport += RtpSession_OnReceiveReport;
    pc.OnSendReport += RtpSession_OnSendReport;
    pc.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");

    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection connected changed to {state}.");

        if (state == RTCPeerConnectionState.connected)
        {
            await speakerSink.StartAudio();
        }
        else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
        {
            // Detach the RTCP diagnostics before shutting down the audio device.
            pc.OnReceiveReport -= RtpSession_OnReceiveReport;
            pc.OnSendReport -= RtpSession_OnSendReport;
            await speakerSink.CloseAudio();
        }
    };

    // Forward received audio RTP packets to the speaker sink for decode and playback.
    pc.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        if (media == SDPMediaTypesEnum.audio)
        {
            speakerSink.GotAudioRtp(
                rep,
                rtpPkt.Header.SyncSource,
                rtpPkt.Header.SequenceNumber,
                rtpPkt.Header.Timestamp,
                rtpPkt.Header.PayloadType,
                rtpPkt.Header.MarkerBit == 1,
                rtpPkt.Payload);
        }
    };

    var offerSdp = pc.createOffer(null);
    await pc.setLocalDescription(offerSdp);

    logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
    logger.LogDebug(offerSdp.sdp);

    context.WebSocket.Send(offerSdp.sdp);

    return pc;
}
/// <summary>
/// Creates a WebRTC peer connection that captures audio from the default Windows microphone
/// and sends it to the remote peer (send-only audio), using a STUN server for ICE.
/// </summary>
/// <returns>A task resolving to the newly created peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var pc = new RTCPeerConnection(config);

    // Microphone capture end point; encoded samples are pushed straight to the peer connection.
    var micSource = new WindowsAudioEndPoint(new AudioEncoder());
    micSource.OnAudioSourceEncodedSample += pc.SendAudio;

    var audioTrack = new MediaStreamTrack(micSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(audioTrack);

    pc.OnAudioFormatsNegotiated += (audioFormats) => micSource.SetAudioSourceFormat(audioFormats.First());

    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        switch (state)
        {
            case RTCPeerConnectionState.connected:
                await micSource.StartAudio();
                break;
            case RTCPeerConnectionState.failed:
                pc.Close("ice disconnection");
                break;
            case RTCPeerConnectionState.closed:
                await micSource.CloseAudio();
                break;
        }
    };

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(pc);
}
/// <summary>
/// Creates a WebRTC peer connection that receives audio and VP8 video from the remote peer.
/// Decoded video frames are rendered into the Windows Forms picture box and audio is played
/// on the default Windows speaker device.
/// </summary>
/// <returns>A task resolving to the newly created peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var peerConnection = new RTCPeerConnection(null);

    var videoEP = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();
    //var videoEP = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
    //var videoEP = new FFmpegVideoEndPoint();
    videoEP.RestrictFormats(format => format.Codec == VideoCodecsEnum.VP8);

    videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        // Marshal onto the UI thread before touching the picture box.
        _form.BeginInvoke(new Action(() =>
        {
            unsafe
            {
                fixed (byte* s = bmp)
                {
                    // BUG FIX: the Bitmap(w, h, stride, fmt, scan0) constructor wraps the
                    // supplied pointer WITHOUT copying the pixel data. The previous code
                    // assigned that wrapper to _picBox.Image and let it outlive the fixed
                    // block, i.e. it was used after the array was unpinned and while the
                    // decoder may reuse the buffer. Deep-copy the frame before the pointer
                    // goes out of scope.
                    using (var wrapper = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s))
                    {
                        var previousFrame = _picBox.Image;
                        _picBox.Image = new Bitmap(wrapper);
                        // Dispose the replaced frame to avoid leaking GDI handles at frame rate.
                        previousFrame?.Dispose();
                    }
                }
            }
        }));
    };

    // Sink (speaker) only audio end point: no capture device, playback enabled.
    WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

    MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(audioTrack);
    MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(videoTrack);

    peerConnection.OnVideoFrameReceived += videoEP.GotVideoFrame;
    peerConnection.OnVideoFormatsNegotiated += (formats) => videoEP.SetVideoSinkFormat(formats.First());
    peerConnection.OnAudioFormatsNegotiated += (formats) => windowsAudioEP.SetAudioSinkFormat(formats.First());

    peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection connected changed to {state}.");

        if (state == RTCPeerConnectionState.connected)
        {
            await windowsAudioEP.StartAudio();
        }
        else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
        {
            await windowsAudioEP.CloseAudio();
        }
    };

    //peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    //{
    //    bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
    //    Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    //};

    // Forward received audio RTP packets to the speaker sink for decode and playback.
    // (Video frames are delivered via OnVideoFrameReceived above, not raw RTP.)
    peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        if (media == SDPMediaTypesEnum.audio)
        {
            windowsAudioEP.GotAudioRtp(
                rep,
                rtpPkt.Header.SyncSource,
                rtpPkt.Header.SequenceNumber,
                rtpPkt.Header.Timestamp,
                rtpPkt.Header.PayloadType,
                rtpPkt.Header.MarkerBit == 1,
                rtpPkt.Payload);
        }
    };

    return Task.FromResult(peerConnection);
}