/// <summary>
/// Initialises the WebRTC peer: sets up logging, the VP8 video encoder end point,
/// a cancellation source for shutdown, and the REST-based signaling client that
/// will drive peer connection creation via <see cref="CreatePeerConnection"/>.
/// </summary>
public WebRTCPeer()
{
    logger = SIPSorcery.LogFactory.CreateLogger("webrtc");
    _cts = new CancellationTokenSource();
    VideoEncoderEndPoint = new Vp8NetVideoEncoderEndPoint();

    // Signaling is exchanged over a REST server; CreatePeerConnection is invoked
    // by the signaling peer when an offer/answer exchange begins.
    _webrtcRestSignaling = new WebRTCRestSignalingPeer(
        REST_SIGNALING_SERVER,
        REST_SIGNALING_MY_ID,
        REST_SIGNALING_THEIR_ID,
        this.CreatePeerConnection);
}
/// <summary>
/// Creates and wires up a receive-only WebRTC peer connection. Incoming VP8 video
/// frames are decoded and rendered onto the Windows Forms picture box on the UI
/// thread. Audio support is present but commented out.
/// </summary>
/// <returns>A completed task wrapping the configured <see cref="RTCPeerConnection"/>.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var peerConnection = new RTCPeerConnection();
    //FileStream captureStream = new FileStream("capture.stm", FileMode.Create, FileAccess.ReadWrite);

    var videoEP = new Vp8NetVideoEncoderEndPoint();
    videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                unsafe
                {
                    fixed (byte* s = bmp)
                    {
                        // BUG FIX: use the stride supplied by the decoder instead of
                        // deriving it as bmp.Length / height — a padded row layout would
                        // mis-align every scanline with the derived value.
                        // NOTE(review): pixel format is hard-coded to 24bpp RGB regardless
                        // of the pixelFormat argument; confirm the VP8 sink always delivers
                        // 24bpp samples.
                        using (Bitmap wrapper = new Bitmap((int)width, (int)height, stride, PixelFormat.Format24bppRgb, (IntPtr)s))
                        {
                            // The IntPtr-backed Bitmap does not own its pixels, which are
                            // only pinned for the duration of this fixed block. Hand the
                            // picture box an owned copy, and dispose the previous frame's
                            // image to avoid leaking one GDI+ bitmap per frame.
                            var previous = _picBox.Image;
                            _picBox.Image = new Bitmap(wrapper);
                            previous?.Dispose();
                        }
                    }
                }
            }));
        }
    };

    // Sink (speaker) only audio end point.
    //WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);
    //MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    //peerConnection.addTrack(audioTrack);

    MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(videoTrack);

    peerConnection.OnVideoFrameReceived += (rep, ts, frame, pixelFmt) =>
    {
        Console.WriteLine($"Video frame received {frame.Length} bytes.");
        //Console.WriteLine(frame.HexStr());
        //captureStream.Write(Encoding.ASCII.GetBytes($"{frame.Length},"));
        //captureStream.Write(frame);
        //captureStream.Flush();
        videoEP.GotVideoFrame(rep, ts, frame, pixelFmt);
    };
    peerConnection.OnVideoFormatsNegotiated += (formats) => videoEP.SetVideoSinkFormat(formats.First());
    //peerConnection.OnAudioFormatsNegotiated += (formats) =>
    //    windowsAudioEP.SetAudioSinkFormat(formats.First());

    peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
    peerConnection.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"Peer connection connected changed to {state}.");
        if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed || state == RTCPeerConnectionState.disconnected)
        {
            //captureStream.Close();
        }
    };

    // Diagnostic visibility into ICE connectivity checks.
    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    {
        bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
        Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    };

    peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
        //if (media == SDPMediaTypesEnum.audio)
        //{
        //    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
        //}
    };

    return Task.FromResult(peerConnection);
}