/// <summary>
/// Generates an SDP answer for the incoming offer via the local peer and sends it
/// back over the signaling connection.
/// </summary>
/// <param name="args">Offer details, including whether the remote peer supports insertable streams.</param>
public async Task RequestSdpAnswer(SdpAnswerRequestArgs args)
{
    var answer = _peer.GetSdpAnswer(args);

    // Fixed misspelled local ("useInsetableStreams" -> "useInsertableStreams").
    bool useInsertableStreams = args.RemotePeerSupportsInsertableStreams;

    // Frames carry an appended timestamp only when insertable streams are in use,
    // so the peer must only attempt extraction in that case.
    _peer.ExtractTimestampFromFrame = useInsertableStreams;

    await _connection.SendSdpAnswer(new SdpAnswerResponseArgs
    {
        ConnectionId = _peer.ConnectionId,
        SdpAnswer = answer.sdp,
        UseInsertableStreams = useInsertableStreams
    });
}
/// <summary>
/// Asks the browser-side WebRTC handler (via JS interop) for an SDP answer to the
/// incoming offer and forwards it to the virtual studio signaling connection.
/// </summary>
/// <param name="args">Offer details, including whether the remote peer supports insertable streams.</param>
public async Task RequestSdpAnswer(SdpAnswerRequestArgs args)
{
    Console.WriteLine("RequestSdpAnswer");

    var supportsInsertableStreams = await jsRuntime.InvokeAsync <bool>(
        "WebRtcHandlerManager.areInsertableStreamsSupported", handlerId);

    // Insertable streams can only be used when BOTH ends support them. Previously the
    // raw remote flag was passed to the JS handler while the combined flag was reported
    // back in the response — the two must agree, so compute the flag once and reuse it.
    bool useInsertableStreams = args.RemotePeerSupportsInsertableStreams && supportsInsertableStreams;

    var sdpAnswer = await jsRuntime.InvokeAsync <string>(
        "WebRtcHandlerManager.getSdpAnswer",
        handlerId, args.ConnectionId, args.SdpOffer,
        useInsertableStreams, receivingVideoElement, rtpTimestamp, objRef);

    await virtualStudioConnection.SendSdpAnswer(new SdpAnswerResponseArgs
    {
        ConnectionId = args.ConnectionId,
        SdpAnswer = sdpAnswer,
        UseInsertableStreams = useInsertableStreams
    });
}
/// <summary>
/// Hands the incoming SDP offer to the native WebRTC client; when the client produces
/// an answer the callback relays it to the virtual studio signaling connection.
/// </summary>
/// <param name="args">Carries the connection id and the remote SDP offer.</param>
/// <returns>A completed task — the actual answer is delivered asynchronously from the callback.</returns>
public Task RequestSdpAnswer(SdpAnswerRequestArgs args)
{
    connectionId = args.ConnectionId;
    webRtcClient.ReceiveOffer(
        args.ConnectionId,
        new SessionDescription(SessionDescription.SdpType.Offer, args.SdpOffer),
        (sdpAnswer, error) =>
        {
            // NOTE(review): the 'error' argument is ignored — if the client reports a
            // failure, sdpAnswer may not be valid here. Confirm the callback contract.
            // SendSdpAnswer returns a Task; previously it was silently dropped. The
            // discard makes the fire-and-forget intentional instead of an unobserved Task.
            _ = virtualStudioConnection.SendSdpAnswer(new SdpAnswerResponseArgs
            {
                ConnectionId = args.ConnectionId,
                SdpAnswer = sdpAnswer.Description,
                // This native path never appends frame timestamps.
                UseInsertableStreams = false
            });
        });
    return Task.CompletedTask;
}
/// <summary>
/// Relays an SDP answer request to every connected WebRTC client, then raises
/// <see cref="SdpAnswerRequestReceived"/> for local subscribers.
/// </summary>
/// <param name="args">The SDP answer request to forward.</param>
private async Task RequestSdpAnswer(SdpAnswerRequestArgs args)
{
    // Forward to all clients first; only notify local listeners once that completes.
    await InvokeOnClients(client => client.RequestSdpAnswer(args));
    SdpAnswerRequestReceived?.Invoke(this, args);
}
/// <summary>
/// Builds an SDP answer for the given offer: creates a receive-only peer connection,
/// wires frame/ICE/state callbacks that forward to <c>_listener</c>, applies the remote
/// offer, and returns the locally generated (and already set) answer.
/// </summary>
/// <param name="args">Carries the connection id and the remote SDP offer text.</param>
/// <returns>The local SDP answer, already applied as the local description.</returns>
public RTCSessionDescriptionInit GetSdpAnswer(SdpAnswerRequestArgs args)
{
    ConnectionId = args.ConnectionId;
    RTCConfiguration config = new RTCConfiguration { X_UseRtpFeedbackProfile = true };
    var pc = new RTCPeerConnection(config);

    // Add local receive only tracks. This ensures that the SDP answer includes only the codecs we support.
    if (!_noAudio)
    {
        MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) }, MediaStreamStatusEnum.RecvOnly);
        pc.addTrack(audioTrack);
    }

    // MediaStreamTrack videoTrack = new MediaStreamTrack(new VideoFormat(96, "VP8", 90000, "x-google-max-bitrate=5000000"), MediaStreamStatusEnum.RecvOnly);
    // pc.OnVideoFrameReceived += _videoSink.GotVideoFrame;
    // pc.OnVideoFormatsNegotiated += (formats) => _videoSink.SetVideoSinkFormat(formats.First());

    pc.OnVideoFrameReceived += (endpoint, rtpTimestampMs, frame, format) =>
    {
        int frameLength = frame.Length;
        // NOTE(review): multiplying by 100 treats rtpTimestampMs as 10 µs units when
        // converting to DateTime ticks (100 ns each); if the value really is in
        // milliseconds the factor should be 10000 — confirm against the event docs.
        // (Used only for this debug log line, not for the forwarded timestamps.)
        string rtpTimestamp = new DateTime(rtpTimestampMs * 100).ToString("dd.MM.yyyy HH:mm:ss,ffff");
        string timestamp = "none";
        long timestampMs = 0;
        if (ExtractTimestampFromFrame)
        {
            // The sender appends an 8-byte timestamp to each frame; strip it from the
            // payload and decode it (bytes are reversed before ToInt64, so the trailer
            // is in the opposite endianness to the host).
            frameLength -= 8;
            var span = new Span <byte>(frame, frameLength, 8);
            span.Reverse();
            timestampMs = BitConverter.ToInt64(span);
            // Sanity bound (2^42 ms since the Unix epoch ≈ year 2109) guards against
            // garbage trailers producing absurd timestamps.
            if (timestampMs > 0 && timestampMs < 4398046511104)
            {
                timestamp = DateTime.UnixEpoch.AddMilliseconds(timestampMs).ToString("dd.MM.yyyy HH:mm:ss,ffff");
            }
        }
        Console.WriteLine($"On frame received: byte[{frame.Length}], rtpTs: {rtpTimestamp} extractedTs: {timestamp})");
        // Forward only the (possibly trimmed) payload to the registered listener.
        _listener.OnFrameReceived(this, new Memory <byte>(frame, 0, frameLength), rtpTimestampMs, timestampMs);
    };

    pc.onicecandidate += (iceCandidate) =>
    {
        Console.WriteLine("On ice candidate");
        _listener.OnIceCandidate(this, iceCandidate);
    };

    pc.onconnectionstatechange += (state) =>
    {
        Console.WriteLine($"Peer connection state change to {state}.");
        // A failed connection is closed before the listener is notified of the state.
        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        _listener.OnConnectionStateChanged(this, state);
    };

    pc.OnSendReport += (media, sr) => Console.WriteLine($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state change to {state}.");

    var sdpOffer = SDP.ParseSDPDescription(args.SdpOffer);
    //sdpOffer.Media.FirstOrDefault()?.
    // Mirror the offer's H264 payload format in our receive-only video track so the
    // answer advertises the exact format the remote intends to send.
    // NOTE(review): videoMedia comes from FirstOrDefault and is dereferenced without a
    // null check — an offer with no video m-line (or no "H264/90000" rtpmap) throws
    // here. Confirm whether callers guarantee an H264 video section.
    var videoMedia = sdpOffer.Media.FirstOrDefault(m => m.Media == SDPMediaTypesEnum.video);
    var h264VideoFormat = videoMedia.MediaFormats.Values.First(m => m.Rtpmap == "H264/90000").ToVideoFormat();
    var videoTrack = new MediaStreamTrack(h264VideoFormat, MediaStreamStatusEnum.RecvOnly);
    pc.addTrack(videoTrack);
    //var track = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 102, "H264", 90000) });
    //pc.addTrack(track);

    // Standard offer/answer exchange: apply the remote offer, then create and set the
    // local answer. NOTE(review): both set*Description results are captured but never
    // inspected — a rejected description goes unnoticed here.
    var setRemoteDescriptionResult = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.offer, sdp = args.SdpOffer });
    //MediaStreamTrack videoTrack = new MediaStreamTrack(new VideoFormat(VideoCodecsEnum.H264, 102), MediaStreamStatusEnum.RecvOnly);
    //pc.addTrack(videoTrack);
    var answer = pc.createAnswer();
    var setLocalDescriptionResult = pc.setLocalDescription(answer);
    Console.WriteLine("SDP Offer:\n" + args.SdpOffer);
    Console.WriteLine("SDP Answer 2:\n" + answer.sdp);
    _peerConnection = pc;
    return(answer);
}