/// <summary>
/// Creates a local video track on the given peer connection using the supplied
/// encoder options, and subscribes to encoded-frame notifications.
/// </summary>
/// <param name="peerConnection">The connection that will own this track.</param>
/// <param name="options">Encoder settings; the maximum frame rate is cached on this track.</param>
public VideoTrack(PeerConnection peerConnection, VideoEncoderOptions options)
{
    PeerConnection = peerConnection;
    FrameRate = options.MaxFramesPerSecond;

    // Register the native track first, then hook the encoded-frame callback.
    TrackId = PeerConnection.AddVideoTrack(options);
    PeerConnection.LocalVideoFrameEncoded += OnLocalVideoFrameEncoded;
}
/// <summary>
/// Observable wrapper over <see cref="VideoTrack"/>; all construction work is
/// delegated to the base class.
/// </summary>
/// <param name="peerConnection">The observable connection that owns this track.</param>
/// <param name="options">Encoder settings forwarded unchanged to the base constructor.</param>
public ObservableVideoTrack(ObservablePeerConnection peerConnection, VideoEncoderOptions options) : base(peerConnection, options) { }
/// <summary>
/// Serves a single browser client over the given websocket: exchanges WebRTC
/// signaling (SDP/ICE as JSON messages), starts a renderer thread that feeds the
/// video track, and pumps incoming messages until the socket closes or
/// cancellation is requested.
/// </summary>
/// <param name="ws">Open websocket used for JSON signaling with the client.</param>
/// <param name="cancellation">Signals application shutdown.</param>
/// <param name="logger">Sink for connection lifecycle messages.</param>
public static async Task Run(WebSocket ws, CancellationToken cancellation, ILogger logger)
{
    // Local helper: deserialize a JSON payload into a strongly typed message.
    T ToObject <T>(JToken token)
    {
        var obj = token.ToObject <T>();
        //var json = JToken.FromObject(obj);
        //Debug.Assert(JToken.DeepEquals(token, json));
        return(obj);
    }

    // Dedicated thread that renders frames and pushes them to the video track
    // (started below once the shared state exists; stopped via Interrupt/Join).
    var renderThread = new Thread(VideoRenderer);

    // PeerConnection.Configure(options => options.IsSingleThreaded = true);

    using (var pc = new ObservablePeerConnection(new PeerConnectionOptions
    {
        Name = "WebRTC Server",
        IceServers = { "stun:stun.l.google.com:19302" }
    }))
    // Forward locally generated ICE candidates and session descriptions to the
    // browser. NOTE(review): SendJsonAsync is fire-and-forget inside Subscribe —
    // its task (and any failure) is unobserved; confirm this is intentional.
    using (pc.LocalIceCandidateStream.Subscribe(ice => ws.SendJsonAsync("ice", ice, cancellation)))
    using (pc.LocalSessionDescriptionStream.Subscribe(sd => ws.SendJsonAsync("sdp", sd, cancellation)))
    using (var videoTrack = new ObservableVideoTrack(pc,
        VideoEncoderOptions.OptimizedFor(VideoFrameWidth, VideoFrameHeight, VideoFrameRate,
            VideoMotion.Medium, VideoMotion.High)))
    {
        // Streams fed from the websocket messages parsed in the loop below.
        var msgStream = Observable.Never <DataMessage>();
        var iceStream = new Subject <IceCandidate>();
        var sdpStream = new Subject <SessionDescription>();

        var sharedState = new SharedState(videoTrack, logger);
        renderThread.Start(sharedState);

        pc.Connect(msgStream, sdpStream, iceStream);
        pc.AddDataChannel(new DataChannelOptions());
        pc.CreateOffer();

        // Pump signaling messages from the client until the socket stops
        // delivering or shutdown is requested.
        var reader = new WebSocketReader(ws, cancellation);
        while (reader.CanRead && !cancellation.IsCancellationRequested)
        {
            var message = await reader.ReadJsonAsync();
            if (message == null)
            {
                break;
            }
            // NOTE(review): assumes every message has "payload" and "action"
            // properties; a message without them would throw here — confirm the
            // client protocol guarantees this.
            var payload = message["payload"];
            if (payload.Any())
            {
                switch (message["action"].Value <string>())
                {
                    case "ice":
                    {
                        iceStream.OnNext(ToObject <IceCandidate>(payload));
                        break;
                    }
                    case "sdp":
                    {
                        sdpStream.OnNext(ToObject <SessionDescription>(payload));
                        break;
                    }
                    case "pos":
                    {
                        // Mouse positions are queued for the renderer thread.
                        sharedState.MouseMessageQueue.Enqueue(ToObject <MouseMessage>(payload));
                        break;
                    }
                }
            }
        }

        logger.LogInformation(ws.CloseStatus.HasValue ? "Websocket was closed by client" : "Application is stopping...");

        // Stop the renderer before the using-block disposes the video track and
        // peer connection it is drawing into.
        renderThread.Interrupt();
        renderThread.Join();
    }
}
/// <summary>
/// Console demo: creates a sender and a receiver peer connection in the same
/// process, connects them directly (loopback signaling via Rx streams), exchanges
/// a "Hello"/"World" data-channel message, and streams a rotating test image as
/// video frames until the native message pump stops.
/// </summary>
private void Run()
{
    try
    {
        // For debugging, run everything on this thread.
        // Should never be done in production.
        // Note that webrtc callbacks are done on the signaling thread, and must return asap.
        PeerConnection.Configure(new GlobalOptions
        {
            UseWorkerThread = false,
            UseSignalingThread = false,
            ForceSoftwareVideoEncoder = true,
            MinimumLogLevel = System.Diagnostics.TraceLevel.Info,
            LogToStandardError = false,
            LogToDebugOutput = false
        });

        // Mirror native log messages to the console.
        PeerConnection.MessageLogged += (message, severity) => { severity.WriteToConsole(message); };
        Console.OutputEncoding = Encoding.UTF8;

        // Small frame size / rate keeps the software encoder cheap for the demo.
        const int frameWidth = 320;
        const int frameHeight = 180;
        const int frameRate = 10;

        // ReplaySubject so the "Hello" pushed before the channel opens is not lost.
        using (var senderOutgoingMessages = new ReplaySubject <DataMessage>())
        using (var sender = new ObservablePeerConnection(new PeerConnectionOptions { Name = "Sender" }))
        using (var receiver = new ObservablePeerConnection(new PeerConnectionOptions { Name = "Receiver", CanReceiveVideo = true }))
        using (var background = Image.Load <Argb32>("background-small.jpg"))
        using (receiver.ReceivedVideoStream.Buffer(2).Subscribe(SaveFrame))
        using (var imageFrame = new Image <Argb32>(frameWidth, frameHeight))
        using (var videoTrack = new VideoTrack(sender, VideoEncoderOptions.OptimizedFor(frameWidth, frameHeight, frameRate)))
        {
            background.Mutate(ctx => ctx.Resize(frameWidth, frameHeight));

            // Queue the first data-channel message before signaling starts.
            senderOutgoingMessages.OnNext(new DataMessage("data", "Hello"));

            // Loopback signaling: each side consumes the other's local SDP/ICE streams.
            sender.CreateOffer();
            sender.Connect(senderOutgoingMessages, receiver.LocalSessionDescriptionStream, receiver.LocalIceCandidateStream);

            // Receiver replies "World" on the same channel when it sees "Hello".
            var receiverOutgoingMessages = receiver
                .ReceivedDataStream
                .Where(msg => msg.AsText == "Hello")
                .Do(msg => Console.WriteLine($"Received message {msg.AsText}"))
                .Select(msg => new DataMessage(msg.Label, "World"));

            receiver.Connect(receiverOutgoingMessages, sender.LocalSessionDescriptionStream, sender.LocalIceCandidateStream);
            sender.AddDataChannel(new DataChannelOptions());

            Console.WriteLine("Press any key to exit");

            int localFrameIndex = 0;
            var timeout = TimeSpan.FromMilliseconds(1000.0 / frameRate);

            // Single-threaded mode: pump the native message queue ourselves,
            // sending one frame per iteration.
            //while (!Console.KeyAvailable && PeerConnection.PumpQueuedMessages(timeout))
            while (PeerConnection.PumpQueuedMessages(timeout))
            {
                var frame = imageFrame.Frames[0];
                var pixels = MemoryMarshal.Cast <Argb32, uint>(frame.GetPixelSpan());
                // Send the current pixel buffer (stride = width * 4 bytes for ARGB32),
                // then draw the next frame: background rotated a bit further each time.
                // NOTE(review): the frame is sent before the first draw, so frame 0 is blank — confirm intended.
                videoTrack.SendVideoFrame(MemoryMarshal.GetReference(pixels), frame.Width * 4, frame.Width, frame.Height, VideoFrameFormat.Argb32);
                imageFrame.Mutate(ctx => ctx.DrawImage(GraphicsOptions.Default, background).Rotate(localFrameIndex * 10).Crop(frameWidth, frameHeight));
                ++localFrameIndex;
            }

            sender.RemoveDataChannel("data");
        }
    }
    catch (Exception ex)
    {
        // Demo-level catch-all: report and fall through to the exit prompt.
        Console.WriteLine($"*** FAILURE: {ex}");
    }

    Console.WriteLine("Press ENTER to exit");
    Console.ReadLine();
}
/// <summary>
/// Registers a new video track with the native peer connection.
/// </summary>
/// <param name="options">Encoder settings; label and bit/frame-rate limits are forwarded to the native layer.</param>
/// <returns>The id returned by the native layer, after passing through <c>Native.Check</c>.</returns>
internal int AddVideoTrack(VideoEncoderOptions options)
{
    // Forward the encoder limits to the native implementation; Native.Check
    // presumably validates the returned id (error-code check) — confirm.
    return Native.Check(Native.AddVideoTrack(
        _nativePtr,
        options.Label,
        options.MinBitsPerSecond,
        options.MaxBitsPerSecond,
        options.MaxFramesPerSecond));
}
/// <summary>
/// Integration test: on Windows with NVENC hardware texture encoding available,
/// wires a sender/receiver pair with loopback signaling, renders frames through
/// the D3D11 <c>VideoRenderer</c> until the receiver sees at least one remote
/// video frame, drains the frame queue, and then verifies that disposing the
/// renderer while the track may still be encoding does not crash.
/// The test is silently skipped on other platforms or without NVENC.
/// </summary>
public void RendersAndSendsFrameUsingD3D11()
{
    bool isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
    bool hasNvEnc = PeerConnection.SupportsHardwareTextureEncoding;

    // Hardware-gated: only meaningful on Windows with NVENC support.
    if (isWindows && hasNvEnc)
    {
        PeerConnection.Configure(new GlobalOptions
        {
            UseFakeDecoders = true,
            LogToDebugOutput = false,
            MinimumLogLevel = TraceLevel.Info
        });

        using (var sender = new ObservablePeerConnection(new PeerConnectionOptions()))
        using (var receiver = new ObservablePeerConnection(new PeerConnectionOptions { CanReceiveVideo = true }))
        using (var vt = new ObservableVideoTrack(sender, VideoEncoderOptions.OptimizedFor(320, 240, 10)))
        {
            // Renderer is disposed by this inner using BEFORE the track and
            // connections above — that ordering is part of what this test checks.
            using (var rnd = new VideoRenderer(vt, new RendererOptions { VideoFrameQueueSize = 2 }))
            {
                // Wait until sender and receiver are connected,
                // signaling is complete,
                // and video track is added.
                // TODO: When using tasks for this, this test hangs when disposing!
                // ReSharper disable once InvokeAsExtensionMethod
                //var ready = Observable.Zip(
                //    receiver.ConnectionStateStream.FirstAsync(s => s == ConnectionState.Connected),
                //    sender.ConnectionStateStream.FirstAsync(s => s == ConnectionState.Connected),
                //    receiver.SignalingStateStream.FirstAsync(s => s == SignalingState.Stable),
                //    sender.SignalingStateStream.FirstAsync(s => s == SignalingState.Stable),
                //    receiver.RemoteTrackChangeStream.FirstAsync(
                //        c => !string.IsNullOrEmpty(c.TransceiverMid) &
                //             c.MediaKind == TrackMediaKind.Video &&
                //             c.ChangeKind == TrackChangeKind.Changed),
                //    (a, b, c, d, e) => true);
                //// Wait until connected and video track is ready.
                //var ev = new AutoResetEvent(false);
                //ready.Subscribe(_ => ev.Set());

                // Loopback signaling: each side consumes the other's SDP/ICE streams.
                receiver.Connect(
                    Observable.Never <DataMessage>(),
                    sender.LocalSessionDescriptionStream,
                    sender.LocalIceCandidateStream);
                sender.Connect(
                    Observable.Never <DataMessage>(),
                    receiver.LocalSessionDescriptionStream,
                    receiver.LocalIceCandidateStream);
                sender.CreateOffer();

                int remoteVideoFrameReceivedCount = 0;
                receiver.RemoteVideoFrameReceived += (pc, frame) => { remoteVideoFrameReceivedCount += 1; };

                // The remote peer connection is not immediately ready to receive frames,
                // so we keep sending until it succeeds.
                // TODO: Figure out what webrtc event can be used for this.
                while (remoteVideoFrameReceivedCount == 0)
                {
                    using (rnd.TakeNextFrameForSending())
                    {
                    }
                }

                // Continue sending until the video queue is empty
                while (rnd.VideoFrameQueueCount > 0)
                {
                    using (rnd.TakeNextFrameForSending())
                    {
                    }
                }
            }

            // The video renderer is now disposed while the video track is still encoding some textures
            // This should not crash.
            // We need to wait a while before disposing the video-track and peer-connection to check this.
            Thread.Sleep(100);
        }
    }
}