Example #1
        private static IRenderer CreateRenderer(ObservableVideoTrack videoTrack, ILogger logger)
        {
            PeerConnection.Configure(new GlobalOptions
            {
                MinimumLogLevel = TraceLevel.Info
            });

            bool isWindows     = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
            bool supportsNvEnc = PeerConnection.SupportsHardwareTextureEncoding;

            // TODO: Add support for OpenGL, and test it.
            // Maybe use https://github.com/mellinoe/veldrid
            return isWindows && supportsNvEnc
                ? (IRenderer) new BouncingBallRenderer(videoTrack, 8, new BoundingBallOptions
                {
                    VideoFrameWidth = VideoFrameWidth,
                    VideoFrameHeight = VideoFrameHeight,
                    PreviewWindowOptions = new PreviewWindowOptions
                    {
                        Width = 1920 / 2
                    },
                    TimeRulerOptions = new TimeRulerOptions()
                })
                : new ImageSharpRenderer(VideoFrameWidth, VideoFrameHeight, videoTrack);
        }
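A hedged sketch of how a caller might drive whichever renderer this factory returns. It assumes IRenderer exposes SendFrame(TimeSpan), as BouncingBallRenderer does in Example #6 below; the cancellation token and the crude Thread.Sleep pacing are illustrative (Example #6 shows precise pacing with PreciseWaitableClock).

            // Illustrative caller; assumes IRenderer exposes SendFrame(TimeSpan).
            IRenderer renderer = CreateRenderer(videoTrack, logger);

            var stopwatch = System.Diagnostics.Stopwatch.StartNew();
            while (!cancellation.IsCancellationRequested)
            {
                renderer.SendFrame(stopwatch.Elapsed);  // render and queue one frame
                Thread.Sleep(1000 / VideoFrameRate);    // crude pacing; see Example #6
            }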
Example #2
        public D3D11Renderer(ObservableVideoTrack videoTrack, RendererOptions options)
            : base(videoTrack, options)
        {
            // _factoryDWrite = new DWrite.Factory(DWrite.FactoryType.Shared);

            var device2D = new D2D1.Device(DeviceDXGI, new D2D1.CreationProperties
            {
                DebugLevel    = D2D1.DebugLevel.Warning,
                ThreadingMode = D2D1.ThreadingMode.MultiThreaded,
                Options       = D2D1.DeviceContextOptions.None
            });

            _context2D = new D2D1.DeviceContext(device2D, D2D1.DeviceContextOptions.None);

            // Load the background image
            using (var factoryWic = new WIC.ImagingFactory2())
            using (var decoder = new WIC.JpegBitmapDecoder(factoryWic))
            using (var inputStream = new WIC.WICStream(factoryWic, "background-small.jpg", NativeFileAccess.Read))
            using (var formatConverter = new WIC.FormatConverter(factoryWic))
            using (var bitmapScaler = new WIC.BitmapScaler(factoryWic))
            {
                decoder.Initialize(inputStream, WIC.DecodeOptions.CacheOnLoad);
                formatConverter.Initialize(decoder.GetFrame(0), WIC.PixelFormat.Format32bppPBGRA);
                bitmapScaler.Initialize(formatConverter, VideoFrameWidth, VideoFrameHeight,
                                        WIC.BitmapInterpolationMode.Fant);
                _backgroundBitmap = D2D1.Bitmap1.FromWicBitmap(_context2D, bitmapScaler);
            }

            // Create the ball geometry and the brush used to fill it
            _ballEllipse = new D2D1.Ellipse
            {
                RadiusX = VideoFrameWidth / 20f,
                RadiusY = VideoFrameWidth / 20f
            };

            _ballBrush = new D2D1.SolidColorBrush(_context2D, new RawColor4(1f, 1f, 0f, 1f));
        }
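The constructor above allocates several COM resources (_context2D, _backgroundBitmap, _ballBrush) that need matching disposal. A hedged sketch of that cleanup, assuming a virtual Dispose(bool) on the base Renderer class; the original snippet does not show where cleanup actually happens.

        // Hypothetical cleanup for the resources created above; the real class may
        // dispose these in its base class instead, and the Dispose(bool) override
        // shown here is an assumption about that base class.
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                _ballBrush?.Dispose();        // SolidColorBrush
                _backgroundBitmap?.Dispose(); // Bitmap1 loaded from background-small.jpg
                _context2D?.Dispose();        // DeviceContext (its D2D1.Device was a ctor local)
            }

            base.Dispose(disposing);
        }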
Example #3
        private static IRenderer CreateRenderer(ObservableVideoTrack videoTrack, ILogger logger)
        {
            bool isWindows     = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
            bool supportsNvEnc = PeerConnection.SupportsHardwareTextureEncoding;

            // TODO: Add support for OpenGL, and test it.
            // Maybe use https://github.com/mellinoe/veldrid
            return isWindows && supportsNvEnc
                ? (IRenderer) new D3D11Renderer(videoTrack, new GraphicsD3D11.RendererOptions
                {
                    VideoFrameWidth = VideoFrameWidth,
                    VideoFrameHeight = VideoFrameHeight
                })
                : new ImageSharpRenderer(VideoFrameWidth, VideoFrameHeight, videoTrack);
        }
Example #4
        public SharedState(ObservableVideoTrack videoTrack, ILogger logger)
        {
            VideoTrack = videoTrack;
            Logger     = logger;
        }
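Example #5 below also reads VideoTrack, Logger, and a MouseMessageQueue from this object, so the whole class is presumably close to the following sketch. Only the constructor appears in the original snippet; the ConcurrentQueue type is an inference from the queue's cross-thread use (enqueued by the socket loop, drained by the render thread), not confirmed by the source.

        // Inferred shape of the full class; MouseMessageQueue's concrete type
        // is an assumption based on its use in Example #5.
        public sealed class SharedState
        {
            public SharedState(ObservableVideoTrack videoTrack, ILogger logger)
            {
                VideoTrack = videoTrack;
                Logger     = logger;
            }

            public ObservableVideoTrack VideoTrack { get; }
            public ILogger Logger { get; }

            public System.Collections.Concurrent.ConcurrentQueue<MouseMessage> MouseMessageQueue { get; } =
                new System.Collections.Concurrent.ConcurrentQueue<MouseMessage>();
        }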
Example #5
        public static async Task Run(WebSocket ws, CancellationToken cancellation, ILogger logger)
        {
            T ToObject<T>(JToken token)
            {
                var obj = token.ToObject<T>();

                //var json = JToken.FromObject(obj);
                //Debug.Assert(JToken.DeepEquals(token, json));
                return obj;
            }

            var renderThread = new Thread(VideoRenderer);

            // PeerConnection.Configure(options => options.IsSingleThreaded = true);

            using (var pc = new ObservablePeerConnection(new PeerConnectionOptions
            {
                Name = "WebRTC Server",
                IceServers = { "stun:stun.l.google.com:19302" }
            }))
            using (pc.LocalIceCandidateStream.Subscribe(ice => ws.SendJsonAsync("ice", ice, cancellation)))
            using (pc.LocalSessionDescriptionStream.Subscribe(sd => ws.SendJsonAsync("sdp", sd, cancellation)))
            using (var videoTrack = new ObservableVideoTrack(pc, VideoEncoderOptions.OptimizedFor(
                VideoFrameWidth, VideoFrameHeight, VideoFrameRate, VideoMotion.Medium, VideoMotion.High)))
            {
                var msgStream = Observable.Never<DataMessage>();
                var iceStream = new Subject<IceCandidate>();
                var sdpStream = new Subject<SessionDescription>();

                var sharedState = new SharedState(videoTrack, logger);
                renderThread.Start(sharedState);

                pc.Connect(msgStream, sdpStream, iceStream);

                pc.AddDataChannel(new DataChannelOptions());

                pc.CreateOffer();

                var reader = new WebSocketReader(ws, cancellation);

                while (reader.CanRead && !cancellation.IsCancellationRequested)
                {
                    var message = await reader.ReadJsonAsync();

                    if (message == null)
                    {
                        break;
                    }

                    var payload = message["payload"];

                    if (payload != null && payload.Any())
                    {
                        switch (message["action"].Value<string>())
                        {
                            case "ice":
                                iceStream.OnNext(ToObject<IceCandidate>(payload));
                                break;

                            case "sdp":
                                sdpStream.OnNext(ToObject<SessionDescription>(payload));
                                break;

                            case "pos":
                                sharedState.MouseMessageQueue.Enqueue(ToObject<MouseMessage>(payload));
                                break;
                        }
                    }
                }

                logger.LogInformation(ws.CloseStatus.HasValue
                    ? "Websocket was closed by client"
                    : "Application is stopping...");

                renderThread.Interrupt();
                renderThread.Join();
            }
        }
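Run dispatches on JSON messages shaped like { "action": ..., "payload": ... }. For completeness, a minimal sketch of the sending side of that envelope in C#; the real client is presumably a browser script, so ClientWebSocket and the helper name here are illustrative, not part of the original project.

        // Hypothetical sender for the {action, payload} envelope consumed by Run().
        using System;
        using System.Net.WebSockets;
        using System.Text;
        using System.Threading;
        using System.Threading.Tasks;
        using Newtonsoft.Json.Linq;

        static class SignalingClient
        {
            public static async Task SendSignalAsync(
                ClientWebSocket ws, string action, object payload, CancellationToken ct)
            {
                var envelope = new JObject
                {
                    ["action"]  = action,                    // "ice", "sdp" or "pos"
                    ["payload"] = JToken.FromObject(payload) // matches message["payload"] above
                };

                var bytes = Encoding.UTF8.GetBytes(envelope.ToString());
                await ws.SendAsync(new ArraySegment<byte>(bytes),
                                   WebSocketMessageType.Text, true, ct);
            }
        }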
Example #6
        private static unsafe void Render()
        {
            const int frameWidth  = 2560;
            const int frameHeight = 1440;
            const int frameRate   = 60;

            // var options = VideoEncoderOptions.OptimizedFor(frameWidth, frameHeight, frameRate);
            var options = new VideoEncoderOptions
            {
                MaxBitsPerSecond   = 12_000_000,
                MinBitsPerSecond   = 10_000_000,
                MaxFramesPerSecond = frameRate
            };

            using (var sender = new ObservablePeerConnection(new PeerConnectionOptions()))
            using (var receiver = new ObservablePeerConnection(new PeerConnectionOptions
            {
                CanReceiveVideo = true
            }))
            using (var vt = new ObservableVideoTrack(sender, options))
            {
                using (var rnd = new BouncingBallRenderer(vt, 10, new BoundingBallOptions
                {
                    VideoFrameWidth = frameWidth,
                    VideoFrameHeight = frameHeight,
                    VideoFrameQueueSize = 2
                }))
                {
                    receiver.Connect(
                        Observable.Never<DataMessage>(),
                        sender.LocalSessionDescriptionStream,
                        sender.LocalIceCandidateStream);

                    sender.Connect(
                        Observable.Never<DataMessage>(),
                        receiver.LocalSessionDescriptionStream,
                        receiver.LocalIceCandidateStream);

                    sender.CreateOffer();

                    int remoteVideoFrameReceivedCount = 0;

                    receiver.RemoteVideoFrameReceived += (pc, frame) =>
                    {
                        remoteVideoFrameReceivedCount += 1;

                        // Save received frames as bitmaps for debugging. SLOW!
                        // TODO: Doesn't work yet, H264 decoding not yet supported, only VP8
                        //if (frame is VideoFrameYuvAlpha yuvFrame && yuvFrame.Width == yuvFrame.StrideY)
                        //{
                        //    var span = new ReadOnlySpan<byte>(yuvFrame.DataY.ToPointer(), yuvFrame.Width * yuvFrame.Height);
                        //    using (var image = Image.LoadPixelData<Y8>(span, yuvFrame.Width, yuvFrame.Height))
                        //    {
                        //        image.Save($@"frame_{remoteVideoFrameReceivedCount:D000000}.bmp");
                        //    }
                        //}
                    };

                    using (var clock = new PreciseWaitableClock(EventResetMode.AutoReset))
                    {
                        var startTime = clock.GetCurrentTime().AddSeconds(1);

                        var nextTime = startTime;

                        // Send one frame per tick until a key is pressed. The remote peer
                        // connection is not immediately ready to receive frames, so the
                        // earliest frames may be dropped.
                        // TODO: Figure out what webrtc event can be used for this.
                        while (!Console.KeyAvailable)
                        {
                            clock.SetFutureEventTime(nextTime);

                            clock.WaitHandle.WaitOne();

                            var elapsedTime = clock.GetCurrentTime() - startTime;
                            rnd.SendFrame(elapsedTime);

                            nextTime = nextTime.AddSeconds(1.0 / frameRate);
                        }
                    }
                }

                // The video renderer is now disposed while the video track is still encoding
                // some textures. This should not crash. We need to wait a while before
                // disposing the video track and peer connection to check this.
                Thread.Sleep(100);
            }
        }
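The pacing loop above schedules every frame at an absolute time (nextTime advances by exactly 1/frameRate), so timer jitter does not accumulate. For comparison, a rough BCL-only sketch of the same idea; it is less precise, since Thread.Sleep rounds to the scheduler quantum, which is presumably why the example uses PreciseWaitableClock instead.

            // Drift-free pacing with standard .NET types only (illustrative).
            var sw = System.Diagnostics.Stopwatch.StartNew();
            var next = TimeSpan.FromSeconds(1);                 // first frame after 1 s, as above

            while (!Console.KeyAvailable)
            {
                var wait = next - sw.Elapsed;
                if (wait > TimeSpan.Zero)
                    Thread.Sleep(wait);                         // coarse: ~15 ms granularity

                rnd.SendFrame(sw.Elapsed);                      // same call as in the loop above
                next += TimeSpan.FromSeconds(1.0 / frameRate);  // absolute schedule, no drift
            }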
Example #7
        public void RendersAndSendsFrameUsingD3D11()
        {
            bool isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
            bool hasNvEnc  = PeerConnection.SupportsHardwareTextureEncoding;

            if (isWindows && hasNvEnc)
            {
                PeerConnection.Configure(new GlobalOptions
                {
                    UseFakeDecoders  = true,
                    LogToDebugOutput = false,
                    MinimumLogLevel  = TraceLevel.Info
                });

                using (var sender = new ObservablePeerConnection(new PeerConnectionOptions()))
                using (var receiver = new ObservablePeerConnection(new PeerConnectionOptions
                {
                    CanReceiveVideo = true
                }))
                using (var vt = new ObservableVideoTrack(sender, VideoEncoderOptions.OptimizedFor(320, 240, 10)))
                {
                    using (var rnd = new VideoRenderer(vt, new RendererOptions
                    {
                        VideoFrameQueueSize = 2
                    }))
                    {
                        // Wait until sender and receiver are connected,
                        // signaling is complete,
                        // and video track is added.

                        // TODO: When using tasks for this, this test hangs when disposing!

                        // ReSharper disable once InvokeAsExtensionMethod
                        //var ready = Observable.Zip(
                        //    receiver.ConnectionStateStream.FirstAsync(s => s == ConnectionState.Connected),
                        //    sender.ConnectionStateStream.FirstAsync(s => s == ConnectionState.Connected),
                        //    receiver.SignalingStateStream.FirstAsync(s => s == SignalingState.Stable),
                        //    sender.SignalingStateStream.FirstAsync(s => s == SignalingState.Stable),
                        //    receiver.RemoteTrackChangeStream.FirstAsync(
                        //        c => !string.IsNullOrEmpty(c.TransceiverMid) &&
                        //             c.MediaKind == TrackMediaKind.Video &&
                        //             c.ChangeKind == TrackChangeKind.Changed),
                        //    (a, b, c, d, e) => true);
                        //// Wait until connected and video track is ready.
                        //var ev = new AutoResetEvent(false);
                        //ready.Subscribe(_ => ev.Set());

                        receiver.Connect(
                            Observable.Never<DataMessage>(),
                            sender.LocalSessionDescriptionStream,
                            sender.LocalIceCandidateStream);

                        sender.Connect(
                            Observable.Never<DataMessage>(),
                            receiver.LocalSessionDescriptionStream,
                            receiver.LocalIceCandidateStream);

                        sender.CreateOffer();

                        int remoteVideoFrameReceivedCount = 0;

                        receiver.RemoteVideoFrameReceived += (pc, frame) =>
                        {
                            remoteVideoFrameReceivedCount += 1;
                        };

                        // The remote peer connection is not immediately ready to receive frames,
                        // so we keep sending until it succeeds.
                        // TODO: Figure out what webrtc event can be used for this.
                        while (remoteVideoFrameReceivedCount == 0)
                        {
                            using (rnd.TakeNextFrameForSending())
                            {
                            }
                        }

                        // Continue sending until the video queue is empty
                        while (rnd.VideoFrameQueueCount > 0)
                        {
                            using (rnd.TakeNextFrameForSending())
                            {
                            }
                        }
                    }

                    // The video renderer is now disposed while the video track is still encoding
                    // some textures. This should not crash. We need to wait a while before
                    // disposing the video track and peer connection to check this.
                    Thread.Sleep(100);
                }
            }
        }