public BitmapFrameD2D1(BouncingBallRenderer renderer, D2D1.DeviceContext context2D)
    : base(renderer)
{
    // Wrap the texture's DXGI surface in a Direct2D bitmap,
    // so Direct2D can render straight into the GPU texture.
    using (var surface = Texture.QueryInterface<SharpDX.DXGI.Surface>())
    {
        Bitmap = new D2D1.Bitmap1(context2D, surface);
    }
}
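// The following is a minimal, self-contained sketch (not part of this project) of how a
// D2D1.Bitmap1 wrapped around a texture's DXGI surface, as in the constructor above, can be
// used as a Direct2D render target. The device setup, texture description, sizes and colors
// below are illustrative assumptions only.
using SharpDX.Mathematics.Interop;
using D2D1 = SharpDX.Direct2D1;
using D3D11 = SharpDX.Direct3D11;
using DXGI = SharpDX.DXGI;

internal static class Direct2DBitmapSketch
{
    public static void DrawOnce()
    {
        // BGRA support is required on the D3D11 device for Direct2D interop.
        using (var device3D = new D3D11.Device(
            SharpDX.Direct3D.DriverType.Hardware,
            D3D11.DeviceCreationFlags.BgraSupport))
        using (var deviceDxgi = device3D.QueryInterface<DXGI.Device>())
        using (var device2D = new D2D1.Device(deviceDxgi))
        using (var context2D = new D2D1.DeviceContext(device2D, D2D1.DeviceContextOptions.None))
        using (var texture = new D3D11.Texture2D(device3D, new D3D11.Texture2DDescription
        {
            Width = 640,
            Height = 480,
            ArraySize = 1,
            MipLevels = 1,
            Format = DXGI.Format.B8G8R8A8_UNorm,
            SampleDescription = new DXGI.SampleDescription(1, 0),
            BindFlags = D3D11.BindFlags.RenderTarget | D3D11.BindFlags.ShaderResource,
            Usage = D3D11.ResourceUsage.Default
        }))
        using (var surface = texture.QueryInterface<DXGI.Surface>())
        using (var bitmap = new D2D1.Bitmap1(context2D, surface))
        using (var brush = new D2D1.SolidColorBrush(context2D, new RawColor4(1f, 0f, 0f, 1f)))
        {
            // Draw a single frame into the GPU texture through Direct2D.
            context2D.Target = bitmap;
            context2D.BeginDraw();
            context2D.Clear(new RawColor4(0f, 0f, 0f, 1f));
            context2D.FillEllipse(new D2D1.Ellipse(new RawVector2(320f, 240f), 100f, 100f), brush);
            context2D.EndDraw();
        }
    }
}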
private static unsafe void Render()
{
    const int frameWidth = 2560;
    const int frameHeight = 1440;
    const int frameRate = 60;

    // var options = VideoEncoderOptions.OptimizedFor(frameWidth, frameHeight, frameRate);

    var options = new VideoEncoderOptions
    {
        MaxBitsPerSecond = 12_000_000,
        MinBitsPerSecond = 10_000_000,
        MaxFramesPerSecond = frameRate
    };

    using (var sender = new ObservablePeerConnection(new PeerConnectionOptions()))
    using (var receiver = new ObservablePeerConnection(new PeerConnectionOptions { CanReceiveVideo = true }))
    {
        using (var vt = new ObservableVideoTrack(sender, options))
        {
            using (var rnd = new BouncingBallRenderer(vt, 10, new BoundingBallOptions
            {
                VideoFrameWidth = frameWidth,
                VideoFrameHeight = frameHeight,
                VideoFrameQueueSize = 2
            }))
            {
                // Wire the two in-process peers together by exchanging their
                // session descriptions and ICE candidates.
                receiver.Connect(
                    Observable.Never<DataMessage>(),
                    sender.LocalSessionDescriptionStream,
                    sender.LocalIceCandidateStream);

                sender.Connect(
                    Observable.Never<DataMessage>(),
                    receiver.LocalSessionDescriptionStream,
                    receiver.LocalIceCandidateStream);

                sender.CreateOffer();

                int remoteVideoFrameReceivedCount = 0;

                receiver.RemoteVideoFrameReceived += (pc, frame) =>
                {
                    remoteVideoFrameReceivedCount += 1;

                    // Save each received frame as a bitmap for debugging. SLOW!
                    // TODO: Doesn't work yet, H264 decoding is not yet supported, only VP8.
                    //if (frame is VideoFrameYuvAlpha yuvFrame && yuvFrame.Width == yuvFrame.StrideY)
                    //{
                    //    var span = new ReadOnlySpan<byte>(yuvFrame.DataY.ToPointer(), yuvFrame.Width * yuvFrame.Height);
                    //    using (var image = Image.LoadPixelData<Y8>(span, yuvFrame.Width, yuvFrame.Height))
                    //    {
                    //        image.Save($@"frame_{remoteVideoFrameReceivedCount:D6}.bmp");
                    //    }
                    //}
                };

                using (var clock = new PreciseWaitableClock(EventResetMode.AutoReset))
                {
                    var startTime = clock.GetCurrentTime().AddSeconds(1);
                    var nextTime = startTime;

                    // The remote peer connection is not immediately ready to receive frames,
                    // so we keep sending until it succeeds.
                    // TODO: Figure out which webrtc event can be used for this.
                    while (!Console.KeyAvailable)
                    {
                        clock.SetFutureEventTime(nextTime);
                        clock.WaitHandle.WaitOne();

                        var elapsedTime = clock.GetCurrentTime() - startTime;
                        rnd.SendFrame(elapsedTime);

                        // Advance from the previous deadline, not from "now",
                        // so scheduling jitter does not accumulate into drift.
                        nextTime = nextTime.AddSeconds(1.0 / frameRate);
                    }
                }
            }

            // The video renderer is now disposed while the video track is still encoding some textures.
            // This should not crash.
            // We need to wait a while before disposing the video track and the peer connections to check this.
            Thread.Sleep(100);
        }
    }
}
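// The pacing loop above derives each deadline from the previous one (nextTime plus 1/frameRate)
// rather than from the current time, so timing jitter does not accumulate into drift.
// Below is a minimal, dependency-free sketch of that idea using only Stopwatch and Thread.Sleep
// instead of PreciseWaitableClock; it is therefore less precise. The type name FramePacingSketch
// and its parameters are hypothetical and not part of this project.
using System;
using System.Diagnostics;
using System.Threading;

internal static class FramePacingSketch
{
    public static void Run(Action<TimeSpan> sendFrame, int frameRate, CancellationToken token)
    {
        var interval = TimeSpan.FromSeconds(1.0 / frameRate);
        var stopwatch = Stopwatch.StartNew();
        var nextTime = interval;

        while (!token.IsCancellationRequested)
        {
            // Sleep until the next deadline (unless we are already late).
            var wait = nextTime - stopwatch.Elapsed;
            if (wait > TimeSpan.Zero)
                Thread.Sleep(wait);

            sendFrame(stopwatch.Elapsed);

            // Advance from the previous deadline, never from "now".
            nextTime += interval;
        }
    }
}

// For example, FramePacingSketch.Run(rnd.SendFrame, frameRate, token) would pace calls to
// rnd.SendFrame at roughly frameRate frames per second, assuming SendFrame accepts the
// elapsed time as a TimeSpan as it does in the Render() method above.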