// Lazily builds the objects needed for frame transmission. Safe to call
// repeatedly: each member is only created when it is still null.
// - emptyVideoFrame: zero-sized frame used to flush the async sender.
// - _send: NDI sender (shared game-view sender, or a named sender).
// - _converter: pixel-format converter bound to the shared resources.
// - _onReadback: cached delegate for the GPU readback callback.
void PrepareInternalObjects()
{
    emptyVideoFrame = new Interop.VideoFrame
    {
        Width = 0,
        Height = 0,
        LineStride = 0,
        FourCC = _enableAlpha ? Interop.FourCC.UYVA : Interop.FourCC.UYVY,
        FrameFormat = Interop.FrameFormat.Progressive,
        Data = IntPtr.Zero,
        Metadata = IntPtr.Zero
    };

    if (_send == null)
    {
        if (_captureMethod == CaptureMethod.GameView)
            _send = SharedInstance.GameViewSend;
        else
            _send = Interop.Send.Create(_ndiName);
    }

    if (_converter == null) _converter = new FormatConverter(_resources);

    if (_onReadback == null) _onReadback = OnReadback;
}
// Completion callback for the GPU readback: wraps the pixel data in an
// NDI video frame and hands it to the async sender.
unsafe void OnReadback(AsyncGPUReadbackRequest request)
{
    // Metadata retrieval. The using block guarantees the dequeued entry
    // is released on every exit path, including the early returns below.
    using (var metadata = _metadataQueue.Dequeue())
    {
        // Ignore errors.
        if (request.hasError) return;

        // Ignore it if the NDI object has been already disposed.
        if (_send == null || _send.IsInvalid || _send.IsClosed) return;

        // Pixel format (depending on alpha mode)
        var fourcc = _enableAlpha ? Interop.FourCC.UYVA : Interop.FourCC.UYVY;

        // Readback data retrieval
        var data = request.GetData<byte>();
        var pdata = NativeArrayUnsafeUtility.GetUnsafeReadOnlyPtr(data);

        // Data size verification: drop frames whose payload doesn't match
        // the expected size (e.g. a stale readback after a resize).
        if (data.Length / sizeof(uint) !=
            Util.FrameDataCount(_width, _height, _enableAlpha)) return;

        // Frame data setup
        var frame = new Interop.VideoFrame
        {
            Width = _width,
            Height = _height,
            LineStride = _width * 2,
            // Fix: reuse the local computed above. The original computed
            // `fourcc` and then never used it, repeating the ternary here.
            FourCC = fourcc,
            FrameFormat = Interop.FrameFormat.Progressive,
            Data = (System.IntPtr)pdata,
            Metadata = metadata
        };

        // Send via NDI
        _send.SendVideoAsync(frame);
    }
}
// Completion callback for the GPU readback (pooled-buffer variant).
// Wraps the readback entry in an NDI frame and starts an async send.
// The entry is kept alive until the *next* send call, because the NDI
// async API keeps using the buffer until the following frame is queued
// (see the mark/free-marked dance at the bottom).
unsafe void OnReadback(AsyncGPUReadbackRequest req)
{
    // Readback entry retrieval: map the readback's native array back to
    // the pool entry that owns it. Null means the pool no longer tracks
    // this buffer, so there is nothing to send or release.
    var entry = _pool.FindEntry(req.GetData<byte>());
    if (entry == null) return;

    // Invalid state detection
    if (req.hasError || _send == null || _send.IsInvalid || _send.IsClosed)
    {
        // Do nothing but release the readback entry.
        _pool.Free(entry);
        return;
    }

    // Frame data
    var frame = new Interop.VideoFrame
    {
        Width = entry.Width,
        Height = entry.Height,
        LineStride = entry.Stride,
        FourCC = entry.FourCC,
        FrameFormat = Interop.FrameFormat.Progressive,
        Data = entry.ImagePointer,
        Metadata = entry.MetadataPointer
    };

    // Async-send initiation
    // This causes a synchronization for the last frame -- i.e., It locks
    // the thread if the last frame is still under processing.
    _send.SendVideoAsync(frame);

    // We don't need the last frame anymore. Free it.
    // NOTE(review): order matters — this must run *after* the
    // SendVideoAsync above, which synchronizes with the previous send.
    _pool.FreeMarkedEntry();

    // Mark this frame to get freed in the next frame.
    _pool.Mark(entry);
}
// Completion callback for the GPU readback (ping-pong buffer variant).
// Copies the readback into one of two pinned managed buffers and sends
// it via NDI. Two alternating buffers are required because NDIlib's
// async send keeps referencing the previous frame's memory until the
// next send call (see the NDI SDK note below).
unsafe void OnReadback(AsyncGPUReadbackRequest request)
{
    // Metadata retrieval. The using block releases the dequeued entry on
    // every exit path, including all the early returns below.
    using (var metadata = _metadataQueue.Dequeue())
    {
        // Ignore errors.
        if (request.hasError) return;

        // Ignore it if the NDI object has been already disposed,
        // or video output is disabled.
        if (_send == null || _send.IsInvalid || _send.IsClosed ||
            !_enableVideoFrames) return;

        // Pixel format (depending on alpha mode)
        var fourcc = _enableAlpha ? Interop.FourCC.UYVA : Interop.FourCC.UYVY;

        // Readback data retrieval
        var data = request.GetData<byte>();
        if (data.Length <= 0) return;

        // Data size verification -- hoisted *before* the buffer logic.
        // The original ran this check after reallocating the pinned
        // buffers, copying the data, and flipping the ping index, so a
        // stale frame (e.g. after a resolution change) mutated state and
        // did a full copy only to be dropped. Now it's a pure early-out.
        if (data.Length / sizeof(uint) !=
            Util.FrameDataCount(_width, _height, _enableAlpha)) return;

        // NDI SDK Documentation p.21 re: send_video_v2_async
        //
        // If you call this and then free the pointer, your application
        // will most likely crash in an NDI thread because the SDK is
        // still using the video frame that was passed to the call.
        // One possible solution is to ping pong between two buffers on
        // alternating calls to NDIlib_send_send_video_v2_async.

        // Lazy allocation of the two pinned ping-pong buffers.
        if (videoFrameBuffer1 == null || videoFrameBuffer1.Length <= 0)
        {
            videoFrameBuffer1 = new byte[data.Length];
            bufferHandle1 = GCHandle.Alloc(videoFrameBuffer1, GCHandleType.Pinned);
        }
        if (videoFrameBuffer2 == null || videoFrameBuffer2.Length <= 0)
        {
            videoFrameBuffer2 = new byte[data.Length];
            bufferHandle2 = GCHandle.Alloc(videoFrameBuffer2, GCHandleType.Pinned);
        }

        // Handle frame size change: flush the async sender with an empty
        // frame (so it stops referencing the old buffer), then unpin and
        // re-pin a buffer of the new size.
        if (videoFrameBuffer1.Length != data.Length)
        {
            _send.SendVideoAsync(emptyVideoFrame);
            bufferHandle1.Free();
            videoFrameBuffer1 = new byte[data.Length];
            bufferHandle1 = GCHandle.Alloc(videoFrameBuffer1, GCHandleType.Pinned);
        }
        if (videoFrameBuffer2.Length != data.Length)
        {
            _send.SendVideoAsync(emptyVideoFrame);
            bufferHandle2.Free();
            videoFrameBuffer2 = new byte[data.Length];
            bufferHandle2 = GCHandle.Alloc(videoFrameBuffer2, GCHandleType.Pinned);
        }

        // Ping pong handles: copy into the buffer the SDK is not still
        // using, then flip the index for the next frame.
        var pdata = ping == 0 ? bufferHandle1.AddrOfPinnedObject()
                              : bufferHandle2.AddrOfPinnedObject();
        data.CopyTo(ping == 0 ? videoFrameBuffer1 : videoFrameBuffer2);
        ping = ping == 0 ? 1 : 0;

        // Frame data setup
        var frame = new Interop.VideoFrame
        {
            Width = _width,
            Height = _height,
            LineStride = _width * 2,
            FrameRateN = (int)frameRateND.x,
            FrameRateD = (int)frameRateND.y,
            // Fix: reuse the local instead of repeating the ternary
            // (the original computed `fourcc` and never used it).
            FourCC = fourcc,
            FrameFormat = Interop.FrameFormat.Progressive,
            Data = (System.IntPtr)pdata,
            Metadata = metadata
        };

        // Send via NDI
        _send.SendVideoAsync(frame);

        // Notify listeners when per-frame metadata was attached.
        if (metadata != IntPtr.Zero) _onVideoMetadataSent?.Invoke();
    }
}
/// <summary>
/// Thin wrapper over the native <c>_Capture</c> entry point for this
/// instance; see the native NDI API for the semantics of each argument.
/// </summary>
public FrameType Capture
  (out VideoFrame video, IntPtr audio, IntPtr metadata, uint timeout)
{
    return _Capture(this, out video, audio, metadata, timeout);
}