/// <summary> /// The simple constructor only deals with BGRA. For other color formats you'll need to handle them manually. /// Defaults to progressive but can be changed. /// </summary> /// <param name="width"></param> /// <param name="height"></param> /// <param name="aspectRatio"></param> /// <param name="frameRateNumerator"></param> /// <param name="frameRateDenominator"></param> /// <param name="format"></param> public VideoFrame(int width, int height, float aspectRatio, int frameRateNumerator, int frameRateDenominator, NDIlib.frame_format_type_e format = NDIlib.frame_format_type_e.frame_format_type_progressive) { // we have to know to free it later _memoryOwned = true; int stride = (width * 32 /*BGRA bpp*/ + 7) / 8; int bufferSize = height * stride; // allocate some memory for a video buffer IntPtr videoBufferPtr = Marshal.AllocHGlobal(bufferSize); _ndiVideoFrame = new NDIlib.video_frame_v2_t() { xres = width, yres = height, FourCC = NDIlib.FourCC_type_e.FourCC_type_BGRA, frame_rate_N = frameRateNumerator, frame_rate_D = frameRateDenominator, picture_aspect_ratio = aspectRatio, frame_format_type = format, timecode = NDIlib.send_timecode_synthesize, p_data = videoBufferPtr, line_stride_in_bytes = stride, p_metadata = IntPtr.Zero, timestamp = 0 }; }
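// Usage sketch (hypothetical, not from the original source): create a BGRA frame with the simple
// constructor above and fill its buffer before queueing it for send. A "Frame" accessor exposing
// the internal _ndiVideoFrame is assumed here for illustration.
var frame = new VideoFrame(1920, 1080, 16.0f / 9.0f, 30000, 1001);
unsafe
{
    var row = (byte*)frame.Frame.p_data.ToPointer(); // hypothetical accessor for _ndiVideoFrame
    for (int y = 0; y < 1080; y++, row += frame.Frame.line_stride_in_bytes)
    {
        for (int x = 0; x < 1920 * 4; x += 4)
        {
            row[x + 0] = 0;   // B
            row[x + 1] = 0;   // G
            row[x + 2] = 0;   // R
            row[x + 3] = 255; // A (opaque black)
        }
    }
}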
/// <summary> /// Create a VideoFrame from a <see cref="SharpDX.Direct3D11.Texture2D"/> /// </summary> /// <param name="texture"></param> /// <param name="fourCC">unused; the FourCC is derived from the texture format</param> /// <param name="frameRateNumerator"></param> /// <param name="frameRateDenominator"></param> /// <param name="format"></param> /// <param name="nodeContext"></param> public VideoFrame(Texture2D texture, NDIlib.FourCC_type_e fourCC, int frameRateNumerator, int frameRateDenominator, NDIlib.frame_format_type_e format, NodeContext nodeContext) { var provider = nodeContext.Factory.CreateService<IResourceProvider<Device>>(nodeContext); using var deviceHandle = provider.GetHandle(); var device = deviceHandle.Resource; int width = texture.Description.Width; int height = texture.Description.Height; int stride = width * SharpDX.DXGI.FormatHelper.SizeOfInBytes(texture.Description.Format); int bufferSize = height * stride; IntPtr videoBufferPtr = Marshal.AllocHGlobal(bufferSize); texture.CopyToPointer(device, videoBufferPtr, bufferSize); _ndiVideoFrame = new NDIlib.video_frame_v2_t() { xres = width, yres = height, FourCC = texture.Description.Format.ToFourCC(), frame_rate_N = frameRateNumerator, frame_rate_D = frameRateDenominator, picture_aspect_ratio = (float)width / height, frame_format_type = format, timecode = NDIlib.send_timecode_synthesize, p_data = videoBufferPtr, line_stride_in_bytes = stride, p_metadata = IntPtr.Zero, timestamp = 0 }; }
public void SendAsync(ref NDIlib.video_frame_v2_t videoFrame) { if (_sendInstancePtr == IntPtr.Zero) { return; } NDIlib.send_send_video_async_v2(_sendInstancePtr, ref videoFrame); }
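// Note on the async-send contract (from the NDI SDK documentation, not this snippet):
// send_send_video_async_v2 returns immediately and the SDK keeps reading from p_data until the
// next send call on the same instance, so the buffer must outlive the call. A minimal
// double-buffering sketch with hypothetical names (_inFlight, SendAsyncOwned):
IntPtr _inFlight = IntPtr.Zero;

public void SendAsyncOwned(ref NDIlib.video_frame_v2_t videoFrame)
{
    if (_sendInstancePtr == IntPtr.Zero) { return; }
    NDIlib.send_send_video_async_v2(_sendInstancePtr, ref videoFrame);
    // the previously queued buffer is no longer referenced by the SDK once a new frame is queued
    if (_inFlight != IntPtr.Zero) { Marshal.FreeHGlobal(_inFlight); }
    _inFlight = videoFrame.p_data; // owned by the SDK until the next send
}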
/// <summary> /// Constructor that takes an IImage /// </summary> /// <param name="image"></param> /// <param name="clone"></param> /// <param name="aspectRatio"></param> /// <param name="fourCC"></param> /// <param name="frameRateNumerator"></param> /// <param name="frameRateDenominator"></param> /// <param name="format"></param> public VideoFrame(IImage image, bool clone, float aspectRatio, NDIlib.FourCC_type_e fourCC, int frameRateNumerator, int frameRateDenominator, NDIlib.frame_format_type_e format) { var ar = aspectRatio; if (ar <= 0.0) { ar = (float)image.Info.Width / image.Info.Height; } int bufferSize = image.Info.ImageSize; IntPtr videoBufferPtr; if (clone) { // we have to know to free it later _memoryOwned = true; // allocate some memory for a video buffer videoBufferPtr = Marshal.AllocHGlobal(bufferSize); using (var handle = image.GetData().Bytes.Pin()) { unsafe { System.Buffer.MemoryCopy((void *)handle.Pointer, (void *)videoBufferPtr.ToPointer(), bufferSize, bufferSize); } } } else { _pinnedBytes = true; unsafe { _handle = image.GetData().Bytes.Pin(); // unpin when the frame gets disposed videoBufferPtr = (IntPtr)_handle.Pointer; } } _ndiVideoFrame = new NDIlib.video_frame_v2_t() { xres = image.Info.Width, yres = image.Info.Height, FourCC = fourCC, frame_rate_N = frameRateNumerator, frame_rate_D = frameRateDenominator, picture_aspect_ratio = ar, frame_format_type = format, timecode = NDIlib.send_timecode_synthesize, p_data = videoBufferPtr, line_stride_in_bytes = image.Info.ScanSize, p_metadata = IntPtr.Zero, timestamp = 0 }; }
private void Dispose(bool disposing) { if (disposing) { if (!disposed) { // tell the thread to exit exitThread = true; // wait for it to exit if (sendThread != null) { sendThread.Join(); sendThread = null; } // cause the pulling of frames to fail pendingFrames.CompleteAdding(); // clear any pending frames while (pendingFrames.Count > 0) { NDIlib.video_frame_v2_t discardFrame = pendingFrames.Take(); Marshal.FreeHGlobal(discardFrame.p_data); } pendingFrames.Dispose(); // Destroy the NDI sender if (sendInstancePtr != IntPtr.Zero) { NDIlib.send_destroy(sendInstancePtr); sendInstancePtr = IntPtr.Zero; } // Not required, but "correct". (see the SDK documentation) NDIlib.destroy(); srcBuffer = null; convertBuffer = null; disposed = true; } } }
public VideoFrame(IntPtr bufferPtr, int width, int height, int stride, NDIlib.FourCC_type_e fourCC, float aspectRatio, int frameRateNumerator, int frameRateDenominator, NDIlib.frame_format_type_e format) { _ndiVideoFrame = new NDIlib.video_frame_v2_t() { xres = width, yres = height, FourCC = fourCC, frame_rate_N = frameRateNumerator, frame_rate_D = frameRateDenominator, picture_aspect_ratio = aspectRatio, frame_format_type = format, timecode = NDIlib.send_timecode_synthesize, p_data = bufferPtr, line_stride_in_bytes = stride, p_metadata = IntPtr.Zero, timestamp = 0 }; }
public bool AddFrame(NDIlib.video_frame_v2_t frame) { try { pendingFrames.Add(frame); } catch (OperationCanceledException) { // we're shutting down pendingFrames.CompleteAdding(); return(false); } catch { return(false); } return(true); }
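// Context sketch (assumed from the calls above; the declaration is not shown in the source):
// pendingFrames is a BlockingCollection drained by the send thread. Giving it a small bound caps
// memory use if the producer outruns the sender.
BlockingCollection<NDIlib.video_frame_v2_t> pendingFrames =
    new BlockingCollection<NDIlib.video_frame_v2_t>(boundedCapacity: 4);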
void Update() { var offs = Time.frameCount; for (var i = 0; i < _buffer.Length; i++) { _buffer[i] = (UInt32)((offs + i) * 0x010203); } var frame = new NDIlib.video_frame_v2_t { xres = 64, yres = 64, FourCC = NDIlib.FourCC_type_e.FourCC_type_RGBX, frame_format_type = NDIlib.frame_format_type_e.frame_format_type_progressive, p_data = Marshal.UnsafeAddrOfPinnedArrayElement(_buffer, 0), line_stride_in_bytes = 64 * 4 }; NDIlib.send_send_video_async_v2(_sendInstance, ref frame); }
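// Setup sketch for the Update() above (hypothetical; field names and MonoBehaviour context assumed,
// using System.Runtime.InteropServices for GCHandle). The 64x64 RGBX buffer must be pinned so that
// Marshal.UnsafeAddrOfPinnedArrayElement stays valid, and _sendInstance comes from NDIlib.send_create.
UInt32[] _buffer;
GCHandle _bufferHandle;
IntPtr _sendInstance;

void Start()
{
    _buffer = new UInt32[64 * 64];
    _bufferHandle = GCHandle.Alloc(_buffer, GCHandleType.Pinned); // keep the array fixed in memory
    // leaving p_ndi_name at IntPtr.Zero lets NDI pick a default source name
    var desc = new NDIlib.send_create_t { clock_video = true, clock_audio = false };
    _sendInstance = NDIlib.send_create(ref desc);
}

void OnDestroy()
{
    if (_sendInstance != IntPtr.Zero) { NDIlib.send_destroy(_sendInstance); _sendInstance = IntPtr.Zero; }
    if (_bufferHandle.IsAllocated) { _bufferHandle.Free(); }
}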
public void Stop() { // tell the thread to exit exitThread = true; // Video CompositionTarget.Rendering -= OnCompositionTargetRendering; // Audio if (audioCapture != null) { audioCapture.StopRecording(); audioCapture = null; } // wait for it to exit if (sendThread != null) { sendThread.Join(); sendThread = null; } // cause the pulling of frames to fail pendingVideoFrames.CompleteAdding(); pendingAudioFrames.CompleteAdding(); // clear any pending video frames while (pendingVideoFrames.Count > 0) { NDIlib.video_frame_v2_t discardFrame = pendingVideoFrames.Take(); Marshal.FreeHGlobal(discardFrame.p_data); } // clear any pending audio frames while (pendingAudioFrames.Count > 0) { NDIlib.audio_frame_v2_t discardFrame = pendingAudioFrames.Take(); Marshal.FreeHGlobal(discardFrame.p_data); } }
bool TryCaptureFrame() { // Frame struct (unmanaged) var frame = new NDIlib.video_frame_v2_t(); // Try capturing a frame. var type = NDIlib.recv_capture_v2 (_ndiRecv, ref frame, IntPtr.Zero, IntPtr.Zero, 0); // Return if it isn't a video frame. if (type != NDIlib.frame_type_e.frame_type_video) { NDIlib.recv_free_video_v2(_ndiRecv, ref frame); return(false); } // Video frame information _width = frame.xres; _height = frame.yres; _pixelFormat = frame.FourCC.ToPixelFormat(); // Receive buffer preparation var count = Util.FrameDataCount(_width, _height, _pixelFormat); if (_received != null && _received.count != count) { _received.Dispose(); _received = null; } if (_received == null) { _received = new ComputeBuffer(count, 4); } // Receive buffer update _received.SetData(frame.p_data, count, 4); NDIlib.recv_free_video_v2(_ndiRecv, ref frame); return(true); }
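// Util.FrameDataCount is not shown here; a plausible sketch (assumption) given that the
// ComputeBuffer above is created with a 4-byte element size, so the count should be
// bytes-per-frame divided by 4:
static int FrameDataCount(int width, int height, PixelFormat format)
{
    switch (format)
    {
        case PixelFormat.UYVY: return width * height / 2; // packed 4:2:2, 2 bytes per pixel
        default:               return width * height;     // 4 bytes per pixel (BGRA/RGBA etc.)
    }
}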
unsafe void OnCompleteReadback(AsyncGPUReadbackRequest request) { if (_sendInstance == IntPtr.Zero) { return; } var ptr = (IntPtr)NativeArrayUnsafeUtility. GetUnsafeReadOnlyPtr(request.GetData <byte>()); var format = NDIlib.frame_format_type_e.frame_format_type_progressive; var frame = new NDIlib.video_frame_v2_t { xres = _width, yres = _height, FourCC = _pixelFormat.ToFourCC(), frame_format_type = format, p_data = ptr, line_stride_in_bytes = _width * 2 }; // Send via NDI NDIlib.send_send_video_async_v2(_sendInstance, ref frame); }
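// The stride above (_width * 2) implies a 2-bytes-per-pixel FourCC such as UYVY. A stride helper
// sketch (hypothetical; not part of the original source) if other formats were in play:
static int StrideOf(int width, NDIlib.FourCC_type_e fourCC)
{
    switch (fourCC)
    {
        case NDIlib.FourCC_type_e.FourCC_type_UYVY:
            return width * 2; // packed 4:2:2, 2 bytes per pixel
        case NDIlib.FourCC_type_e.FourCC_type_BGRA:
        case NDIlib.FourCC_type_e.FourCC_type_BGRX:
        case NDIlib.FourCC_type_e.FourCC_type_RGBA:
        case NDIlib.FourCC_type_e.FourCC_type_RGBX:
            return width * 4; // 4 bytes per pixel
        default:
            return width * 4; // assumption: treat unknown formats as 32 bpp
    }
}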
unsafe void Update() { if (_findInstance != IntPtr.Zero) { UInt32 count = 0; var sources = NDIlib.find_get_current_sources(_findInstance, ref count); if (count == 0) { return; } var source = UnsafeUtility.ReadArrayElement <NDIlib.source_t>((void *)sources, 0); Debug.Log("Sender found: " + Marshal.PtrToStringAnsi(source.p_ndi_name)); var recvOptions = new NDIlib.recv_create_v3_t { source_to_connect_to = source, color_format = NDIlib.recv_color_format_e.recv_color_format_fastest, bandwidth = NDIlib.recv_bandwidth_e.recv_bandwidth_highest }; _recvInstance = NDIlib.recv_create_v3(ref recvOptions); NDIlib.find_destroy(_findInstance); _findInstance = IntPtr.Zero; } if (_recvInstance != IntPtr.Zero) { var frame = new NDIlib.video_frame_v2_t(); var type = NDIlib.recv_capture_v2(_recvInstance, ref frame, IntPtr.Zero, IntPtr.Zero, 0); if (type != NDIlib.frame_type_e.frame_type_video) { return; } UpdateTexture(frame.xres, frame.yres, frame.p_data); NDIlib.recv_free_video_v2(_recvInstance, ref frame); } }
// the receive thread runs through this loop until told to exit void ReceiveThreadProc() { while (!_exitThread && _recvInstancePtr != IntPtr.Zero) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received break; // frame settings - check for extended functionality case NDIlib.frame_type_e.frame_type_status_change: // check for PTZ IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr); // Check for recording IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr); // Check for a web control URL // We must free this string ptr if we get one. IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr); if (webUrlPtr == IntPtr.Zero) { WebControlUrl = String.Empty; } else { // convert to managed String WebControlUrl = UTF.Utf8ToString(webUrlPtr); // Don't forget to free the string ptr NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr); } break; // Video data case NDIlib.frame_type_e.frame_type_video: // if not enabled, just discard // this can also occasionally happen when changing sources if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } // get all our info so that we can free the frame int yres = (int)videoFrame.yres; int xres = (int)videoFrame.xres; // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc. double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres)); int stride = (int)videoFrame.line_stride_in_bytes; int bufferSize = yres * stride; if (bufferSize != buffer01Size) { buffer0 = Marshal.ReAllocCoTaskMem(buffer0, bufferSize); buffer1 = Marshal.ReAllocCoTaskMem(buffer1, bufferSize); buffer01Size = bufferSize; } // Copy data unsafe { byte *dst = (byte *)buffer0.ToPointer(); byte *src = (byte *)videoFrame.p_data.ToPointer(); for (int y = 0; y < yres; y++) { memcpy(dst, src, stride); dst += stride; src += stride; } } // swap IntPtr temp = buffer0; buffer0 = buffer1; buffer1 = temp; ImagingPixelFormat pixFmt; switch (videoFrame.FourCC) { case NDIlib.FourCC_type_e.FourCC_type_BGRA: pixFmt = PixelFormat.B8G8R8A8; break; case NDIlib.FourCC_type_e.FourCC_type_BGRX: pixFmt = PixelFormat.B8G8R8; break; case NDIlib.FourCC_type_e.FourCC_type_RGBA: pixFmt = PixelFormat.R8G8B8A8; break; case NDIlib.FourCC_type_e.FourCC_type_RGBX: pixFmt = PixelFormat.R8G8B8; break; default: pixFmt = PixelFormat.Unknown; // TODO: need to handle other video formats which are currently unsupported by IImage break; } var VideoFrameImage = buffer1.ToImage(bufferSize, xres, yres, pixFmt, videoFrame.FourCC.ToString()); videoFrames.OnNext(VideoFrameImage); // free frames that were received AFTER use! // the data has already been copied into buffer1, so the frame can be freed here
NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; // audio is beyond the scope of this example case NDIlib.frame_type_e.frame_type_audio: // if no audio or disabled, nothing to do if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0) { // always free received frames NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; } // we're working in bytes, so take the size of a 32 bit sample (float) into account int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float); // NAudio is expecting interleaved audio and NDI uses planar. // create an interleaved frame and convert from the one we received NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t() { sample_rate = audioFrame.sample_rate, no_channels = audioFrame.no_channels, no_samples = audioFrame.no_samples, timecode = audioFrame.timecode }; // we need a managed byte array to add to buffered provider byte[] audBuffer = new byte[sizeInBytes]; // pin the byte[] and get a GC handle to it // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later // the data will only be moved once, during the fast interleave step that is required anyway GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned); // access it by an IntPtr and use it for our interleaved audio buffer interleavedFrame.p_data = handle.AddrOfPinnedObject(); // Convert from float planar to float interleaved audio // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame); // release the pin on the byte[] // never try to access p_data after the byte[] has been unpinned! // that IntPtr will no longer be valid. handle.Free(); int channelStride = audioFrame.channel_stride_in_bytes; var floatBuffer = ConvertByteArrayToFloat(audBuffer, channelStride); float[] outBuffer = new float[512]; Buffer.BlockCopy(floatBuffer, 0, outBuffer, 0, 512 * sizeof(float)); // BlockCopy counts bytes, not floats audioOutSignal.Read(outBuffer, 0, 512); // free the frame that was received NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; // Metadata case NDIlib.frame_type_e.frame_type_metadata: // UTF-8 strings must be converted for use - length includes the terminating zero //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1); //System.Diagnostics.Debug.Print(metadata); // free frames that were received NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; } } }
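// ConvertByteArrayToFloat is called above but not shown; a minimal sketch of what it presumably
// does (reinterpret the interleaved 32-bit float bytes as a float[]). channelStride is accepted
// to match the call site but unused in this sketch.
static float[] ConvertByteArrayToFloat(byte[] bytes, int channelStride)
{
    float[] floats = new float[bytes.Length / sizeof(float)];
    Buffer.BlockCopy(bytes, 0, floats, 0, bytes.Length); // byte-wise copy, no per-sample conversion
    return floats;
}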
private void SendThreadProc() { // look for changes in tally bool lastProg = false; bool lastPrev = false; NDIlib.tally_t tally = new NDIlib.tally_t(); tally.on_program = lastProg; tally.on_preview = lastPrev; while (!exitThread) { if (Monitor.TryEnter(sendInstanceLock)) { // if this is not here, then we must be being reconfigured if (sendInstancePtr == IntPtr.Zero) { // unlock Monitor.Exit(sendInstanceLock); // give up some time Thread.Sleep(20); // loop again continue; } try { // get the next available frame NDIlib.video_frame_v2_t frame; if (pendingFrames.TryTake(out frame, 250)) { // this drops frames if the UI is rendering ahead of the specified NDI frame rate while (pendingFrames.Count > 1) { NDIlib.video_frame_v2_t discardFrame = pendingFrames.Take(); Marshal.FreeHGlobal(discardFrame.p_data); } // We now submit the frame. Note that this call will be clocked so that we end up submitting // at exactly the requested rate. // If WPF can't keep up with what you requested of NDI, then it will be sent at the rate WPF is rendering. //if (!isPausedValue) if (FInSend[0]) { NDIlib.send_send_video_v2(sendInstancePtr, ref frame); } // free the memory from this frame Marshal.FreeHGlobal(frame.p_data); } } catch (OperationCanceledException) { pendingFrames.CompleteAdding(); } catch { // } // unlock Monitor.Exit(sendInstanceLock); } else { Thread.Sleep(20); } // check tally NDIlib.send_get_tally(sendInstancePtr, ref tally, 0); // if tally changed trigger an update if (lastProg != tally.on_program || lastPrev != tally.on_preview) { // save the last values lastProg = tally.on_program; lastPrev = tally.on_preview; } } }
void updateSendBuffer() { FrameDurations.Add(DateTime.Now.Ticks - PreviousFrameTime); PreviousFrameTime = DateTime.Now.Ticks; Texture2D src = FInTexture[0][AssignedContext].Resource; int xres = src.Description.Width; int yres = src.Description.Height; // sanity if (sendInstancePtr == IntPtr.Zero || xres < 8 || yres < 8) { return; } // there seems to be a 16 px stride offset between AMD and NVIDIA stride = (xres * 32 /*BGRA bpp*/ + 7) / 8; //stride = xres * 4; // always 4bytes per pixel. (RGBA or BGRA or RGBX or BGRX) //stride = (xres * 32 + FInStride[0]) / 8; bufferSize = yres * stride; aspectRatio = (float)xres / (float)yres; // allocate some memory for a video buffer IntPtr bufferPtr = Marshal.AllocHGlobal(bufferSize); //FLogger.Log(LogType.Message, "updateSendBuffer: " + xres + "," + yres + "," + bufferSize); // We are going to create a progressive frame at the requested rate. NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t() { // Resolution xres = src.Description.Width, yres = src.Description.Height, // Use BGRA video //FourCC = NDIlib.FourCC_type_e.FourCC_type_RGBA, FourCC = (NDIlib.FourCC_type_e)Enum.ToObject(typeof(NDIlib.FourCC_type_e), (int)FInPixelFormat[0]), // The frame-rate frame_rate_N = FInFramerate[0] * 1000, frame_rate_D = 1000, // The aspect ratio picture_aspect_ratio = aspectRatio, // This is a progressive frame frame_format_type = NDIlib.frame_format_type_e.frame_format_type_progressive, // Timecode. timecode = NDIlib.send_timecode_synthesize, // The video memory used for this frame p_data = bufferPtr, // The line to line stride of this image line_stride_in_bytes = stride, // no metadata p_metadata = IntPtr.Zero, // only valid on received frames timestamp = 0 }; // copy data to buffer TextureToBuffer(src, ref bufferPtr); // add it to the output queue if (!AddFrame(videoFrame)) { FLogger.Log(LogType.Error, "failed to add video frame"); } }
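// TextureToBuffer is called above but not shown; a sketch of the usual SharpDX readback it likely
// wraps (assumption): copy the GPU texture to a CPU-readable staging texture, map it, and copy row
// by row because the mapped RowPitch can differ from the NDI stride ('stride' is the field
// computed above).
void TextureToBuffer(Texture2D src, ref IntPtr dst)
{
    var desc = src.Description;
    desc.Usage = ResourceUsage.Staging;
    desc.BindFlags = BindFlags.None;
    desc.CpuAccessFlags = CpuAccessFlags.Read;
    desc.OptionFlags = ResourceOptionFlags.None;
    using (var staging = new Texture2D(src.Device, desc))
    {
        var ctx = src.Device.ImmediateContext;
        ctx.CopyResource(src, staging); // GPU -> staging copy
        DataBox box = ctx.MapSubresource(staging, 0, MapMode.Read, MapFlags.None);
        try
        {
            IntPtr srcRow = box.DataPointer;
            IntPtr dstRow = dst;
            for (int y = 0; y < desc.Height; y++)
            {
                SharpDX.Utilities.CopyMemory(dstRow, srcRow, stride); // one scanline at a time
                srcRow += box.RowPitch;
                dstRow += stride;
            }
        }
        finally { ctx.UnmapSubresource(staging, 0); }
    }
}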
// the receive thread runs through this loop until told to exit // AccessViolationException void ReceiveThreadProc() { while (!_exitThread && (_recvInstancePtr != IntPtr.Zero)) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received break; // frame settings - check for extended functionality case NDIlib.frame_type_e.frame_type_status_change: // nothing to do break; // Video data case NDIlib.frame_type_e.frame_type_video: // if not enabled, just discard // this can also occasionally happen when changing sources if (_videoEnabled == false || videoFrame.p_data == IntPtr.Zero) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } // get all our info so that we can free the frame int yres = (int)videoFrame.yres; int xres = (int)videoFrame.xres; // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc. //double dpiX = 96.0 * ((double)videoFrame.picture_aspect_ratio / ((double)xres / (double)yres)); int stride = (int)videoFrame.line_stride_in_bytes; int bufferSize = yres * stride; // We need to be on the UI thread to write to our bitmap // Not very efficient, but this is just an example Dispatcher.BeginInvoke(new Action(delegate { if (_videoEnabled == false) { return; } // resize the writeable if needed if (VideoBitmap == null || VideoBitmap.PixelWidth != xres || VideoBitmap.PixelHeight != yres) { VideoBitmap = new WriteableBitmap(xres, yres, 96.0, 96.0, PixelFormats.Pbgra32, null); VideoSurface.Source = VideoBitmap; } try { VideoBitmap.Lock(); IntPtr pBackBuffer = VideoBitmap.BackBuffer; try { CopyMemory(pBackBuffer, videoFrame.p_data, (uint)bufferSize); } catch { return; } // Specify the area of the bitmap that changed. VideoBitmap.AddDirtyRect(new Int32Rect(0, 0, xres, yres)); } finally { VideoBitmap.Unlock(); } // free frames that were received AFTER use! // This copy is dispatched, so we must free inside this scope. NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); })); #if DEBUG // compute the frame rate DateTime now = DateTime.Now; TimeSpan dist = now - last; if (dist.Seconds > 0) { Console.WriteLine("Frame rate: <1"); } else { if (dist.Milliseconds == 0) { Console.WriteLine("Frame rate: ∞"); } else { int frame = 1000 / dist.Milliseconds; Console.WriteLine("Frame rate: {0}", frame.ToString()); } } last = now; #endif break; case NDIlib.frame_type_e.frame_type_audio: // nothing to do with the audio, but it must be freed here or it will leak NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; case NDIlib.frame_type_e.frame_type_metadata: // nothing to do NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; } } }
public void OnImportsSatisfied() { frameNumber = 0; // .Net interop doesn't handle UTF-8 strings, so do it manually // These must be freed later sourceNamePtr = UTF.StringToUtf8("VVVV.NDI.SendTest"); groupsNamePtr = IntPtr.Zero; // Not required, but "correct". (see the SDK documentation) if (!NDIlib.initialize()) { // Cannot run NDI. Most likely because the CPU is not sufficient (see SDK documentation). // you can check this directly with a call to NDIlib_is_supported_CPU() FLogger.Log(LogType.Error, "Cannot run NDI"); return; } else { FLogger.Log(LogType.Message, "NDI initialized"); } // Create an NDI source description using sourceNamePtr; it is clocked to the video. NDIlib.send_create_t createDesc = new NDIlib.send_create_t() { p_ndi_name = sourceNamePtr, p_groups = groupsNamePtr, clock_video = true, clock_audio = false }; // We create the NDI send instance sendInstancePtr = NDIlib.send_create(ref createDesc); // free the strings we allocated Marshal.FreeHGlobal(sourceNamePtr); Marshal.FreeHGlobal(groupsNamePtr); // did it succeed? if (sendInstancePtr == IntPtr.Zero) { FLogger.Log(LogType.Error, "Failed to create send instance"); //Console.WriteLine("Failed to create send instance"); return; } else { FLogger.Log(LogType.Message, "Succeeded in creating send instance"); } // define our bitmap properties int xres = 1920; int yres = 1080; int stride = (xres * 32 /*BGRA bpp*/ + 7) / 8; int bufferSize = yres * stride; // allocate some memory for a video buffer bufferPtr = Marshal.AllocHGlobal((int)bufferSize); // We are going to create a 1920x1080 progressive frame at 120Hz (the 29.97 values are left commented out). videoFrame = new NDIlib.video_frame_v2_t() { // Resolution xres = xres, yres = yres, // Use BGRA video FourCC = NDIlib.FourCC_type_e.FourCC_type_BGRA, // The frame rate frame_rate_N = 120000, //30000, frame_rate_D = 1000, //1001, // The aspect ratio (16:9) picture_aspect_ratio = (16.0f / 9.0f), // This is a progressive frame frame_format_type = NDIlib.frame_format_type_e.frame_format_type_progressive, // Timecode. timecode = NDIlib.send_timecode_synthesize, // The video memory used for this frame p_data = bufferPtr, // The line to line stride of this image line_stride_in_bytes = stride, // no metadata p_metadata = IntPtr.Zero, // only valid on received frames timestamp = 0 //NDIlib.recv_timestamp_undefined }; // get a compatible bitmap and graphics context bmp = new Bitmap((int)xres, (int)yres, (int)stride, System.Drawing.Imaging.PixelFormat.Format32bppPArgb, bufferPtr); graphics = Graphics.FromImage(bmp); graphics.SmoothingMode = SmoothingMode.AntiAlias; // We'll use these later inside the loop textFormat = new StringFormat(); textFormat.Alignment = StringAlignment.Center; textFormat.LineAlignment = StringAlignment.Center; fontFamily = new FontFamily("Arial"); outlinePen = new Pen(Color.Black, 2.0f); thinOutlinePen = new Pen(Color.Black, 1.0f); }
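// UTF.StringToUtf8 is used above but not shown; a typical implementation sketch (assumption).
// It allocates unmanaged memory for the UTF-8 bytes plus a terminating zero, which is why the
// pointers above are later freed with Marshal.FreeHGlobal:
public static IntPtr StringToUtf8(string managedString)
{
    byte[] bytes = System.Text.Encoding.UTF8.GetBytes(managedString);
    IntPtr p = Marshal.AllocHGlobal(bytes.Length + 1);
    Marshal.Copy(bytes, 0, p, bytes.Length);
    Marshal.WriteByte(p, bytes.Length, 0); // null terminator
    return p;
}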
/// <summary> /// the receive thread runs through this loop until told to exit /// </summary> void ReceiveThreadProc() { bool newVideo = true; using var deviceHandle = deviceProvider.GetHandle(); var device = deviceHandle.Resource; while (!_exitThread && _recvInstancePtr != IntPtr.Zero) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received break; // frame settings - check for extended functionality case NDIlib.frame_type_e.frame_type_status_change: // check for PTZ IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr); // Check for recording IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr); // Check for a web control URL // We must free this string ptr if we get one. IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr); if (webUrlPtr == IntPtr.Zero) { WebControlUrl = String.Empty; } else { // convert to managed String WebControlUrl = UTF.Utf8ToString(webUrlPtr); // Don't forget to free the string ptr NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr); } break; // Video data case NDIlib.frame_type_e.frame_type_video: // if not enabled, just discard // this can also occasionally happen when changing sources if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } // get all our info so that we can free the frame int yres = (int)videoFrame.yres; int xres = (int)videoFrame.xres; // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres)); int stride = (int)videoFrame.line_stride_in_bytes; int bufferSize = yres * stride; if (bufferSize != buffer01Size) { buffer0 = Marshal.ReAllocCoTaskMem(buffer0, bufferSize); buffer1 = Marshal.ReAllocCoTaskMem(buffer1, bufferSize); buffer01Size = bufferSize; } // Copy data unsafe { byte *dst = (byte *)buffer0.ToPointer(); byte *src = (byte *)videoFrame.p_data.ToPointer(); for (int y = 0; y < yres; y++) { memcpy(dst, src, stride); dst += stride; src += stride; } } // swap IntPtr temp = buffer0; buffer0 = buffer1; buffer1 = temp; SharpDX.DXGI.Format texFmt; switch (videoFrame.FourCC) { case NDIlib.FourCC_type_e.FourCC_type_BGRA: texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm; break; case NDIlib.FourCC_type_e.FourCC_type_BGRX: texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm; break; case NDIlib.FourCC_type_e.FourCC_type_RGBA: texFmt = SharpDX.DXGI.Format.R8G8B8A8_UNorm; break; case NDIlib.FourCC_type_e.FourCC_type_RGBX: texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm; break; default: texFmt = SharpDX.DXGI.Format.Unknown; // TODO: need to handle other video formats break; } if (newVideo) // it's the first time we enter the while loop, so create a new texture { textureDesc = new Texture2DDescription() { Width = xres, Height = yres, MipLevels = 1, ArraySize = 1, Format = texFmt, SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0), Usage = ResourceUsage.Default, BindFlags = BindFlags.ShaderResource, CpuAccessFlags = CpuAccessFlags.None, OptionFlags = ResourceOptionFlags.None }; outputTexture = new Texture2D(device, textureDesc); newVideo = false; } // UpdateSubresource needs the source row pitch; no map/unmap is involved for a Default-usage texture DataBox srcBox = new DataBox(buffer1, stride, 0); device.ImmediateContext.UpdateSubresource(srcBox, outputTexture, 0); videoFrames.OnNext(outputTexture); // free frames that were received AFTER use! NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; // audio is beyond the scope of this example case NDIlib.frame_type_e.frame_type_audio: // if no audio or disabled, nothing to do if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0) { // always free received frames NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; } //// if the audio format changed, we need to reconfigure the audio device //bool formatChanged = false; //// make sure our format has been created and matches the incoming audio //if (_waveFormat == null || // _waveFormat.Channels != audioFrame.no_channels || // _waveFormat.SampleRate != audioFrame.sample_rate) //{ // //// Create a wave format that matches the incoming frames // //_waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels); // formatChanged = true; //} //// set up our audio buffer if needed //if (_bufferedProvider == null || formatChanged) //{ // _bufferedProvider = new BufferedWaveProvider(_waveFormat); // _bufferedProvider.DiscardOnBufferOverflow = true; //} //// set up our multiplexer used to mix down to 2 output channels //if (_multiplexProvider == null || formatChanged) //{ // _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2); //} // // set up our audio output device // if (_wasapiOut == null || formatChanged) // { // // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
// // This is close enough to show that audio is received and converted correctly. // _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50); // _wasapiOut.Init(_multiplexProvider); // _wasapiOut.Volume = _volume; // _wasapiOut.Play(); // } // we're working in bytes, so take the size of a 32 bit sample (float) into account int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float); // NAudio is expecting interleaved audio and NDI uses planar. // create an interleaved frame and convert from the one we received NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t() { sample_rate = audioFrame.sample_rate, no_channels = audioFrame.no_channels, no_samples = audioFrame.no_samples, timecode = audioFrame.timecode }; // we need a managed byte array to add to buffered provider byte[] audBuffer = new byte[sizeInBytes]; // pin the byte[] and get a GC handle to it // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later // the data will only be moved once, during the fast interleave step that is required anyway GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned); // access it by an IntPtr and use it for our interleaved audio buffer interleavedFrame.p_data = handle.AddrOfPinnedObject(); // Convert from float planar to float interleaved audio // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame); // release the pin on the byte[] // never try to access p_data after the byte[] has been unpinned! // that IntPtr will no longer be valid. handle.Free(); //// push the byte[] buffer into the bufferedProvider for output //_bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes); // free the frame that was received NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; // Metadata case NDIlib.frame_type_e.frame_type_metadata: // UTF-8 strings must be converted for use - length includes the terminating zero //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1); //System.Diagnostics.Debug.Print(metadata); // free frames that were received NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; } } }
// the send thread runs through this loop until told to exit private void SendThreadProc() { // look for changes in tally bool lastProg = false; bool lastPrev = false; NDIlib.tally_t tally = new NDIlib.tally_t { on_program = lastProg, on_preview = lastPrev }; while (!exitThread) { if (Monitor.TryEnter(sendInstanceLock)) { // if this is not here, then we must be being reconfigured if (sendInstancePtr == IntPtr.Zero) { // unlock Monitor.Exit(sendInstanceLock); // give up some time Thread.Sleep(20); // loop again continue; } // Audio should be sent first if (audioEnabled) { try { // get the next available frame if (pendingAudioFrames.TryTake(out NDIlib.audio_frame_v2_t frame, 250)) { // Submit the audio buffer if (!IsSendPaused) { NDIlib.send_send_audio_v2(sendInstancePtr, ref frame); } // free the memory from this frame Marshal.FreeHGlobal(frame.p_data); } } catch (OperationCanceledException) { pendingAudioFrames.CompleteAdding(); } catch {} } try { // get the next available frame if (pendingVideoFrames.TryTake(out NDIlib.video_frame_v2_t frame, 250)) { // this drops frames if the UI is rendering ahead of the specified NDI frame rate while (pendingVideoFrames.Count > 1) { NDIlib.video_frame_v2_t discardFrame = pendingVideoFrames.Take(); Marshal.FreeHGlobal(discardFrame.p_data); } // We now submit the frame. Note that this call will be clocked so that we end up submitting // at exactly the requested rate. // If WPF can't keep up with what you requested of NDI, then it will be sent at the rate WPF is rendering. if (!IsSendPaused) { NDIlib.send_send_video_v2(sendInstancePtr, ref frame); } // free the memory from this frame Marshal.FreeHGlobal(frame.p_data); } } catch (OperationCanceledException) { pendingVideoFrames.CompleteAdding(); } catch {} // unlock Monitor.Exit(sendInstanceLock); } else { Thread.Sleep(20); } // check tally NDIlib.send_get_tally(sendInstancePtr, ref tally, 0); // if tally changed trigger an update if (lastProg != tally.on_program || lastPrev != tally.on_preview) { // save the last values lastProg = tally.on_program; lastPrev = tally.on_preview; // set these on the UI thread Application.Current.Dispatcher.BeginInvoke(new Action(() => { IsOnProgram = lastProg; IsOnPreview = lastPrev; })); } } }
// the receive thread runs through this loop until told to exit void ReceiveThreadProc() { int receivedFrames = 0; while (!_exitThread && _recvInstancePtr != IntPtr.Zero) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received break; // frame settings - check for extended functionality case NDIlib.frame_type_e.frame_type_status_change: break; // Video data case NDIlib.frame_type_e.frame_type_video: receivedFrames++; // if not enabled, just discard // this can also occasionally happen when changing sources if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } // get all our info so that we can free the frame int yres = (int)videoFrame.yres; int xres = (int)videoFrame.xres; // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc. double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres)); int stride = (int)videoFrame.line_stride_in_bytes; int bufferSize = yres * stride; Bitmap NDIRecvd = new Bitmap(xres, yres, stride, System.Drawing.Imaging.PixelFormat.Format32bppPArgb, videoFrame.p_data); /* this is done in apply resolution bias, but we need a really small texture */ lock (ReceiverLock) { if (_bitmap != null) { _bitmap.Dispose(); } _bitmap = new Bitmap(xres / 4, yres / 4); using (var g = Graphics.FromImage(_bitmap)) { g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.Bilinear; g.DrawImage(NDIRecvd, 0, 0, _bitmap.Width, _bitmap.Height); } NDIRecvd.Dispose(); } NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; // audio is not used case NDIlib.frame_type_e.frame_type_audio: // free the frame that was received NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; // Metadata case NDIlib.frame_type_e.frame_type_metadata: // UTF-8 strings must be converted for use - length includes the terminating zero //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1); //System.Diagnostics.Debug.Print(metadata); // free frames that were received NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; } } }
internal VideoFrame(NDIlib.video_frame_v2_t ndiVideoFrame) { _memoryOwned = false; _ndiVideoFrame = ndiVideoFrame; }
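// Sketch of the Dispose logic implied by the ownership flags above (assumption based on the
// _memoryOwned, _pinnedBytes and _handle fields set in the constructors): free only what this
// instance owns; frames wrapped via the internal constructor are left to the receiver to free.
public void Dispose()
{
    if (_memoryOwned && _ndiVideoFrame.p_data != IntPtr.Zero)
    {
        Marshal.FreeHGlobal(_ndiVideoFrame.p_data); // buffer allocated in a constructor above
        _ndiVideoFrame.p_data = IntPtr.Zero;
    }
    if (_pinnedBytes)
    {
        _handle.Dispose(); // unpin the IImage memory pinned in the IImage constructor
        _pinnedBytes = false;
    }
}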
// the receive thread runs through this loop until told to exit void ReceiveThreadProc() { while (!_exitThread && _recvInstancePtr != IntPtr.Zero) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received break; // frame settings - check for extended functionality case NDIlib.frame_type_e.frame_type_status_change: // check for PTZ IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr); // Check for recording IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr); // Check for a web control URL // We must free this string ptr if we get one. IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr); if (webUrlPtr == IntPtr.Zero) { WebControlUrl = String.Empty; } else { // convert to managed String WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr); // Don't forget to free the string ptr NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr); } break; // Video data case NDIlib.frame_type_e.frame_type_video: // if not enabled, just discard // this can also occasionally happen when changing sources if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } // get all our info so that we can free the frame int yres = (int)videoFrame.yres; int xres = (int)videoFrame.xres; // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc. double dpiX = 96.0 * (((double)xres / (double)yres) / videoFrame.picture_aspect_ratio); int stride = (int)videoFrame.line_stride_in_bytes; int bufferSize = yres * stride; // We need to be on the UI thread to write to our bitmap // Not very efficient, but this is just an example Dispatcher.BeginInvoke(new Action(delegate { // resize the writeable if needed if (VideoBitmap == null || VideoBitmap.PixelWidth != xres || VideoBitmap.PixelHeight != yres || Math.Abs(VideoBitmap.DpiX - dpiX) > 0.001) { VideoBitmap = null; GC.Collect(1); VideoBitmap = new WriteableBitmap(xres, yres, dpiX, 96.0, PixelFormats.Pbgra32, null); VideoSurface.Source = VideoBitmap; } // update the writeable bitmap VideoBitmap.WritePixels(new Int32Rect(0, 0, xres, yres), videoFrame.p_data, bufferSize, stride); // free frames that were received AFTER use! // This WritePixels call is dispatched, so we must do it inside this scope.
NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); })); break; // audio is beyond the scope of this example case NDIlib.frame_type_e.frame_type_audio: // if no audio or disabled, nothing to do if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0) { // always free received frames NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; } // if the audio format changed, we need to reconfigure the audio device bool formatChanged = false; // make sure our format has been created and matches the incoming audio if (_waveFormat == null || _waveFormat.Channels != audioFrame.no_channels || _waveFormat.SampleRate != audioFrame.sample_rate) { // Create a wave format that matches the incoming frames _waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels); formatChanged = true; } // set up our audio buffer if needed if (_bufferedProvider == null || formatChanged) { _bufferedProvider = new BufferedWaveProvider(_waveFormat); _bufferedProvider.DiscardOnBufferOverflow = true; } // set up our multiplexer used to mix down to 2 output channels if (_multiplexProvider == null || formatChanged) { _multiplexProvider = new MultiplexingWaveProvider(new List <IWaveProvider>() { _bufferedProvider }, 2); } // set up our audio output device if (_haveAudioDevice && (_wasapiOut == null || formatChanged)) { try { // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example. // This is close enough to show that audio is received and converted correctly. _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50); _wasapiOut.Init(_multiplexProvider); _wasapiOut.Volume = _volume; _wasapiOut.Play(); } catch { // if this fails, assume that there is no audio device on the system // so that we don't retry/catch on every audio frame received _haveAudioDevice = false; } } // did we get a device? if (_haveAudioDevice && _wasapiOut != null) { // we're working in bytes, so take the size of a 32 bit sample (float) into account int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float); // NAudio is expecting interleaved audio and NDI uses planar. // create an interleaved frame and convert from the one we received NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t() { sample_rate = audioFrame.sample_rate, no_channels = audioFrame.no_channels, no_samples = audioFrame.no_samples, timecode = audioFrame.timecode }; // we need a managed byte array to add to buffered provider byte[] audBuffer = new byte[sizeInBytes]; // pin the byte[] and get a GC handle to it // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later // the data will only be moved once, during the fast interleave step that is required anyway GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned); // access it by an IntPtr and use it for our interleaved audio buffer interleavedFrame.p_data = handle.AddrOfPinnedObject(); // Convert from float planar to float interleaved audio // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame); // release the pin on the byte[] // never try to access p_data after the byte[] has been unpinned! // that IntPtr will no longer be valid.
handle.Free(); // push the byte[] buffer into the bufferedProvider for output _bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes); } // free the frame that was received NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; // Metadata case NDIlib.frame_type_e.frame_type_metadata: // UTF-8 strings must be converted for use - length includes the terminating zero //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1); //System.Diagnostics.Debug.Print(metadata); // free frames that were received NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; } } }
protected virtual void Dispose(bool disposing) { if (!_disposed) { // clean up the audio capture if needed if (audioCap != null) { audioCap.StopRecording(); // have to let it stop while (audioCap.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped) { Thread.Sleep(10); } audioCap.Dispose(); audioCap = null; } // free allocated frame if needed if (audioFrame.p_data != IntPtr.Zero) { Marshal.FreeHGlobal(audioFrame.p_data); audioFrame.p_data = IntPtr.Zero; } if (disposing) { // tell the thread to exit exitThread = true; // wait for it to exit if (sendThread != null) { sendThread.Join(); sendThread = null; } // cause the pulling of frames to fail pendingFrames.CompleteAdding(); // clear any pending frames while (pendingFrames.Count > 0) { NDIlib.video_frame_v2_t discardFrame = pendingFrames.Take(); Marshal.FreeHGlobal(discardFrame.p_data); } pendingFrames.Dispose(); } // Destroy the NDI sender if (sendInstancePtr != IntPtr.Zero) { NDIlib.send_destroy(sendInstancePtr); sendInstancePtr = IntPtr.Zero; } // Not required, but "correct". (see the SDK documentation) NDIlib.destroy(); _disposed = true; } }
private void OnCompositionTargetRendering(object sender, EventArgs e) { if (IsSendPaused) { return; } if (System.ComponentModel.DesignerProperties.GetIsInDesignMode(this)) { return; } int xres = NdiWidth; int yres = NdiHeight; int frNum = NdiFrameRateNumerator; int frDen = NdiFrameRateDenominator; // sanity if (sendInstancePtr == IntPtr.Zero || xres < 8 || yres < 8) { return; } if (targetBitmap == null || targetBitmap.PixelWidth != xres || targetBitmap.PixelHeight != yres) { // Create a properly sized RenderTargetBitmap targetBitmap = new RenderTargetBitmap(xres, yres, 96, 96, PixelFormats.Pbgra32); fmtConvertedBmp = new FormatConvertedBitmap(); fmtConvertedBmp.BeginInit(); fmtConvertedBmp.Source = targetBitmap; fmtConvertedBmp.DestinationFormat = PixelFormats.Bgra32; fmtConvertedBmp.EndInit(); } // clear to prevent trails targetBitmap.Clear(); // render the content into the bitmap targetBitmap.Render(this.Child); stride = (xres * 32 /*BGRA bpp*/ + 7) / 8; bufferSize = yres * stride; aspectRatio = (float)xres / (float)yres; // allocate some memory for a video buffer IntPtr bufferPtr = Marshal.AllocHGlobal(bufferSize); // We are going to create a progressive frame at the requested rate. NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t() { // Resolution xres = NdiWidth, yres = NdiHeight, // Use BGRA video FourCC = NDIlib.FourCC_type_e.FourCC_type_BGRA, // The frame rate frame_rate_N = frNum, frame_rate_D = frDen, // The aspect ratio picture_aspect_ratio = aspectRatio, // This is a progressive frame frame_format_type = NDIlib.frame_format_type_e.frame_format_type_progressive, // Timecode. timecode = NDIlib.send_timecode_synthesize, // The video memory used for this frame p_data = bufferPtr, // The line to line stride of this image line_stride_in_bytes = stride, // no metadata p_metadata = IntPtr.Zero, // only valid on received frames timestamp = 0 }; if (UnPremultiply && fmtConvertedBmp != null) { fmtConvertedBmp.CopyPixels(new Int32Rect(0, 0, xres, yres), bufferPtr, bufferSize, stride); } else { // copy the pixels into the buffer targetBitmap.CopyPixels(new Int32Rect(0, 0, xres, yres), bufferPtr, bufferSize, stride); } // add it to the output queue AddFrame(videoFrame); }
void ReceiveThreadProc() { while (!_exitThread && _recvInstancePtr != IntPtr.Zero) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received break; // Video data case NDIlib.frame_type_e.frame_type_video: // if not enabled or the lockBuffer flag is set, just discard // this can also occasionally happen when changing sources if (videoFrame.p_data == IntPtr.Zero || lockBuffer) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } // check receive data //FLogger.Log(LogType.Message, // "FourCC: " + Enum.GetName(typeof(NDIlib.FourCC_type_e), videoFrame.FourCC) // + ", frame_format_type: " + Enum.GetName(typeof(NDIlib.frame_format_type_e), videoFrame.frame_format_type) // + ", frameRate_D: " + videoFrame.frame_rate_D // + ", frameRate_N: " + videoFrame.frame_rate_N // + ", line_stride_in_bytes: " + videoFrame.line_stride_in_bytes // + ", picture_aspect_ratio: " + videoFrame.picture_aspect_ratio // + ", xres: " + videoFrame.xres // + ", yres: " + videoFrame.yres //); // get all our info so that we can free the frame int yres = (int)videoFrame.yres; int xres = (int)videoFrame.xres; width = xres; height = yres; // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc. //double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres)); int stride = (int)videoFrame.line_stride_in_bytes; int size = yres * stride; // allocate some memory for a video buffer if (bufferSize != size) { if (buffer_ptr != IntPtr.Zero) { Marshal.FreeHGlobal(buffer_ptr); } buffer_ptr = Marshal.AllocHGlobal((int)size); bufferSize = size; } // copy frame data CopyMemory(buffer_ptr, videoFrame.p_data, bufferSize); // free frames that were received NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); // set flag for update texture //invalidate = true; break; // audio is not supported case NDIlib.frame_type_e.frame_type_audio: // always free received frames NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; // Metadata case NDIlib.frame_type_e.frame_type_metadata: // UTF-8 strings must be converted for use - length includes the terminating zero //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1); //System.Diagnostics.Debug.Print(metadata); // free frames that were received NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; } } }
// the receive thread runs through this loop until told to exit void ReceiveThreadProc() { while (!_exitThread && _recvInstancePtr != IntPtr.Zero) { // The descriptors NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t(); NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t(); NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t(); switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000)) { // No data case NDIlib.frame_type_e.frame_type_none: // No data received ReceivingFrames = false; break; // frame settings - check for extended functionality case NDIlib.frame_type_e.frame_type_status_change: // check for PTZ IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr); // Check for recording IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr); // Check for a web control URL // We must free this string ptr if we get one. IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr); if (webUrlPtr == IntPtr.Zero) { WebControlUrl = String.Empty; } else { // convert to managed String WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr); // Don't forget to free the string ptr NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr); } break; // Video data case NDIlib.frame_type_e.frame_type_video: Connected = true; ReceivingFrames = true; // if not enabled, just discard // this can also occasionally happen when changing sources if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero) { // always free received frames NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; } VideoFrameReceivedEventArgs videoArgs = new VideoFrameReceivedEventArgs(); videoArgs.Frame = new VideoFrame(videoFrame); VideoFrameReceived?.Invoke(this, videoArgs); // free frames that were received AFTER use! NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame); break; // audio is beyond the scope of this example case NDIlib.frame_type_e.frame_type_audio: Connected = true; ReceivingFrames = true; // if no audio or disabled, nothing to do if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0) { // always free received frames NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; } // we're working in bytes, so take the size of a 16 bit sample into account int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(short); // NDI uses planar, but we'll return interleaved which Pcm uses by default.
// create an interleaved frame and convert from the one we received NDIlib.audio_frame_interleaved_16s_t interleavedFrame = new NDIlib.audio_frame_interleaved_16s_t() { sample_rate = audioFrame.sample_rate, no_channels = audioFrame.no_channels, no_samples = audioFrame.no_samples, timecode = audioFrame.timecode }; // we need a managed byte array to add to buffered provider byte[] audBuffer = new byte[sizeInBytes]; // pin the byte[] and get a GC handle to it // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later // the data will only be moved once, during the fast interleave step that is required anyway GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned); // access it by an IntPtr and use it for our interleaved audio buffer interleavedFrame.p_data = handle.AddrOfPinnedObject(); // Convert from float planar to 16 bit interleaved audio NDIlib.util_audio_to_interleaved_16s_v2(ref audioFrame, ref interleavedFrame); AudioFrameReceivedEventArgs audioArgs = new AudioFrameReceivedEventArgs(); audioArgs.Frame = new AudioFrame16bpp(interleavedFrame); AudioFrameReceived?.Invoke(this, audioArgs); // release the pin on the byte[] // never try to access p_data after the byte[] has been unpinned! // that IntPtr will no longer be valid. handle.Free(); // free the frame that was received NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame); break; // Metadata case NDIlib.frame_type_e.frame_type_metadata: // UTF-8 strings must be converted for use - length includes the terminating zero //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1); //System.Diagnostics.Debug.Print(metadata); // free frames that were received NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame); break; case NDIlib.frame_type_e.frame_type_error: Connected = false; break; } } }