// Receive the next available video, audio and/or metadata frame from the receiver.
// Any of the frame arguments may be left unpopulated by the native side; data of a
// type you do not want is simply not captured in this call. This function is safe to
// call simultaneously from separate threads, so audio, video and metadata may each be
// received on their own thread. It returns NDIlib_frame_type_none when nothing arrives
// within timeout_in_ms, and NDIlib_frame_type_error if the connection has been lost.
// Any buffer filled in by this call must be released with the matching free function.
public static frame_type_e recv_capture_v2(IntPtr p_instance, ref video_frame_v2_t p_video_data, ref audio_frame_v2_t p_audio_data, ref metadata_frame_t p_metadata, UInt32 timeout_in_ms)
{
    // Select the native import that matches the pointer width of the running process.
    return IntPtr.Size == 8
        ? UnsafeNativeMethods.recv_capture_v2_64(p_instance, ref p_video_data, ref p_audio_data, ref p_metadata, timeout_in_ms)
        : UnsafeNativeMethods.recv_capture_v2_32(p_instance, ref p_video_data, ref p_audio_data, ref p_metadata, timeout_in_ms);
}
// Release a video frame previously returned by recv_capture_v2. Every captured
// video frame must be handed back through this call so the native library can
// reclaim its buffer.
public static void recv_free_video_v2(IntPtr p_instance, ref video_frame_v2_t p_video_data)
{
    // Select the native import that matches the pointer width of the running process.
    if (IntPtr.Size == 8)
        UnsafeNativeMethods.recv_free_video_v2_64(p_instance, ref p_video_data);
    else
        UnsafeNativeMethods.recv_free_video_v2_32(p_instance, ref p_video_data);
}
// Schedule a video frame for display and return immediately; all processing and
// network sending happens asynchronously. The memory referenced by the frame must
// not be freed or reused by the caller until a synchronizing event has occurred.
// In general the API can exploit asynchronous processing better than you could by
// simply submitting frames from a separate thread of your own.
//
// This call is particularly beneficial when processing BGRA video, since color
// conversion, compression and network sending can all run on threads separate
// from your main rendering thread.
//
// Synchronizing events are:
//   - a call to NDIlib_send_send_video
//   - a call to NDIlib_send_send_video_async with another frame to be sent
//   - a call to NDIlib_send_send_video with p_video_data=NULL
//   - a call to NDIlib_send_destroy
public static void send_send_video_async_v2(IntPtr p_instance, ref video_frame_v2_t p_video_data)
{
    // Select the native import that matches the pointer width of the running process.
    if (IntPtr.Size == 8)
        UnsafeNativeMethods.send_send_video_async_v2_64(p_instance, ref p_video_data);
    else
        UnsafeNativeMethods.send_send_video_async_v2_32(p_instance, ref p_video_data);
}
// Convert a 16-bit semi-planar (P216) frame to 10-bit (V210). The caller is
// responsible for ensuring p_dst_v210 already has memory and a stride allocated.
public static void util_P216_to_V210(ref video_frame_v2_t p_src_p216, ref video_frame_v2_t p_dst_v210)
{
    // Select the native import that matches the pointer width of the running process.
    if (IntPtr.Size == 8)
        UnsafeNativeMethods.util_P216_to_V210_64(ref p_src_p216, ref p_dst_v210);
    else
        UnsafeNativeMethods.util_P216_to_V210_32(ref p_src_p216, ref p_dst_v210);
}
// Native entry point (32-bit): frees a video frame captured by recv_capture_v2.
// Called only through the public recv_free_video_v2 wrapper, which picks the
// 32- or 64-bit import based on IntPtr.Size.
internal static extern void recv_free_video_v2_32(IntPtr p_instance, ref video_frame_v2_t p_video_data);
// Native entry point (32-bit): captures the next video/audio/metadata frame,
// waiting up to timeout_in_ms. Called only through the public recv_capture_v2
// wrapper, which picks the 32- or 64-bit import based on IntPtr.Size.
internal static extern frame_type_e recv_capture_v2_32(IntPtr p_instance, ref video_frame_v2_t p_video_data, ref audio_frame_v2_t p_audio_data, ref metadata_frame_t p_metadata, UInt32 timeout_in_ms);
// Native entry point (32-bit): schedules a video frame for asynchronous sending.
// Called only through the public send_send_video_async_v2 wrapper, which picks
// the 32- or 64-bit import based on IntPtr.Size.
internal static extern void send_send_video_async_v2_32(IntPtr p_instance, ref video_frame_v2_t p_video_data);
// Native entry point: synchronously submits a video frame to the sender instance.
// NOTE(review): unlike the sibling declarations this one has no _32/_64 suffix and
// is public rather than internal — confirm its DllImport mapping (not visible in
// this chunk) and whether it should be routed through a bitness-dispatch wrapper
// like the other calls.
public static extern void send_send_video_v2(IntPtr p_instance, ref video_frame_v2_t p_video_data);
// Native entry point (32-bit): converts a 16-bit semi-planar (P216) source frame
// into a pre-allocated 10-bit (V210) destination frame. Called only through the
// public util_P216_to_V210 wrapper, which picks the 32- or 64-bit import based
// on IntPtr.Size.
// Fix: the destination parameter was misleadingly named p_dst_p216; it is the
// V210 destination (the wrapper passes its p_dst_v210 argument here). Parameter
// names play no role in P/Invoke marshaling and the wrapper calls positionally,
// so this rename is purely a consistency/readability fix.
internal static extern void util_P216_to_V210_32(ref video_frame_v2_t p_src_p216, ref video_frame_v2_t p_dst_v210);