/// <summary>
/// Converts a GDI+ <see cref="Bitmap"/> into a frozen WPF <see cref="BitmapImage"/>
/// and raises <c>VideoFrameReceived</c> with the result. Does nothing unless the
/// decoding thread is running.
/// </summary>
/// <param name="bitmap">Source bitmap to convert; ignored when null.</param>
private void BitmapToImageSource(Bitmap bitmap)
{
    // Guard clauses: skip work when decoding has stopped or there is no frame.
    // (The original caught NullReferenceException instead of checking for null.)
    if (!isDecodingThreadRunning || bitmap == null)
    {
        return;
    }

    using (var memory = new MemoryStream())
    {
        try
        {
            bitmap.Save(memory, ImageFormat.Bmp);
            memory.Position = 0;

            BitmapImage bitmapimage = new BitmapImage();
            bitmapimage.BeginInit();
            // OnLoad caches the pixel data immediately, so the stream may be
            // disposed by the enclosing using block once EndInit has run.
            bitmapimage.CacheOption = BitmapCacheOption.OnLoad;
            bitmapimage.StreamSource = memory;
            bitmapimage.EndInit();
            // Freeze so the image can be handed across to the UI thread.
            bitmapimage.Freeze();

            VideoFrameReceived?.Invoke(bitmapimage);
        }
        catch (Exception ex)
        {
            // Best-effort conversion: log and drop this frame rather than
            // letting an exception kill the decoding thread.
            Debug.WriteLine(ex.ToString());
        }
    }
    // NOTE: the explicit memory.Dispose() the original performed here was
    // redundant — the using block already disposes the stream.
}
/// <summary>
/// Forwards an incoming Tox AV video-frame event to this instance's
/// <c>VideoFrameReceived</c> subscribers, passing the event data through unchanged.
/// </summary>
/// <param name="sender">Original event source (not forwarded; replaced by <c>this</c>).</param>
/// <param name="e">Video-frame event data forwarded as-is.</param>
private void VideoFrameReceivedHandler(object sender, ToxAvEventArgs.VideoFrameEventArgs e)
{
    // Snapshot the delegate so a concurrent unsubscribe between the null
    // check and the invocation cannot cause a NullReferenceException.
    var subscribers = VideoFrameReceived;
    if (subscribers != null)
    {
        subscribers(this, e);
    }
}
/// <summary>
/// Receive-thread body: polls the NDI receiver for video, audio, metadata and
/// status frames until <c>_exitThread</c> is set or the receiver handle is gone.
/// Every captured frame is returned to the SDK via the matching recv_free_* call,
/// now guaranteed by try/finally even when an event subscriber throws (the
/// original leaked the native frame — and, for audio, the pinned GCHandle — in
/// that case).
/// </summary>
void ReceiveThreadProc()
{
    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The frame descriptors filled in by the capture call below.
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received within the 1000 ms timeout.
            case NDIlib.frame_type_e.frame_type_none:
                ReceivingFrames = false;
                break;

            // Frame settings changed - check for extended functionality.
            case NDIlib.frame_type_e.frame_type_status_change:
                // Check for PTZ support.
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // Check for recording support.
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // Check for a web control URL.
                // We must free this string ptr if we get one.
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // Convert to a managed string.
                    WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                    // Don't forget to free the string ptr.
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }
                break;

            // Video data.
            case NDIlib.frame_type_e.frame_type_video:
                Connected = true;
                ReceivingFrames = true;

                // If not enabled, just discard.
                // This can also occasionally happen when changing sources.
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // Always free received frames.
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                try
                {
                    VideoFrameReceivedEventArgs videoArgs = new VideoFrameReceivedEventArgs();
                    videoArgs.Frame = new VideoFrame(videoFrame);
                    VideoFrameReceived?.Invoke(this, videoArgs);
                }
                finally
                {
                    // Free the received frame AFTER use — even when a
                    // subscriber throws, so the native buffer cannot leak.
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                }
                break;

            // Audio is beyond the scope of this example.
            case NDIlib.frame_type_e.frame_type_audio:
                Connected = true;
                ReceivingFrames = true;

                // If no audio or disabled, nothing to do.
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // Always free received frames.
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;
                }

                // We're working in bytes, so take the size of a 16 bit sample into account.
                int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(short);

                // NDI uses planar, but we'll return interleaved which Pcm uses by default.
                // Create an interleaved frame and convert from the one we received.
                NDIlib.audio_frame_interleaved_16s_t interleavedFrame = new NDIlib.audio_frame_interleaved_16s_t()
                {
                    sample_rate = audioFrame.sample_rate,
                    no_channels = audioFrame.no_channels,
                    no_samples = audioFrame.no_samples,
                    timecode = audioFrame.timecode
                };

                // We need a managed byte array to hand to a buffered provider.
                byte[] audBuffer = new byte[sizeInBytes];

                // Pin the byte[] and get a GC handle to it.
                // Doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later;
                // the data will only be moved once, during the fast interleave step that is required anyway.
                GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);
                try
                {
                    // Access it by an IntPtr and use it for our interleaved audio buffer.
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to 16 bit interleaved audio.
                    NDIlib.util_audio_to_interleaved_16s_v2(ref audioFrame, ref interleavedFrame);

                    AudioFrameReceivedEventArgs audioArgs = new AudioFrameReceivedEventArgs();
                    audioArgs.Frame = new AudioFrame16bpp(interleavedFrame);
                    AudioFrameReceived?.Invoke(this, audioArgs);
                }
                finally
                {
                    // Release the pin on the byte[] even when a subscriber throws.
                    // Never try to access p_data after the byte[] has been unpinned —
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    // Free the frame that was received.
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                }
                break;

            // Metadata.
            case NDIlib.frame_type_e.frame_type_metadata:
                // UTF-8 strings must be converted for use - length includes the terminating zero.
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);
                //System.Diagnostics.Debug.Print(metadata);

                // Free frames that were received.
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;

            case NDIlib.frame_type_e.frame_type_error:
                Connected = false;
                break;
        }
    }
}