private void ReceiveThreadProc()
{
    var recvInstance = _ndiReceiveInstance;
    if (recvInstance == IntPtr.Zero)
    {
        return;
    }

    while (!_exitReceiveThread)
    {
        NDIlib_video_frame_t videoFrame = new NDIlib_video_frame_t();
        NDIlib_audio_frame_t audioFrame = new NDIlib_audio_frame_t();
        NDIlib_metadata_frame_t metadataFrame = new NDIlib_metadata_frame_t();

        switch (Ndi.NDIlib_recv_capture(recvInstance, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            case NDIlib_frame_type_e.NDIlib_frame_type_video:
                if (videoFrame.p_data == IntPtr.Zero)
                {
                    Ndi.NDIlib_recv_free_video(recvInstance, ref videoFrame);
                    break;
                }

                // capture everything we need before the frame is handed to the UI thread
                int yres = (int)videoFrame.yres;
                int xres = (int)videoFrame.xres;

                // aspect-ratio correction for non-square pixels, applied here via the vertical DPI
                double dpiY = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / yres));

                int stride = (int)videoFrame.line_stride_in_bytes;
                int bufferSize = yres * stride;

                var dispatcher = Application.Current?.Dispatcher;
                if (dispatcher == null)
                {
                    // no UI to draw on, so free the frame right away instead of leaking it
                    Ndi.NDIlib_recv_free_video(recvInstance, ref videoFrame);
                    break;
                }

                dispatcher.BeginInvoke(new Action(delegate
                {
                    // (re)create the bitmap if the source resolution changed
                    if (VideoBitmap == null || VideoBitmap.PixelWidth != xres || VideoBitmap.PixelHeight != yres)
                    {
                        VideoBitmap = new WriteableBitmap(xres, yres, 96, dpiY, System.Windows.Media.PixelFormats.Pbgra32, null);
                    }

                    // update the writeable bitmap
                    VideoBitmap.Lock();
                    VideoBitmap.WritePixels(new Int32Rect(0, 0, xres, yres), videoFrame.p_data, bufferSize, stride);
                    VideoBitmap.Unlock();

                    // the copy above is dispatched, so the frame must be freed inside this delegate
                    Ndi.NDIlib_recv_free_video(recvInstance, ref videoFrame);
                }));
                break;

            case NDIlib_frame_type_e.NDIlib_frame_type_audio:
                Ndi.NDIlib_recv_free_audio(recvInstance, ref audioFrame);
                break;

            case NDIlib_frame_type_e.NDIlib_frame_type_metadata:
                Ndi.NDIlib_recv_free_metadata(recvInstance, ref metadataFrame);
                break;
        }
    }

    Debug.WriteLine(this, "Receive thread exited");
}
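// Hedged sketch (not part of the original source): one way the receive thread above is typically
// started and shut down. _receiveThread, StartReceiveThread and StopReceiveThread are illustrative
// names; only _exitReceiveThread and _ndiReceiveInstance come from the code above, and the destroy
// call assumes the wrapper mirrors the C API's NDIlib_recv_destroy. Requires System.Threading.
private Thread _receiveThread;

private void StartReceiveThread()
{
    _exitReceiveThread = false;
    _receiveThread = new Thread(ReceiveThreadProc) { IsBackground = true, Name = "NdiReceive" };
    _receiveThread.Start();
}

private void StopReceiveThread()
{
    // signal the loop to exit and wait for it to finish before destroying the receiver
    _exitReceiveThread = true;
    _receiveThread?.Join();
    _receiveThread = null;

    if (_ndiReceiveInstance != IntPtr.Zero)
    {
        Ndi.NDIlib_recv_destroy(_ndiReceiveInstance); // assumption: wrapper exposes the C API name
        _ndiReceiveInstance = IntPtr.Zero;
    }
}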
// the receive thread runs through this loop until told to exit
// AccessViolationException
void ReceiveThreadProc()
{
    while (!_exitThread && (_recvInstancePtr != IntPtr.Zero))
    {
        // The descriptors
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received
            case NDIlib.frame_type_e.frame_type_none:
                break;

            // frame settings - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:
                // do nothing
                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:

                // if not enabled, just discard
                // this can also occasionally happen when changing sources
                if (_videoEnabled == false || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                // get all our info so that we can free the frame
                int yres = (int)videoFrame.yres;
                int xres = (int)videoFrame.xres;

                // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                //double dpiX = 96.0 * ((double)videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                int stride = (int)videoFrame.line_stride_in_bytes;
                int bufferSize = yres * stride;

                // We need to be on the UI thread to write to our bitmap
                // Not very efficient, but this is just an example
                Dispatcher.BeginInvoke(new Action(delegate
                {
                    if (_videoEnabled == false)
                    {
                        // still free the frame even though we are not drawing it
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                        return;
                    }

                    // resize the writeable bitmap if needed
                    if (VideoBitmap == null || VideoBitmap.PixelWidth != xres || VideoBitmap.PixelHeight != yres)
                    {
                        VideoBitmap = new WriteableBitmap(xres, yres, 96.0, 96.0, PixelFormats.Pbgra32, null);
                        VideoSurface.Source = VideoBitmap;
                    }

                    VideoBitmap.Lock();
                    try
                    {
                        // copy straight into the back buffer
                        // note: this assumes the NDI line stride matches the bitmap's BackBufferStride;
                        // if they differ, the copy writes past the end of the buffer
                        CopyMemory(VideoBitmap.BackBuffer, videoFrame.p_data, (uint)bufferSize);

                        // Specify the area of the bitmap that changed.
                        VideoBitmap.AddDirtyRect(new Int32Rect(0, 0, xres, yres));
                    }
                    catch
                    {
                        // swallow copy failures; the frame is still freed below
                    }
                    finally
                    {
                        VideoBitmap.Unlock();
                    }

                    // free received frames AFTER use!
                    // The copy is dispatched, so the free must happen inside this delegate.
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                }));

#if DEBUG
                // compute a rough frame rate
                DateTime now = DateTime.Now;
                TimeSpan dist = now - last;
                if (dist.Seconds > 0)
                {
                    Console.WriteLine("Frame rate: <1");
                }
                else
                {
                    if (dist.Milliseconds == 0)
                    {
                        Console.WriteLine("Frame rate: ∞");
                    }
                    else
                    {
                        int frame = 1000 / dist.Milliseconds;
                        Console.WriteLine("Frame rate: {0}", frame.ToString());
                    }
                }
                last = now;
#endif
                break;

            case NDIlib.frame_type_e.frame_type_audio:
                // audio is unused here, but it must be freed or it will pile up in memory
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                break;

            case NDIlib.frame_type_e.frame_type_metadata:
                // metadata is unused here, just free it
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;
        }
    }
}
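// The CopyMemory call above is a P/Invoke whose declaration is not included in this snippet.
// A commonly used declaration is sketched below (hedged: the original project may declare it
// differently); it requires using System.Runtime.InteropServices.
[DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory", SetLastError = false)]
private static extern void CopyMemory(IntPtr dest, IntPtr src, uint count);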
// the receive thread runs through this loop until told to exit
void ReceiveThreadProc()
{
    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The descriptors
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received
            case NDIlib.frame_type_e.frame_type_none:
                break;

            // frame settings - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:

                // check for PTZ
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // Check for recording
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // Check for a web control URL
                // We must free this string ptr if we get one.
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // convert to a managed String
                    WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                    // Don't forget to free the string ptr
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }
                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:

                // if not enabled, just discard
                // this can also occasionally happen when changing sources
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                // get all our info so that we can free the frame
                int yres = (int)videoFrame.yres;
                int xres = (int)videoFrame.xres;

                // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                int stride = (int)videoFrame.line_stride_in_bytes;
                int bufferSize = yres * stride;

                // We need to be on the UI thread to write to our bitmap
                // Not very efficient, but this is just an example
                Dispatcher.BeginInvoke(new Action(delegate
                {
                    // resize the writeable bitmap if needed
                    if (VideoBitmap == null ||
                        VideoBitmap.PixelWidth != xres ||
                        VideoBitmap.PixelHeight != yres ||
                        Math.Abs(VideoBitmap.DpiX - dpiX) > 0.001)
                    {
                        VideoBitmap = null;
                        GC.Collect(1);

                        VideoBitmap = new WriteableBitmap(xres, yres, dpiX, 96.0, PixelFormats.Pbgra32, null);
                        VideoSurface.Source = VideoBitmap;
                    }

                    VideoBitmap.Lock();

                    // update the writeable bitmap
                    VideoBitmap.WritePixels(new Int32Rect(0, 0, xres, yres), videoFrame.p_data, bufferSize, stride, 0, 0);

                    VideoBitmap.Unlock();
                    //GC.Collect(1);

                    // free frames that were received AFTER use!
                    // This WritePixels call is dispatched, so we must do it inside this scope.
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                }));
                break;

            // Audio data
            case NDIlib.frame_type_e.frame_type_audio:

                // if no audio or disabled, nothing to do
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // always free received frames
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;
                }

                // if the audio format changed, we need to reconfigure the audio device
                bool formatChanged = false;

                // make sure our format has been created and matches the incoming audio
                if (_waveFormat == null ||
                    _waveFormat.Channels != audioFrame.no_channels ||
                    _waveFormat.SampleRate != audioFrame.sample_rate)
                {
                    // Create a WaveFormat that matches the incoming frames
                    _waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);
                    formatChanged = true;
                }

                // set up our audio buffer if needed
                if (_bufferedProvider == null || formatChanged)
                {
                    _bufferedProvider = new BufferedWaveProvider(_waveFormat) { DiscardOnBufferOverflow = true };
                }

                // set up our multiplexer (used to mix down to 2 output channels)
                if (_multiplexProvider == null || formatChanged)
                {
                    _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2);
                }

                // set up our audio output device
                if (_haveAudioDevice && (_wasapiOut == null || formatChanged))
                {
                    try
                    {
                        // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                        // This is close enough to show that audio is received and converted correctly.
                        _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                        _wasapiOut.Init(_multiplexProvider);
                        _wasapiOut.Volume = _volume;
                        _wasapiOut.Play();
                    }
                    catch
                    {
                        // if this fails, assume that there is no audio device on the system
                        // so that we don't retry/catch on every audio frame received
                        _haveAudioDevice = false;
                    }
                }

                // did we get a device?
                if (_haveAudioDevice && _wasapiOut != null)
                {
                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples = audioFrame.no_samples,
                        timecode = audioFrame.timecode
                    };

                    // we need a managed byte array to add to the buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit
                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    // push the byte[] buffer into the bufferedProvider for output
                    _bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);
                }

                // free the frame that was received
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                break;

            // Metadata
            case NDIlib.frame_type_e.frame_type_metadata:

                // UTF-8 strings must be converted for use - length includes the terminating zero
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);
                //System.Diagnostics.Debug.Print(metadata);

                // free frames that were received
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;
        }
    }
}
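// Hedged sketch (not part of the original source): the 16-bit conversion mentioned in the comment
// above, following the same pin-and-convert pattern as the 32-bit float path. It assumes the managed
// wrapper mirrors the C API names util_audio_to_interleaved_16s_v2 and audio_frame_interleaved_16s_t
// (including a reference_level field); verify against the wrapper you are actually using.
private static byte[] ToInterleaved16(ref NDIlib.audio_frame_v2_t audioFrame)
{
    // 16-bit PCM: two bytes per sample per channel
    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(short);
    byte[] buffer = new byte[sizeInBytes];

    // pin the managed buffer so the native conversion can write straight into it
    GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    try
    {
        NDIlib.audio_frame_interleaved_16s_t interleaved = new NDIlib.audio_frame_interleaved_16s_t()
        {
            sample_rate = audioFrame.sample_rate,
            no_channels = audioFrame.no_channels,
            no_samples = audioFrame.no_samples,
            timecode = audioFrame.timecode,
            reference_level = 0, // headroom in dB; 0 is a common default
            p_data = handle.AddrOfPinnedObject()
        };

        // Convert from float planar to interleaved 16-bit audio
        NDIlib.util_audio_to_interleaved_16s_v2(ref audioFrame, ref interleaved);
    }
    finally
    {
        // never touch p_data after unpinning
        handle.Free();
    }

    // pair this buffer with a 16-bit provider, e.g. new WaveFormat(sample_rate, 16, no_channels)
    return buffer;
}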