// The receive thread runs through this loop until told to exit.
// Pulls video/audio/metadata frames from the NDI receiver; video is written
// to a WriteableBitmap on the UI thread, audio is converted to interleaved
// floats and pushed into an NAudio buffered provider for playback.
void ReceiveThreadProc()
{
    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The frame descriptors filled in by recv_capture_v2
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        // Blocks for up to 1000 ms waiting for a frame of any type
        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received within the timeout
            case NDIlib.frame_type_e.frame_type_none:
                break;

            // frame settings changed - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:
                // check for PTZ
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // Check for recording
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // Check for a web control URL.
                // We must free this string ptr if we get one.
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // convert to managed String
                    WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                    // Don't forget to free the string ptr
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }
                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:
                // If not enabled, just discard.
                // This can also occasionally happen when changing sources.
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                // get all our info so that we can free the frame
                int yres = (int)videoFrame.yres;
                int xres = (int)videoFrame.xres;

                // Quick and dirty aspect ratio correction for non-square pixels
                // (SD 4:3, 16:9, etc.): raise/lower the horizontal DPI so WPF
                // renders the frame at height * picture_aspect_ratio width.
                double dpiX = 96.0 * (((double)xres / (double)yres) / videoFrame.picture_aspect_ratio);

                int stride = (int)videoFrame.line_stride_in_bytes;
                int bufferSize = yres * stride;

                // We need to be on the UI thread to write to our bitmap.
                // Not very efficient, but this is just an example.
                Dispatcher.BeginInvoke(new Action(delegate
                {
                    // (Re)create the writeable bitmap if the size or DPI changed.
                    // The old bitmap (if any) is reclaimed by the GC normally -
                    // forcing a collection here (the old GC.Collect(1)) only
                    // added pauses without guaranteeing anything.
                    if (VideoBitmap == null ||
                        VideoBitmap.PixelWidth != xres ||
                        VideoBitmap.PixelHeight != yres ||
                        Math.Abs(VideoBitmap.DpiX - dpiX) > 0.001)
                    {
                        VideoBitmap = new WriteableBitmap(xres, yres, dpiX, 96.0, PixelFormats.Pbgra32, null);
                        VideoSurface.Source = VideoBitmap;
                    }

                    try
                    {
                        // update the writeable bitmap from the unmanaged frame data
                        VideoBitmap.WritePixels(new Int32Rect(0, 0, xres, yres), videoFrame.p_data, bufferSize, stride);
                    }
                    finally
                    {
                        // Free received frames AFTER use. The WritePixels call is
                        // dispatched, so the free must happen inside this scope -
                        // and must happen even if WritePixels throws.
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    }
                }));
                break;

            // Audio data
            case NDIlib.frame_type_e.frame_type_audio:
                // if no audio or disabled, nothing to do
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // always free received frames
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;
                }

                // if the audio format changed, we need to reconfigure the audio device
                bool formatChanged = false;

                // make sure our format has been created and matches the incoming audio
                if (_waveFormat == null ||
                    _waveFormat.Channels != audioFrame.no_channels ||
                    _waveFormat.SampleRate != audioFrame.sample_rate)
                {
                    // Create a wave format that matches the incoming frames
                    _waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);
                    formatChanged = true;
                }

                // set up our audio buffer if needed
                if (_bufferedProvider == null || formatChanged)
                {
                    _bufferedProvider = new BufferedWaveProvider(_waveFormat);
                    _bufferedProvider.DiscardOnBufferOverflow = true;
                }

                // set up our multiplexer used to mix down to 2 output channels
                if (_multiplexProvider == null || formatChanged)
                {
                    _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2);
                }

                // set up our audio output device
                if (_haveAudioDevice && (_wasapiOut == null || formatChanged))
                {
                    try
                    {
                        // Release the previous output device before replacing it so
                        // the audio endpoint is not leaked on a format change.
                        _wasapiOut?.Dispose();

                        // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                        // This is close enough to show that audio is received and converted correctly.
                        _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                        _wasapiOut.Init(_multiplexProvider);
                        _wasapiOut.Volume = _volume;
                        _wasapiOut.Play();
                    }
                    catch
                    {
                        // if this fails, assume that there is no audio device on the system
                        // so that we don't retry/catch on every audio frame received
                        _haveAudioDevice = false;
                    }
                }

                // did we get a device?
                if (_haveAudioDevice && _wasapiOut != null)
                {
                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // Create an interleaved frame and convert from the one we received.
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples = audioFrame.no_samples,
                        timecode = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // Pin the byte[] and get a GC handle to it.
                    // Doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later.
                    // The data will only be moved once, during the fast interleave step that is required anyway.
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);
                    try
                    {
                        // access it by an IntPtr and use it for our interleaved audio buffer
                        interleavedFrame.p_data = handle.AddrOfPinnedObject();

                        // Convert from float planar to float interleaved audio.
                        // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit.
                        NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);
                    }
                    finally
                    {
                        // Release the pin on the byte[] even if the conversion throws.
                        // Never access p_data after the byte[] has been unpinned -
                        // that IntPtr will no longer be valid.
                        handle.Free();
                    }

                    // push the byte[] buffer into the bufferedProvider for output
                    _bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);
                }

                // free the frame that was received
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                break;

            // Metadata
            case NDIlib.frame_type_e.frame_type_metadata:
                // UTF-8 strings must be converted for use - length includes the terminating zero
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);
                //System.Diagnostics.Debug.Print(metadata);

                // free frames that were received
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;
        }
    }
}
// Forwards audio captured by NAudio (WaveIn) to NDI. Converts the interleaved
// 16-bit integer or 32-bit float samples in e.Buffer into the planar float
// frame NDI expects, then sends it unless sending is paused.
private void AudioCap_DataAvailable(object sender, WaveInEventArgs e)
{
    if (isPausedValue || sendInstancePtr == IntPtr.Zero)
    {
        return;
    }

    // how many samples?
    int numSamples = (e.BytesRecorded / (audioNumChannels * audioSampleSizeInBytes));

    // how much float buffer will this need?
    int bufferSizeNeeded = numSamples * audioNumChannels * sizeof(float);

    // is our audio frame big enough? too big is fine
    if (audioBufferSize < bufferSizeNeeded || audioFrame.p_data == IntPtr.Zero)
    {
        // BUGFIX: the old code compared the IntPtr against null, which is a
        // lifted struct comparison that is always true. Compare against
        // IntPtr.Zero so we only free a buffer that was actually allocated.
        if (audioFrame.p_data != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(audioFrame.p_data);
            audioFrame.p_data = IntPtr.Zero;
        }

        audioFrame.p_data = Marshal.AllocHGlobal(bufferSizeNeeded);
        audioBufferSize = bufferSizeNeeded;
    }

    // set these every time because why not?
    audioFrame.sample_rate = audioSampleRate;
    audioFrame.no_channels = audioNumChannels;
    audioFrame.no_samples = numSamples;

    // pin the byte[] audio received and get a GC handle to it
    GCHandle interleavedHandle = GCHandle.Alloc(e.Buffer, GCHandleType.Pinned);
    try
    {
        if (audioSampleSizeInBytes == 2)
        {
            // make a temporary interleaved NDI audio frame around the received samples
            NDIlib.audio_frame_interleaved_16s_t interleavedShortFrame = new NDIlib.audio_frame_interleaved_16s_t()
            {
                sample_rate = audioSampleRate,
                no_channels = audioNumChannels,
                no_samples = numSamples,
                p_data = interleavedHandle.AddrOfPinnedObject()
            };

            // Convert from s16 interleaved to float planar audio
            NDIlib.util_audio_from_interleaved_16s_v2(ref interleavedShortFrame, ref audioFrame);
        }
        else if (audioSampleSizeInBytes == 4)
        {
            // make a temporary interleaved NDI audio frame around the received samples
            NDIlib.audio_frame_interleaved_32f_t interleavedFloatFrame = new NDIlib.audio_frame_interleaved_32f_t()
            {
                sample_rate = audioSampleRate,
                no_channels = audioNumChannels,
                no_samples = numSamples,
                p_data = interleavedHandle.AddrOfPinnedObject()
            };

            // Convert from float interleaved to float planar audio
            NDIlib.util_audio_from_interleaved_32f_v2(ref interleavedFloatFrame, ref audioFrame);
        }
        else
        {
            System.Diagnostics.Debug.Assert(false, "Unexpected audio sample size.");
        }
    }
    finally
    {
        // Release the GC pin on the byte[] even if the conversion throws;
        // leaking a pinned handle fragments the GC heap.
        interleavedHandle.Free();
    }

    // lock (unlike bare Monitor.Enter/Exit) guarantees the monitor is
    // released even if the send throws
    lock (sendInstanceLock)
    {
        // send the planar frame
        if (sendInstancePtr != IntPtr.Zero && !IsSendPaused)
        {
            NDIlib.send_send_audio_v2(sendInstancePtr, ref audioFrame);
        }
    }
}
// The receive thread runs through this loop until told to exit.
// Video frames are copied into a pair of unmanaged ping-pong buffers
// (buffer0/buffer1) and published to observers as an image; audio frames are
// converted to interleaved floats and handed to audioOutSignal.
void ReceiveThreadProc()
{
    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The frame descriptors filled in by recv_capture_v2
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        // Blocks for up to 1000 ms waiting for a frame of any type
        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received within the timeout
            case NDIlib.frame_type_e.frame_type_none:
                break;

            // frame settings changed - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:
                // check for PTZ
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // Check for recording
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // Check for a web control URL.
                // We must free this string ptr if we get one.
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // convert to managed String
                    WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                    // Don't forget to free the string ptr
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }
                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:
                // If not enabled, just discard.
                // This can also occasionally happen when changing sources.
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                // get all our info so that we can free the frame
                int yres = (int)videoFrame.yres;
                int xres = (int)videoFrame.xres;

                // Quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                // NOTE(review): dpiX is never used below in this variant, and the ratio
                // is the inverse of the one the WPF variant of this method computes -
                // confirm which formula (if any) is still wanted here.
                double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                int stride = (int)videoFrame.line_stride_in_bytes;
                int bufferSize = yres * stride;

                // grow/shrink both ping-pong buffers when the frame size changes
                if (bufferSize != buffer01Size)
                {
                    buffer0 = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                    buffer1 = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                    buffer01Size = bufferSize;
                }

                // Copy the frame data row by row into buffer0
                unsafe
                {
                    byte *dst = (byte *)buffer0.ToPointer();
                    byte *src = (byte *)videoFrame.p_data.ToPointer();

                    for (int y = 0; y < yres; y++)
                    {
                        memcpy(dst, src, stride);
                        dst += stride;
                        src += stride;
                    }
                }

                // swap the ping-pong buffers: buffer1 now holds the newest frame
                IntPtr temp = buffer0;
                buffer0 = buffer1;
                buffer1 = temp;

                // map the NDI FourCC onto the imaging layer's pixel format
                ImagingPixelFormat pixFmt;
                switch (videoFrame.FourCC)
                {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        pixFmt = PixelFormat.B8G8R8A8;
                        break;
                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        pixFmt = PixelFormat.B8G8R8;
                        break;
                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        pixFmt = PixelFormat.R8G8B8A8;
                        break;
                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        pixFmt = PixelFormat.R8G8B8;
                        break;
                    default:
                        pixFmt = PixelFormat.Unknown; // TODO: need to handle other video formats which are currently unsupported by IImage
                        break;
                }

                // wrap the freshest buffer in an image and publish it to observers
                var VideoFrameImage = buffer1.ToImage(bufferSize, xres, yres, pixFmt, videoFrame.FourCC.ToString());
                videoFrames.OnNext(VideoFrameImage);

                // Free received frames AFTER use - the pixel data was copied above,
                // so the NDI buffer is no longer needed at this point.
                NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                break;

            // Audio data
            case NDIlib.frame_type_e.frame_type_audio:
                // if no audio or disabled, nothing to do
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // always free received frames
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;
                }

                // we're working in bytes, so take the size of a 32 bit sample (float) into account
                int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                // NDI delivers planar audio; build an interleaved frame and
                // convert from the one we received
                NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                {
                    sample_rate = audioFrame.sample_rate,
                    no_channels = audioFrame.no_channels,
                    no_samples = audioFrame.no_samples,
                    timecode = audioFrame.timecode
                };

                // we need a managed byte array for the converted samples
                byte[] audBuffer = new byte[sizeInBytes];

                // Pin the byte[] and get a GC handle to it.
                // Doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later.
                // The data will only be moved once, during the fast interleave step that is required anyway.
                GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                // access it by an IntPtr and use it for our interleaved audio buffer
                interleavedFrame.p_data = handle.AddrOfPinnedObject();

                // Convert from float planar to float interleaved audio.
                // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit.
                NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                // Release the pin on the byte[].
                // Never try to access p_data after the byte[] has been unpinned -
                // that IntPtr will no longer be valid.
                handle.Free();

                int channelStride = audioFrame.channel_stride_in_bytes;
                var floatBuffer = ConvertByteArrayToFloat(audBuffer, channelStride);

                // NOTE(review): Buffer.BlockCopy counts BYTES, so this copies only
                // 512 bytes (= 128 floats) into a 512-element float array, and it
                // throws if floatBuffer holds fewer than 512 bytes. Confirm whether
                // 512 * sizeof(float) (plus a length check) was intended.
                float[] outBuffer = new float[512];
                Buffer.BlockCopy(floatBuffer, 0, outBuffer, 0, 512);

                // presumably hands the samples to the audio output; the exact
                // Read semantics depend on audioOutSignal's contract - TODO confirm
                audioOutSignal.Read(outBuffer, 0, 512);

                // free the frame that was received
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                break;

            // Metadata
            case NDIlib.frame_type_e.frame_type_metadata:
                // UTF-8 strings must be converted for use - length includes the terminating zero
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);
                //System.Diagnostics.Debug.Print(metadata);

                // free frames that were received
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;
        }
    }
}
/// <summary>
/// The receive thread runs through this loop until told to exit.
/// Video frames are copied into unmanaged ping-pong buffers and uploaded to a
/// Direct3D 11 texture which is published to observers; audio output is
/// currently disabled (see the commented-out NAudio code below).
/// </summary>
void ReceiveThreadProc()
{
    // True until the first video frame arrives; the output texture is created once.
    // NOTE(review): this flag is never reset, so the texture is NOT recreated if
    // the source resolution or pixel format changes mid-stream - confirm intended.
    bool newVideo = true;

    // hold the graphics device for the lifetime of this thread
    using var deviceHandle = deviceProvider.GetHandle();
    var device = deviceHandle.Resource;

    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The frame descriptors filled in by recv_capture_v2
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        // Blocks for up to 1000 ms waiting for a frame of any type
        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received within the timeout
            case NDIlib.frame_type_e.frame_type_none:
                break;

            // frame settings changed - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:
                // check for PTZ
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // Check for recording
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // Check for a web control URL.
                // We must free this string ptr if we get one.
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // convert to managed String
                    WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                    // Don't forget to free the string ptr
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }
                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:
                // If not enabled, just discard.
                // This can also occasionally happen when changing sources.
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                // get all our info so that we can free the frame
                int yres = (int)videoFrame.yres;
                int xres = (int)videoFrame.xres;

                // Quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                // NOTE(review): dpiX is never used below in this variant - leftover
                // from the WPF version? Confirm before relying on it.
                double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                int stride = (int)videoFrame.line_stride_in_bytes;
                int bufferSize = yres * stride;

                // grow/shrink both ping-pong buffers when the frame size changes
                if (bufferSize != buffer01Size)
                {
                    buffer0 = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                    buffer1 = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                    buffer01Size = bufferSize;
                }

                // Copy the frame data row by row into buffer0
                unsafe
                {
                    byte *dst = (byte *)buffer0.ToPointer();
                    byte *src = (byte *)videoFrame.p_data.ToPointer();

                    for (int y = 0; y < yres; y++)
                    {
                        memcpy(dst, src, stride);
                        dst += stride;
                        src += stride;
                    }
                }

                // swap the ping-pong buffers: buffer1 now holds the newest frame
                IntPtr temp = buffer0;
                buffer0 = buffer1;
                buffer1 = temp;

                // map the NDI FourCC onto a DXGI texture format
                SharpDX.DXGI.Format texFmt;
                switch (videoFrame.FourCC)
                {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;
                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;
                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        texFmt = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        break;
                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        // NOTE(review): RGBX mapped to a BGRA format will swap the
                        // red/blue channels; R8G8B8A8_UNorm looks like the intended
                        // value (as in the RGBA case above) - confirm.
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;
                    default:
                        texFmt = SharpDX.DXGI.Format.Unknown; // TODO: need to handle other video formats
                        break;
                }

                if (newVideo) // it's the first time we enter the while loop, so create a new texture
                {
                    textureDesc = new Texture2DDescription()
                    {
                        Width = xres,
                        Height = yres,
                        MipLevels = 1,
                        ArraySize = 1,
                        Format = texFmt,
                        SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
                        Usage = ResourceUsage.Default,
                        BindFlags = BindFlags.ShaderResource,
                        CpuAccessFlags = CpuAccessFlags.None,
                        OptionFlags = ResourceOptionFlags.None
                    };

                    outputTexture = new Texture2D(device, textureDesc);
                    newVideo = false;
                }

                try
                {
                    // NOTE(review): this DataBox is built from only a pointer, so its
                    // RowPitch is left at the default; UpdateSubresource on a 2D
                    // texture generally needs the row stride - verify against the
                    // SharpDX documentation.
                    DataBox srcBox = new DataBox(buffer1);
                    device.ImmediateContext.UpdateSubresource(srcBox, outputTexture, 0);

                    // publish the updated texture to observers
                    videoFrames.OnNext(outputTexture);
                }
                finally
                {
                    // NOTE(review): there is no matching MapSubresource call, and the
                    // texture is Usage.Default (not CPU-mappable); unmapping here
                    // looks incorrect - verify whether this line should be removed.
                    device.ImmediateContext.UnmapSubresource(outputTexture, 0);
                }

                // Free received frames AFTER use - the pixel data was copied above,
                // so the NDI buffer is no longer needed at this point.
                NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                break;

            // Audio data
            case NDIlib.frame_type_e.frame_type_audio:
                // if no audio or disabled, nothing to do
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // always free received frames
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;
                }

                // The NAudio playback path from the original example is disabled in
                // this variant; kept for reference.
                //// if the audio format changed, we need to reconfigure the audio device
                //bool formatChanged = false;
                //// make sure our format has been created and matches the incomming audio
                //if (_waveFormat == null ||
                //    _waveFormat.Channels != audioFrame.no_channels ||
                //    _waveFormat.SampleRate != audioFrame.sample_rate)
                //{
                //    //// Create a wavformat that matches the incomming frames
                //    //_waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);
                //    formatChanged = true;
                //}
                //// set up our audio buffer if needed
                //if (_bufferedProvider == null || formatChanged)
                //{
                //    _bufferedProvider = new BufferedWaveProvider(_waveFormat);
                //    _bufferedProvider.DiscardOnBufferOverflow = true;
                //}
                //// set up our multiplexer used to mix down to 2 output channels)
                //if (_multiplexProvider == null || formatChanged)
                //{
                //    _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2);
                //}
                // // set up our audio output device
                // if (_wasapiOut == null || formatChanged)
                // {
                //     // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                //     // This is close enough to show that audio is received and converted correctly.
                //     _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                //     _wasapiOut.Init(_multiplexProvider);
                //     _wasapiOut.Volume = _volume;
                //     _wasapiOut.Play();
                // }

                // we're working in bytes, so take the size of a 32 bit sample (float) into account
                int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                // NDI delivers planar audio; build an interleaved frame and
                // convert from the one we received
                NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                {
                    sample_rate = audioFrame.sample_rate,
                    no_channels = audioFrame.no_channels,
                    no_samples = audioFrame.no_samples,
                    timecode = audioFrame.timecode
                };

                // we need a managed byte array for the converted samples
                byte[] audBuffer = new byte[sizeInBytes];

                // Pin the byte[] and get a GC handle to it.
                // Doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later.
                // The data will only be moved once, during the fast interleave step that is required anyway.
                GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                // access it by an IntPtr and use it for our interleaved audio buffer
                interleavedFrame.p_data = handle.AddrOfPinnedObject();

                // Convert from float planar to float interleaved audio.
                // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit.
                // NOTE(review): with playback disabled, audBuffer is filled and then
                // discarded - this whole conversion may be dead work.
                NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                // Release the pin on the byte[].
                // Never try to access p_data after the byte[] has been unpinned -
                // that IntPtr will no longer be valid.
                handle.Free();

                //// push the byte[] buffer into the bufferedProvider for output
                //_bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);

                // free the frame that was received
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                break;

            // Metadata
            case NDIlib.frame_type_e.frame_type_metadata:
                // UTF-8 strings must be converted for use - length includes the terminating zero
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);
                //System.Diagnostics.Debug.Print(metadata);

                // free frames that were received
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;
        }
    }
}