protected void ProcessFrameReceived(object sender, NDI.AudioFrameReceivedEventArgs e)
{
    if (!_BufferAllocated || (e.Frame.NumChannels * e.Frame.NumSamples * sizeof(short)) > _BufferSize)
    {
        if (e.Frame.NumChannels != _NumChannels)
        {
            _Log.Error($"--audio-channel-count argument doesn't match what's being received. Configured: {_NumChannels} Received: {e.Frame.NumChannels}");
        }
        if (e.Frame.SampleRate != _SampleRate)
        {
            _Log.Error($"--audio-clock-rate argument doesn't match what's being received. Configured: {_SampleRate} Received: {e.Frame.SampleRate}");
        }
        AllocateBuffer(e.Frame.SampleRate, e.Frame.NumSamples, e.Frame.NumChannels);
    }

    // Populate the buffer with the NDI frame data
    Marshal.Copy(e.Frame.AudioBuffer, _Buffer.DataBuffer.Data, 0, e.Frame.NumChannels * e.Frame.NumSamples * sizeof(short));

    // rusty.clarkson: Due to int rounding we might be losing samples...
    int duration = (1000 * e.Frame.NumSamples) / e.Frame.SampleRate;
    RaiseFrame(new AudioFrame(duration, _Buffer));
}
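
// A minimal sketch (not from the original source) addressing the rounding note
// above: (1000 * NumSamples) / SampleRate truncates, so fractional milliseconds
// are dropped on every frame. Carrying the remainder forward keeps reported
// durations in step with the sample clock over time. The _durationRemainder
// field and the ComputeDurationMs helper are hypothetical; `duration` above
// could then be computed as ComputeDurationMs(e.Frame.NumSamples, e.Frame.SampleRate).
private long _durationRemainder;

private int ComputeDurationMs(int numSamples, int sampleRate)
{
    long scaled = (1000L * numSamples) + _durationRemainder;
    _durationRemainder = scaled % sampleRate; // keep the truncated fraction for the next frame
    return (int)(scaled / sampleRate);
}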
// the receive thread runs through this loop until told to exit
void ReceiveThreadProc()
{
    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The descriptors
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received within the timeout
            case NDIlib.frame_type_e.frame_type_none:
                ReceivingFrames = false;
                break;

            // frame settings changed - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:
                // check for PTZ support
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // check for recording support
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // check for a web control URL
                // we must free this string ptr if we get one
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // convert to a managed String
                    WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                    // don't forget to free the string ptr
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }
                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:
                Connected = true;
                ReceivingFrames = true;

                // if not enabled, just discard
                // this can also occasionally happen when changing sources
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;
                }

                VideoFrameReceivedEventArgs videoArgs = new VideoFrameReceivedEventArgs();
                videoArgs.Frame = new VideoFrame(videoFrame);
                VideoFrameReceived?.Invoke(this, videoArgs);

                // free received frames AFTER use!
                NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                break;

            // Audio data
            case NDIlib.frame_type_e.frame_type_audio:
                Connected = true;
                ReceivingFrames = true;

                // if there is no audio or audio is disabled, nothing to do
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // always free received frames
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;
                }

                // we're working in bytes, so take the size of a 16 bit sample into account
                int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(short);

                // NDI uses planar audio, but we'll return interleaved, which Pcm uses by default.
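                // (Layout note, added for clarity: planar audio stores each channel's
                // samples contiguously, c0 c0 c0 ... then c1 c1 c1 ..., while interleaved
                // alternates across channels, c0 c1 c0 c1 ... Interleaved is what most PCM
                // consumers, e.g. NAudio's BufferedWaveProvider, expect.)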
                // create an interleaved frame and convert into it from the one we received
                NDIlib.audio_frame_interleaved_16s_t interleavedFrame = new NDIlib.audio_frame_interleaved_16s_t()
                {
                    sample_rate = audioFrame.sample_rate,
                    no_channels = audioFrame.no_channels,
                    no_samples = audioFrame.no_samples,
                    timecode = audioFrame.timecode
                };

                // we need a managed byte array to add to the buffered provider
                byte[] audBuffer = new byte[sizeInBytes];

                // pin the byte[] and get a GC handle to it
                // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                // the data will only be moved once, during the fast interleave step that is required anyway
                GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                // access it by an IntPtr and use it for our interleaved audio buffer
                interleavedFrame.p_data = handle.AddrOfPinnedObject();

                // convert from float planar to 16 bit interleaved audio
                NDIlib.util_audio_to_interleaved_16s_v2(ref audioFrame, ref interleavedFrame);

                AudioFrameReceivedEventArgs audioArgs = new AudioFrameReceivedEventArgs();
                audioArgs.Frame = new AudioFrame16bpp(interleavedFrame);
                AudioFrameReceived?.Invoke(this, audioArgs);

                // release the pin on the byte[]
                // never try to access p_data after the byte[] has been unpinned!
                // that IntPtr will no longer be valid
                handle.Free();

                // free the frame that was received
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                break;

            // Metadata
            case NDIlib.frame_type_e.frame_type_metadata:
                // UTF-8 strings must be converted for use - length includes the terminating zero
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length - 1);
                //System.Diagnostics.Debug.Print(metadata);

                // free frames that were received
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                break;

            case NDIlib.frame_type_e.frame_type_error:
                Connected = false;
                break;
        }
    }
}
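
// A minimal sketch (not from the original source) of how this loop is typically
// driven: run ReceiveThreadProc on a background thread, consume frames via the
// VideoFrameReceived/AudioFrameReceived events, and set _exitThread to stop.
// The _receiveThread field and the StartReceiving/StopReceiving helpers are
// hypothetical; only ReceiveThreadProc, _exitThread, and the events above come
// from this file. Assumes `using System.Threading;`.
private Thread _receiveThread;

public void StartReceiving()
{
    _exitThread = false;
    _receiveThread = new Thread(ReceiveThreadProc) { IsBackground = true, Name = "NdiReceiveThread" };
    _receiveThread.Start();
}

public void StopReceiving()
{
    // the loop checks this flag on every pass; recv_capture_v2 times out after
    // 1000 ms, so the thread exits promptly even when no frames are arriving
    _exitThread = true;
    _receiveThread?.Join();
}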