public void Send(ref NDIlib.audio_frame_interleaved_16s_t audioFrame)
{
    if (_sendInstancePtr == IntPtr.Zero)
    {
        return;
    }

    NDIlib.util_send_send_audio_interleaved_16s(_sendInstancePtr, ref audioFrame);
}
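// A minimal usage sketch, not from the original source: fill one 16-bit interleaved
// frame with a 440 Hz test tone and pass it to Send(). It assumes this method lives on
// the same sender class as Send() above; the method name and constants are illustrative.
private void SendTestTone()
{
    const int sampleRate = 48000;
    const int channels = 2;
    const int samples = 1920;   // 40 ms at 48 kHz

    // unmanaged buffer for the interleaved 16-bit samples; we allocated it, so we free it
    IntPtr buffer = Marshal.AllocHGlobal(samples * channels * sizeof(short));
    try
    {
        short[] managed = new short[samples * channels];
        for (int i = 0; i < samples; i++)
        {
            short value = (short)(Math.Sin(2.0 * Math.PI * 440.0 * i / sampleRate) * short.MaxValue * 0.25);
            for (int c = 0; c < channels; c++)
            {
                managed[i * channels + c] = value;
            }
        }

        // copy the managed samples into the unmanaged buffer
        Marshal.Copy(managed, 0, buffer, managed.Length);

        NDIlib.audio_frame_interleaved_16s_t frame = new NDIlib.audio_frame_interleaved_16s_t()
        {
            sample_rate = sampleRate,
            no_channels = channels,
            no_samples = samples,
            timecode = NDIlib.send_timecode_synthesize,
            reference_level = 0,
            p_data = buffer
        };

        Send(ref frame);
    }
    finally
    {
        Marshal.FreeHGlobal(buffer);
    }
}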
public AudioFrame16bpp(int maxSamples, int sampleRate, int numChannels)
{
    // we have to know to free it later
    _memoryOwned = true;

    IntPtr audioBufferPtr = Marshal.AllocHGlobal(numChannels * maxSamples * sizeof(short));

    _ndiAudioFrame = new NDIlib.audio_frame_interleaved_16s_t()
    {
        sample_rate = sampleRate,
        no_channels = numChannels,
        no_samples = maxSamples,
        timecode = NDIlib.send_timecode_synthesize,
        p_data = audioBufferPtr,
        reference_level = 0
    };
}
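// A plausible counterpart to the constructor above, not shown in the original excerpt:
// _memoryOwned records that p_data came from AllocHGlobal, so a Dispose pattern along
// these lines would be needed to release it. Field names match the constructor; the
// rest is an assumption about how the class completes the pattern.
public void Dispose()
{
    Dispose(true);
    GC.SuppressFinalize(this);
}

protected virtual void Dispose(bool disposing)
{
    // only free the unmanaged buffer if this instance allocated it
    if (_memoryOwned && _ndiAudioFrame.p_data != IntPtr.Zero)
    {
        Marshal.FreeHGlobal(_ndiAudioFrame.p_data);
        _ndiAudioFrame.p_data = IntPtr.Zero;
    }
}

~AudioFrame16bpp()
{
    Dispose(false);
}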
private void AudioCap_DataAvailable(object sender, WaveInEventArgs e)
{
    if (isPausedValue || sendInstancePtr == IntPtr.Zero)
    {
        return;
    }

    // how many samples?
    int numSamples = (e.BytesRecorded / (audioNumChannels * audioSampleSizeInBytes));

    // how much float buffer will this need?
    int bufferSizeNeeded = numSamples * audioNumChannels * sizeof(float);

    // is our audio frame big enough? too big is fine
    if (audioBufferSize < bufferSizeNeeded || audioFrame.p_data == IntPtr.Zero)
    {
        if (audioFrame.p_data != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(audioFrame.p_data);
            audioFrame.p_data = IntPtr.Zero;
        }

        audioFrame.p_data = Marshal.AllocHGlobal(bufferSizeNeeded);
        audioBufferSize = bufferSizeNeeded;
    }

    // set these every time in case the capture format changed
    audioFrame.sample_rate = audioSampleRate;
    audioFrame.no_channels = audioNumChannels;
    audioFrame.no_samples = numSamples;

    // pin the byte[] audio received and get a GC handle to it
    GCHandle interleavedHandle = GCHandle.Alloc(e.Buffer, GCHandleType.Pinned);

    if (audioSampleSizeInBytes == 2)
    {
        // make a temporary interleaved NDI audio frame around the received samples
        NDIlib.audio_frame_interleaved_16s_t interleavedShortFrame = new NDIlib.audio_frame_interleaved_16s_t()
        {
            sample_rate = audioSampleRate,
            no_channels = audioNumChannels,
            no_samples = numSamples,
            p_data = interleavedHandle.AddrOfPinnedObject()
        };

        // Convert from 16-bit interleaved to float planar audio
        NDIlib.util_audio_from_interleaved_16s_v2(ref interleavedShortFrame, ref audioFrame);
    }
    else if (audioSampleSizeInBytes == 4)
    {
        // make a temporary interleaved NDI audio frame around the received samples
        NDIlib.audio_frame_interleaved_32f_t interleavedFloatFrame = new NDIlib.audio_frame_interleaved_32f_t()
        {
            sample_rate = audioSampleRate,
            no_channels = audioNumChannels,
            no_samples = numSamples,
            p_data = interleavedHandle.AddrOfPinnedObject()
        };

        // Convert from float interleaved to float planar audio
        NDIlib.util_audio_from_interleaved_32f_v2(ref interleavedFloatFrame, ref audioFrame);
    }
    else
    {
        System.Diagnostics.Debug.Assert(false, "Unexpected audio sample size.");
    }

    // release the GC pinning of the byte[]
    interleavedHandle.Free();

    Monitor.Enter(sendInstanceLock);

    // send the planar frame
    if (sendInstancePtr != IntPtr.Zero)
    {
        if (!IsSendPaused)
        {
            NDIlib.send_send_audio_v2(sendInstancePtr, ref audioFrame);
        }
    }

    Monitor.Exit(sendInstanceLock);
}
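// A minimal wiring sketch, offered as an assumption rather than part of the original
// source: the handler above takes NAudio's WaveInEventArgs, so something like this
// would start capture and feed it (requires "using NAudio.Wave;"). Field names mirror
// those the handler already uses; WaveInEvent and WaveFormat are standard NAudio types.
private WaveInEvent audioCap;

private void StartAudioCapture()
{
    audioSampleRate = 48000;
    audioNumChannels = 2;
    audioSampleSizeInBytes = 2;   // 16-bit PCM capture

    audioCap = new WaveInEvent()
    {
        // PCM format matching the fields used in AudioCap_DataAvailable
        WaveFormat = new WaveFormat(audioSampleRate, 8 * audioSampleSizeInBytes, audioNumChannels)
    };

    audioCap.DataAvailable += AudioCap_DataAvailable;
    audioCap.StartRecording();
}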
internal AudioFrame16bpp(NDIlib.audio_frame_interleaved_16s_t ndiAudioFrame)
{
    // wrapping a frame we did not allocate, so we must not free its buffer
    _memoryOwned = false;
    _ndiAudioFrame = ndiAudioFrame;
}
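// Illustrative accessors, an assumption about the rest of this class rather than code
// from the original excerpt: consumers typically need the format fields and a managed
// copy of the interleaved samples. The member names are hypothetical.
public int NumSamples => _ndiAudioFrame.no_samples;
public int NumChannels => _ndiAudioFrame.no_channels;
public int SampleRate => _ndiAudioFrame.sample_rate;

// copy the unmanaged interleaved 16-bit buffer into a managed array
public short[] CopySamples()
{
    short[] samples = new short[_ndiAudioFrame.no_samples * _ndiAudioFrame.no_channels];
    if (_ndiAudioFrame.p_data != IntPtr.Zero)
    {
        Marshal.Copy(_ndiAudioFrame.p_data, samples, 0, samples.Length);
    }
    return samples;
}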
// the receive thread runs through this loop until told to exit
void ReceiveThreadProc()
{
    while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
    {
        // The descriptors
        NDIlib.video_frame_v2_t videoFrame = new NDIlib.video_frame_v2_t();
        NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t();
        NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

        switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
        {
            // No data received within the timeout
            case NDIlib.frame_type_e.frame_type_none:
                ReceivingFrames = false;
                break;

            // frame settings changed - check for extended functionality
            case NDIlib.frame_type_e.frame_type_status_change:
                // check for PTZ
                IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                // check for recording
                IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                // check for a web control URL
                // we must free this string ptr if we get one
                IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                if (webUrlPtr == IntPtr.Zero)
                {
                    WebControlUrl = String.Empty;
                }
                else
                {
                    // convert to a managed String
                    WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                    // don't forget to free the string ptr
                    NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                }

                break;

            // Video data
            case NDIlib.frame_type_e.frame_type_video:
                Connected = true;
                ReceivingFrames = true;

                // if not enabled, just discard
                // this can also occasionally happen when changing sources
                if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                {
                    // always free received frames
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;
                }

                VideoFrameReceivedEventArgs videoArgs = new VideoFrameReceivedEventArgs();
                videoArgs.Frame = new VideoFrame(videoFrame);
                VideoFrameReceived?.Invoke(this, videoArgs);

                // free received frames AFTER use!
                NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                break;

            // Audio data
            case NDIlib.frame_type_e.frame_type_audio:
                Connected = true;
                ReceivingFrames = true;

                // if no audio or disabled, nothing to do
                if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                {
                    // always free received frames
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;
                }

                // we're working in bytes, so take the size of a 16-bit sample into account
                int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(short);

                // NDI audio is float planar, but we'll return 16-bit interleaved, which PCM consumers expect by default.
                // create an interleaved frame and convert from the one we received
                NDIlib.audio_frame_interleaved_16s_t interleavedFrame = new NDIlib.audio_frame_interleaved_16s_t()
                {
                    sample_rate = audioFrame.sample_rate,
                    no_channels = audioFrame.no_channels,
                    no_samples = audioFrame.no_samples,
                    timecode = audioFrame.timecode
                };

                // we need a managed byte array to add to a buffered provider
                byte[] audBuffer = new byte[sizeInBytes];

                // pin the byte[] and get a GC handle to it
                // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                // the data will only be moved once, during the fast interleave step that is required anyway
                GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                // access it by an IntPtr and use it for our interleaved audio buffer
                interleavedFrame.p_data = handle.AddrOfPinnedObject();

                // Convert from float planar to 16-bit interleaved audio
                NDIlib.util_audio_to_interleaved_16s_v2(ref audioFrame, ref interleavedFrame);

                AudioFrameReceivedEventArgs audioArgs = new AudioFrameReceivedEventArgs();
                audioArgs.Frame = new AudioFrame16bpp(interleavedFrame);
                AudioFrameReceived?.Invoke(this, audioArgs);

                // release the pin on the byte[]
                // never try to access p_data after the byte[] has been unpinned!
                // that IntPtr will no longer be valid.
                handle.Free();

                // free the frame that was received
                NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                break;

            // Metadata
            case NDIlib.frame_type_e.frame_type_metadata:
                // UTF-8 strings must be converted for use - length includes the terminating zero
                //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length - 1);
                //System.Diagnostics.Debug.Print(metadata);

                // free the frame that was received
                NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);

                break;

            case NDIlib.frame_type_e.frame_type_error:
                Connected = false;
                break;
        }
    }
}
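// A consumer sketch, an assumption rather than part of the original source: the receive
// loop mentions a buffered provider, so a handler like this could copy the interleaved
// bytes into NAudio's BufferedWaveProvider for playback. The frame's unmanaged buffer is
// only valid while the event is being raised (it is unpinned right after the handlers
// return), so the samples must be copied here. "waveProvider", CopySamples() and the
// handler name are illustrative; hook it up with AudioFrameReceived += OnAudioFrameReceived.
private BufferedWaveProvider waveProvider;   // e.g. new BufferedWaveProvider(new WaveFormat(48000, 16, 2))

private void OnAudioFrameReceived(object sender, AudioFrameReceivedEventArgs e)
{
    // CopySamples() is the illustrative accessor sketched earlier on AudioFrame16bpp
    short[] samples = e.Frame.CopySamples();

    // reinterpret the 16-bit samples as the raw PCM bytes NAudio expects
    byte[] bytes = new byte[samples.Length * sizeof(short)];
    Buffer.BlockCopy(samples, 0, bytes, 0, bytes.Length);

    // queue the PCM for playback (a WaveOutEvent playing waveProvider elsewhere)
    waveProvider.AddSamples(bytes, 0, bytes.Length);
}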