Example 1
        public void Send(ref NDIlib.audio_frame_v2_t audioFrame)
        {
            if (_sendInstancePtr == IntPtr.Zero)
            {
                return;
            }

            NDIlib.send_send_audio_v2(_sendInstancePtr, ref audioFrame);
        }
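A minimal companion sketch (an assumption, not part of this example, but using the same NDIlib wrapper and _sendInstancePtr field): the unmanaged sender created by the wrapper must eventually be destroyed, e.g. in Dispose.

        public void Dispose()
        {
            if (_sendInstancePtr != IntPtr.Zero)
            {
                // destroy the unmanaged NDI sender instance
                NDIlib.send_destroy(_sendInstancePtr);

                // guard against double-destroy
                _sendInstancePtr = IntPtr.Zero;
            }
        }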
Example 2
 public AudioFrame(IntPtr bufferPtr, int sampleRate, int numChannels, int channelStride, int numSamples)
 {
     _ndiAudioFrame = new NDIlib.audio_frame_v2_t()
     {
         sample_rate             = sampleRate,
         no_channels             = numChannels,
         no_samples              = numSamples,
         timecode                = NDIlib.send_timecode_synthesize,
         p_data                  = bufferPtr,
         channel_stride_in_bytes = channelStride,
         p_metadata              = IntPtr.Zero,
         timestamp               = NDIlib.recv_timestamp_undefined
     };
 }
Example 3
        private void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            int numberOfChannels = audioCapture.WaveFormat.Channels;
            int sampleRate       = audioCapture.WaveFormat.SampleRate;
            // interpret the recorded bytes as 32-bit floating point audio
            int samples = e.BytesRecorded / sizeof(float);

            // NDI wants no_samples per channel, not the total interleaved count
            int samplesPerChannel = samples / numberOfChannels;

            // WaveBuffer reinterprets the raw byte[] as a float[] in place,
            // so no per-sample copy is needed
            WaveBuffer waveBuffer = new WaveBuffer(e.Buffer);

            float[] buffer = waveBuffer.FloatBuffer;

            // allocate unmanaged memory for the planar audio buffer
            IntPtr bufferPtr = Marshal.AllocHGlobal(numberOfChannels * samplesPerChannel * sizeof(float));

            NDIlib.audio_frame_v2_t audioFrame = new NDIlib.audio_frame_v2_t()
            {
                // Sample rate
                sample_rate = sampleRate,
                // Number of channels (1 = mono, 2 = stereo)
                no_channels = numberOfChannels,
                // Number of samples per channel
                no_samples = samplesPerChannel,
                // Timecode.
                timecode = NDIlib.send_timecode_synthesize,
                // The audio memory used for this frame
                p_data = bufferPtr,
                // The inter-channel stride
                channel_stride_in_bytes = sizeof(float) * samplesPerChannel,
                // no metadata
                p_metadata = IntPtr.Zero,
                // only valid on received frames
                timestamp = 0
            };

            // NDI expects planar audio but WASAPI delivers interleaved samples,
            // so deinterleave one channel at a time
            float[] channelSamples = new float[samplesPerChannel];
            for (int ch = 0; ch < audioFrame.no_channels; ch++)
            {
                for (int i = 0; i < samplesPerChannel; i++)
                {
                    channelSamples[i] = buffer[i * numberOfChannels + ch];
                }

                // where does this channel start in the frame's buffer?
                IntPtr dest = new IntPtr(audioFrame.p_data.ToInt64() + (ch * audioFrame.channel_stride_in_bytes));

                // copy this channel's samples into the frame
                Marshal.Copy(channelSamples, 0, dest, samplesPerChannel);
            }

            // add it to the output queue
            AddAudioFrame(audioFrame);
        }
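For context, a sketch of the consuming side (an assumption, not shown in this example): a send thread that drains the pendingAudioFrames queue fed by AddAudioFrame, sends each frame, and frees the HGlobal buffer that OnDataAvailable allocated.

        void SendThreadProc()
        {
            // GetConsumingEnumerable ends once CompleteAdding has been called
            foreach (NDIlib.audio_frame_v2_t frame in pendingAudioFrames.GetConsumingEnumerable())
            {
                NDIlib.audio_frame_v2_t audioFrame = frame;

                // hand the frame to the NDI sender
                NDIlib.send_send_audio_v2(_sendInstancePtr, ref audioFrame);

                // we allocated this buffer in OnDataAvailable, so we free it
                Marshal.FreeHGlobal(audioFrame.p_data);
            }
        }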
Example 4
        public AudioFrame(int maxSamples, int sampleRate, int numChannels)
        {
            // we have to know to free it later
            _memoryOwned = true;

            IntPtr audioBufferPtr = Marshal.AllocHGlobal(numChannels * maxSamples * sizeof(float));

            _ndiAudioFrame = new NDIlib.audio_frame_v2_t()
            {
                sample_rate             = sampleRate,
                no_channels             = numChannels,
                no_samples              = maxSamples,
                timecode                = NDIlib.send_timecode_synthesize,
                p_data                  = audioBufferPtr,
                channel_stride_in_bytes = sizeof(float) * maxSamples,
                p_metadata              = IntPtr.Zero,
                timestamp               = NDIlib.recv_timestamp_undefined
            };
        }
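Since this constructor sets _memoryOwned (compare Example 13, where wrapped frames are not owned), a matching cleanup is implied. A hedged sketch:

        public void Dispose()
        {
            // only free buffers this instance allocated itself
            if (_memoryOwned && _ndiAudioFrame.p_data != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(_ndiAudioFrame.p_data);
                _ndiAudioFrame.p_data = IntPtr.Zero;
            }
        }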
Example 5
        public bool AddAudioFrame(NDIlib.audio_frame_v2_t frame)
        {
            try
            {
                pendingAudioFrames.Add(frame);
            }
            catch (OperationCanceledException)
            {
                // we're shutting down
                pendingAudioFrames.CompleteAdding();
                return false;
            }
            catch
            {
                return false;
            }

            return true;
        }
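The snippet assumes a BlockingCollection field; a plausible declaration (an assumption, the original does not show it, and the capacity of 20 is an arbitrary example):

        // bounded queue: Add blocks when the queue is full, and throws
        // InvalidOperationException once CompleteAdding has been called
        // during shutdown (handled by the catch-all above)
        private BlockingCollection<NDIlib.audio_frame_v2_t> pendingAudioFrames =
            new BlockingCollection<NDIlib.audio_frame_v2_t>(20);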
Example 6
        public void Stop()
        {
            // tell the thread to exit
            exitThread = true;

            // Video
            CompositionTarget.Rendering -= OnCompositionTargetRendering;

            // Audio
            if (audioCapture != null)
            {
                audioCapture.StopRecording();
                audioCapture = null;
            }

            // wait for it to exit
            if (sendThread != null)
            {
                sendThread.Join();

                sendThread = null;
            }

            // cause the pulling of frames to fail
            pendingVideoFrames.CompleteAdding();
            pendingAudioFrames.CompleteAdding();

            // clear any pending video frames
            while (pendingVideoFrames.Count > 0)
            {
                NDIlib.video_frame_v2_t discardFrame = pendingVideoFrames.Take();
                Marshal.FreeHGlobal(discardFrame.p_data);
            }

            // clear any pending audio frames
            while (pendingAudioFrames.Count > 0)
            {
                NDIlib.audio_frame_v2_t discardFrame = pendingAudioFrames.Take();
                Marshal.FreeHGlobal(discardFrame.p_data);
            }
        }
Example 7
            void ReceiveThreadProc()
            {
                while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
                {
                    // The descriptors
                    NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                    NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                    NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                    switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                    {
                    // No data
                    case NDIlib.frame_type_e.frame_type_none:
                        // No data received
                        break;

                    // Video data
                    case NDIlib.frame_type_e.frame_type_video:

                        // if the lockBuffer flag is set, just discard;
                        // a null buffer can also occasionally happen when changing sources
                        if (videoFrame.p_data == IntPtr.Zero || lockBuffer)
                        {
                            // always free received frames
                            NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                            break;
                        }

                        // check receive data
                        //FLogger.Log(LogType.Message,
                        //    "FourCC: " + Enum.GetName(typeof(NDIlib.FourCC_type_e), videoFrame.FourCC)
                        //    + ", frame_format_type: " + Enum.GetName(typeof(NDIlib.frame_format_type_e), videoFrame.frame_format_type)
                        //    + ", frameRate_D: " + videoFrame.frame_rate_D
                        //    + ", frameRate_N: " + videoFrame.frame_rate_N
                        //    + ", line_stride_in_bytes: " + videoFrame.line_stride_in_bytes
                        //    + ", picture_aspect_ratio: " + videoFrame.picture_aspect_ratio
                        //    + ", xres: " + videoFrame.xres
                        //    + ", yres: " + videoFrame.yres
                        //);

                        // get all our info so that we can free the frame
                        int yres = (int)videoFrame.yres;
                        int xres = (int)videoFrame.xres;

                        width  = xres;
                        height = yres;

                        // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                        //double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                        int stride = (int)videoFrame.line_stride_in_bytes;
                        int size   = yres * stride;

                        // allocate some memory for a video buffer
                        if (bufferSize != size)
                        {
                            if (buffer_ptr != IntPtr.Zero)
                            {
                                Marshal.FreeHGlobal(buffer_ptr);
                            }

                            buffer_ptr = Marshal.AllocHGlobal(size);

                            bufferSize = size;
                        }

                        // copy frame data
                        CopyMemory(buffer_ptr, videoFrame.p_data, (uint)bufferSize);

                        // free frames that were received
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        // set flag for update texture
                        //invalidate = true;

                        break;

                    // audio not supported in this example
                    case NDIlib.frame_type_e.frame_type_audio:
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;

                    // Metadata
                    case NDIlib.frame_type_e.frame_type_metadata:

                        // UTF-8 strings must be converted for use - length includes the terminating zero
                        //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                        //System.Diagnostics.Debug.Print(metadata);

                        // free frames that were received
                        NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                        break;
                    }
                }
            }
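CopyMemory is not part of NDIlib; a typical P/Invoke declaration matching the calls above (an assumption, since the original does not show it):

            // RtlMoveMemory is the actual kernel32 export behind the CopyMemory macro
            [DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory", SetLastError = false)]
            static extern void CopyMemory(IntPtr dest, IntPtr src, uint count);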
Example 8
        // the receive thread runs through this loop until told to exit
        void ReceiveThreadProc()
        {
            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (((double)xres / (double)yres) / videoFrame.picture_aspect_ratio);

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;

                    // We need to be on the UI thread to write to our bitmap
                    // Not very efficient, but this is just an example
                    Dispatcher.BeginInvoke(new Action(delegate
                    {
                        // resize the writeable if needed
                        if (VideoBitmap == null ||
                            VideoBitmap.PixelWidth != xres ||
                            VideoBitmap.PixelHeight != yres ||
                            Math.Abs(VideoBitmap.DpiX - dpiX) > 0.001)
                        {
                            VideoBitmap = null;
                            GC.Collect(1);
                            VideoBitmap         = new WriteableBitmap(xres, yres, dpiX, 96.0, PixelFormats.Pbgra32, null);
                            VideoSurface.Source = VideoBitmap;
                        }

                        // update the writeable bitmap
                        VideoBitmap.WritePixels(new Int32Rect(0, 0, xres, yres), videoFrame.p_data, bufferSize, stride);

                        // free the received frame only AFTER use!
                        // This WritePixels call is dispatched, so we must free it inside this scope.
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    }));

                    break;

                // Audio data
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    // if the audio format changed, we need to reconfigure the audio device
                    bool formatChanged = false;

                    // make sure our format has been created and matches the incoming audio
                    if (_waveFormat == null ||
                        _waveFormat.Channels != audioFrame.no_channels ||
                        _waveFormat.SampleRate != audioFrame.sample_rate)
                    {
                        // Create a WaveFormat that matches the incoming frames
                        _waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);

                        formatChanged = true;
                    }

                    // set up our audio buffer if needed
                    if (_bufferedProvider == null || formatChanged)
                    {
                        _bufferedProvider = new BufferedWaveProvider(_waveFormat);
                        _bufferedProvider.DiscardOnBufferOverflow = true;
                    }

                    // set up our multiplexer used to mix down to 2 output channels
                    if (_multiplexProvider == null || formatChanged)
                    {
                        _multiplexProvider = new MultiplexingWaveProvider(new List <IWaveProvider>()
                        {
                            _bufferedProvider
                        }, 2);
                    }

                    // set up our audio output device
                    if (_haveAudioDevice && (_wasapiOut == null || formatChanged))
                    {
                        try
                        {
                            // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                            // This is close enough to show that audio is received and converted correctly.
                            _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                            _wasapiOut.Init(_multiplexProvider);
                            _wasapiOut.Volume = _volume;
                            _wasapiOut.Play();
                        }
                        catch
                        {
                            // if this fails, assume that there is no audio device on the system
                            // so that we don't retry/catch on every audio frame received
                            _haveAudioDevice = false;
                        }
                    }

                    // did we get a device?
                    if (_haveAudioDevice && _wasapiOut != null)
                    {
                        // we're working in bytes, so take the size of a 32 bit sample (float) into account
                        int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                        // NAudio is expecting interleaved audio and NDI uses planar.
                        // create an interleaved frame and convert from the one we received
                        NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                        {
                            sample_rate = audioFrame.sample_rate,
                            no_channels = audioFrame.no_channels,
                            no_samples  = audioFrame.no_samples,
                            timecode    = audioFrame.timecode
                        };

                        // we need a managed byte array to add to buffered provider
                        byte[] audBuffer = new byte[sizeInBytes];

                        // pin the byte[] and get a GC handle to it
                        // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                        // the data will only be moved once, during the fast interleave step that is required anyway
                        GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                        // access it by an IntPtr and use it for our interleaved audio buffer
                        interleavedFrame.p_data = handle.AddrOfPinnedObject();

                        // Convert from float planar to float interleaved audio
                        // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit
                        NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                        // release the pin on the byte[]
                        // never try to access p_data after the byte[] has been unpinned!
                        // that IntPtr will no longer be valid.
                        handle.Free();

                        // push the byte[] buffer into the bufferedProvider for output
                        _bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);
                    }

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;

                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
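NDI.UTF.Utf8ToString is a small helper from the wrapper; a sketch of what it does (NDI returns zero-terminated UTF-8 C strings):

        public static string Utf8ToString(IntPtr nativeUtf8)
        {
            if (nativeUtf8 == IntPtr.Zero)
                return String.Empty;

            // find the terminating zero
            int len = 0;
            while (Marshal.ReadByte(nativeUtf8, len) != 0)
                ++len;

            // copy the bytes out and decode them as UTF-8
            byte[] buffer = new byte[len];
            Marshal.Copy(nativeUtf8, buffer, 0, len);
            return System.Text.Encoding.UTF8.GetString(buffer);
        }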
Example 9
        // the receive thread runs through this loop until told to exit
        // AccessViolationException
        void ReceiveThreadProc()
        {
            while (!_exitThread && (_recvInstancePtr != IntPtr.Zero))
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // do nothing
                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (_videoEnabled == false || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    //double dpiX = 96.0 * ((double)videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;

                    // We need to be on the UI thread to write to our bitmap
                    // Not very efficient, but this is just an example
                    Dispatcher.BeginInvoke(new Action(delegate
                    {
                        if (_videoEnabled == false)
                        {
                            // still free the received frame, or it will leak
                            NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                            return;
                        }

                        // resize the writeable if needed
                        if (VideoBitmap == null ||
                            VideoBitmap.PixelWidth != xres ||
                            VideoBitmap.PixelHeight != yres)
                        {
                            VideoBitmap         = new WriteableBitmap(xres, yres, 96.0, 96.0, PixelFormats.Pbgra32, null);
                            VideoSurface.Source = VideoBitmap;
                        }

                        try
                        {
                            VideoBitmap.Lock();

                            IntPtr pBackBuffer = VideoBitmap.BackBuffer;

                            try
                            {
                                CopyMemory(pBackBuffer, videoFrame.p_data, (uint)bufferSize);
                            }
                            catch
                            {
                                // free the frame before bailing out, or it will leak
                                NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                                return;
                            }

                            // Specify the area of the bitmap that changed.
                            VideoBitmap.AddDirtyRect(new Int32Rect(0, 0, xres, yres));
                        }
                        finally
                        {
                            VideoBitmap.Unlock();
                        }

                        // free the received frame only AFTER use!
                        // This bitmap update is dispatched, so we must free the frame inside this scope.
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    }));

#if DEBUG
                    // compute the frame rate
                    DateTime now  = DateTime.Now;
                    TimeSpan dist = now - last;
                    if (dist.Seconds > 0)
                    {
                        Console.WriteLine("フレームレート:<1");
                    }
                    else
                    {
                        if (dist.Milliseconds == 0)
                        {
                            Console.WriteLine("フレームレート:∞");
                        }
                        else
                        {
                            int frame = 1000 / dist.Milliseconds;
                            Console.WriteLine("Frame rate: {0}", frame);
                        }
                    }
                    last = now;
#endif
                    break;

                case NDIlib.frame_type_e.frame_type_audio:
                    // do nothing with the audio,
                    // but note the frame must still be freed here or its memory will leak
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;

                case NDIlib.frame_type_e.frame_type_metadata:
                    // do nothing
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
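The DEBUG block above relies on a "last" field that the excerpt never declares; presumably something like:

        // timestamp of the previous video frame, used by the DEBUG frame-rate counter
        private DateTime last = DateTime.Now;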
Example 10
        // the receive thread runs through this loop until told to exit
        void ReceiveThreadProc()
        {
            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;


                    if (bufferSize != buffer01Size)
                    {
                        buffer0      = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                        buffer1      = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                        buffer01Size = bufferSize;
                    }


                    // Copy data
                    unsafe
                    {
                        byte *dst = (byte *)buffer0.ToPointer();
                        byte *src = (byte *)videoFrame.p_data.ToPointer();

                        for (int y = 0; y < yres; y++)
                        {
                            memcpy(dst, src, stride);
                            dst += stride;
                            src += stride;
                        }
                    }

                    // swap
                    IntPtr temp = buffer0;
                    buffer0 = buffer1;
                    buffer1 = temp;

                    ImagingPixelFormat pixFmt;
                    switch (videoFrame.FourCC)
                    {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        pixFmt = PixelFormat.B8G8R8A8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        pixFmt = PixelFormat.B8G8R8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        pixFmt = PixelFormat.R8G8B8A8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        pixFmt = PixelFormat.R8G8B8; break;

                    default:
                        pixFmt = PixelFormat.Unknown;            // TODO: need to handle other video formats which are currently unsupported by IImage
                        break;
                    }

                    var VideoFrameImage = buffer1.ToImage(bufferSize, xres, yres, pixFmt, videoFrame.FourCC.ToString());

                    videoFrames.OnNext(VideoFrameImage);

                    // free the received frame only AFTER its data has been copied out
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;

                // Audio data
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NDI delivers planar audio; convert to an interleaved frame,
                    // which is what most audio consumers expect
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit

                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    int channelStride = audioFrame.channel_stride_in_bytes;

                    var floatBuffer = ConvertByteArrayToFloat(audBuffer, channelStride);

                    float[] outBuffer = new float[512];

                    Buffer.BlockCopy(floatBuffer, 0, outBuffer, 0, 512);

                    audioOutSignal.Read(outBuffer, 0, 512);

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;


                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
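Two helpers used above are not shown in the excerpt. Hypothetical sketches consistent with the call sites (the real implementations may differ):

        // msvcrt's memcpy, used for the row-by-row stride copy;
        // long matches size_t on 64-bit Windows and accepts the int stride
        [DllImport("msvcrt.dll", EntryPoint = "memcpy", CallingConvention = CallingConvention.Cdecl)]
        static extern unsafe void* memcpy(void* dest, void* src, long count);

        // reinterpret the interleaved byte buffer as 32-bit float samples;
        // the channelStride parameter is unused in this sketch
        private static float[] ConvertByteArrayToFloat(byte[] bytes, int channelStride)
        {
            float[] floats = new float[bytes.Length / sizeof(float)];
            Buffer.BlockCopy(bytes, 0, floats, 0, bytes.Length);
            return floats;
        }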
Example 11
        // the receive thread runs through this loop until told to exit
        void ReceiveThreadProc()
        {
            int receivedFrames = 0;

            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:
                    receivedFrames++;
                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int    stride     = (int)videoFrame.line_stride_in_bytes;
                    int    bufferSize = yres * stride;
                    Bitmap NDIRecvd   = new Bitmap(xres, yres, stride, System.Drawing.Imaging.PixelFormat.Format32bppPArgb, videoFrame.p_data);
                    /* downscaling is normally done when applying resolution bias, but here we need a really small texture */
                    lock (ReceiverLock)
                    {
                        if (_bitmap != null)
                        {
                            _bitmap.Dispose();
                        }
                        _bitmap = new Bitmap(xres / 4, yres / 4);
                        using (var g = Graphics.FromImage(_bitmap))
                        {
                            g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.Bilinear;
                            g.DrawImage(NDIRecvd, 0, 0, _bitmap.Width, _bitmap.Height);
                        }
                        NDIRecvd.Dispose();
                    }

                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;

                // audio is not used
                case NDIlib.frame_type_e.frame_type_audio:

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;

                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
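Because the receive thread disposes and replaces _bitmap on every frame, consumers must take ReceiverLock before touching it. A hedged usage sketch:

        public Bitmap GetPreviewCopy()
        {
            lock (ReceiverLock)
            {
                // clone under the lock; the receive thread may dispose
                // _bitmap as soon as the lock is released
                return _bitmap != null ? new Bitmap(_bitmap) : null;
            }
        }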
Example 12
        /// <summary>
        /// the receive thread runs through this loop until told to exit
        /// </summary>
        void ReceiveThreadProc()
        {
            bool newVideo = true;

            using var deviceHandle = deviceProvider.GetHandle();
            var device = deviceHandle.Resource;

            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;


                    if (bufferSize != buffer01Size)
                    {
                        buffer0      = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                        buffer1      = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                        buffer01Size = bufferSize;
                    }

                    // Copy data
                    unsafe
                    {
                        byte *dst = (byte *)buffer0.ToPointer();
                        byte *src = (byte *)videoFrame.p_data.ToPointer();

                        for (int y = 0; y < yres; y++)
                        {
                            memcpy(dst, src, stride);
                            dst += stride;
                            src += stride;
                        }
                    }

                    // swap
                    IntPtr temp = buffer0;
                    buffer0 = buffer1;
                    buffer1 = temp;

                    SharpDX.DXGI.Format texFmt;
                    switch (videoFrame.FourCC)
                    {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        texFmt = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    default:
                        texFmt = SharpDX.DXGI.Format.Unknown;         // TODO: need to handle other video formats
                        break;
                    }

                    if (newVideo)     // first video frame received, so create a new texture
                    {
                        textureDesc = new Texture2DDescription()
                        {
                            Width             = xres,
                            Height            = yres,
                            MipLevels         = 1,
                            ArraySize         = 1,
                            Format            = texFmt,
                            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
                            Usage             = ResourceUsage.Default,
                            BindFlags         = BindFlags.ShaderResource,
                            CpuAccessFlags    = CpuAccessFlags.None,
                            OptionFlags       = ResourceOptionFlags.None
                        };

                        outputTexture = new Texture2D(device, textureDesc);

                        newVideo = false;
                    }

                    // UpdateSubresource copies the CPU buffer into the texture;
                    // the row pitch must match the NDI line stride.
                    // No Map/Unmap is involved, so no cleanup is needed here.
                    DataBox srcBox = new DataBox(buffer1, stride, bufferSize);
                    device.ImmediateContext.UpdateSubresource(srcBox, outputTexture, 0);

                    videoFrames.OnNext(outputTexture);


                    // free the received frame only AFTER its data has been copied into the texture
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;

                // audio is beyond the scope of this example
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    //// if the audio format changed, we need to reconfigure the audio device
                    //bool formatChanged = false;

                    //// make sure our format has been created and matches the incoming audio
                    //if (_waveFormat == null ||
                    //    _waveFormat.Channels != audioFrame.no_channels ||
                    //    _waveFormat.SampleRate != audioFrame.sample_rate)
                    //{
                    //    //// Create a WaveFormat that matches the incoming frames
                    //    //_waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);

                    //    formatChanged = true;
                    //}

                    //// set up our audio buffer if needed
                    //if (_bufferedProvider == null || formatChanged)
                    //{
                    //    _bufferedProvider = new BufferedWaveProvider(_waveFormat);
                    //    _bufferedProvider.DiscardOnBufferOverflow = true;
                    //}

                    //// set up our multiplexer used to mix down to 2 output channels)
                    //if (_multiplexProvider == null || formatChanged)
                    //{
                    //    _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2);
                    //}

                    //    // set up our audio output device
                    //    if (_wasapiOut == null || formatChanged)
                    //    {
                    //        // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                    //        // This is close enough to show that audio is received and converted correctly.
                    //        _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                    //        _wasapiOut.Init(_multiplexProvider);
                    //        _wasapiOut.Volume = _volume;
                    //        _wasapiOut.Play();
                    //    }

                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit
                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    //// push the byte[] buffer into the bufferedProvider for output
                    //_bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;

                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
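videoFrames is fed via OnNext but never declared in the excerpt; with System.Reactive it would presumably look like:

        // observable stream of decoded frames for downstream consumers;
        // note the same Texture2D instance is pushed repeatedly and updated in place
        private readonly Subject<Texture2D> videoFrames = new Subject<Texture2D>();
        public IObservable<Texture2D> VideoFrames => videoFrames;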
Example 13
 internal AudioFrame(NDIlib.audio_frame_v2_t ndiAudioFrame)
 {
     _memoryOwned   = false;
     _ndiAudioFrame = ndiAudioFrame;
 }
Example 14
        // the receive thread runs through this loop until told to exit
        void ReceiveThreadProc()
        {
            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    ReceivingFrames = false;
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = NDI.UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:
                    Connected       = true;
                    ReceivingFrames = true;

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    VideoFrameReceivedEventArgs videoArgs = new VideoFrameReceivedEventArgs();
                    videoArgs.Frame = new VideoFrame(videoFrame);
                    VideoFrameReceived?.Invoke(this, videoArgs);

                    // free frames that were received AFTER use!
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);
                    break;

                // Audio data
                case NDIlib.frame_type_e.frame_type_audio:
                    Connected       = true;
                    ReceivingFrames = true;

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    // we're working in bytes, so take the size of a 16 bit sample into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(short);

                    // NDI uses planar audio, but we'll return interleaved, which PCM consumers expect by default.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_16s_t interleavedFrame = new NDIlib.audio_frame_interleaved_16s_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to 16 bit interleaved audio
                    NDIlib.util_audio_to_interleaved_16s_v2(ref audioFrame, ref interleavedFrame);

                    AudioFrameReceivedEventArgs audioArgs = new AudioFrameReceivedEventArgs();
                    audioArgs.Frame = new AudioFrame16bpp(interleavedFrame);
                    AudioFrameReceived?.Invoke(this, audioArgs);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);
                    break;

                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;

                case NDIlib.frame_type_e.frame_type_error:
                    Connected = false;
                    break;
                }
            }
        }
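One caveat worth spelling out for subscribers (a hedged usage sketch; the receiver instance name and event/args types are as assumed by the example): the interleaved buffer lives in a pinned managed byte[] that is unpinned immediately after the handlers return, so handlers must copy the samples out rather than hold on to the frame.

        receiver.AudioFrameReceived += (sender, e) =>
        {
            // copy the 16-bit interleaved samples out here;
            // the frame's p_data is only valid for the duration of this callback
        };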