Code Example #1
        public String GetRecordingError()
        {
            if (!_canRecord || _recvInstancePtr == IntPtr.Zero)
            {
                return String.Empty;
            }

            IntPtr errorPtr = NDIlib.recv_recording_get_error(_recvInstancePtr);

            if (errorPtr == IntPtr.Zero)
            {
                return String.Empty;
            }
            else
            {
                String error = UTF.Utf8ToString(errorPtr);

                // free the native string now that it has been copied to managed memory
                NDIlib.recv_free_string(_recvInstancePtr, errorPtr);

                return error;
            }
        }
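The recording getters on this page rely on a UTF.Utf8ToString helper that the listing does not show. A minimal sketch of such a helper, assuming the native pointer references a zero-terminated UTF-8 string (the wrapper project ships its own implementation):

using System;
using System.Runtime.InteropServices;
using System.Text;

public static class UTF
{
    // Convert a zero-terminated native UTF-8 string into a managed string.
    public static String Utf8ToString(IntPtr nativeUtf8)
    {
        if (nativeUtf8 == IntPtr.Zero)
        {
            return String.Empty;
        }

        // scan for the terminating zero to find the byte length
        int length = 0;
        while (Marshal.ReadByte(nativeUtf8, length) != 0)
        {
            length++;
        }

        byte[] buffer = new byte[length];
        Marshal.Copy(nativeUtf8, buffer, 0, length);
        return Encoding.UTF8.GetString(buffer);
    }
}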
Code Example #2
        public String GetRecordingFilename()
        {
            if (!_canRecord || _recvInstancePtr == IntPtr.Zero)
            {
                return String.Empty;
            }

            IntPtr filenamePtr = NDIlib.recv_recording_get_filename(_recvInstancePtr);

            if (filenamePtr == IntPtr.Zero)
            {
                return String.Empty;
            }
            else
            {
                String filename = UTF.Utf8ToString(filenamePtr);

                // free the native string now that it has been copied to managed memory
                NDIlib.recv_free_string(_recvInstancePtr, filenamePtr);

                return filename;
            }
        }
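A hypothetical way to use the two getters together while a recording is active (the receiver variable and its type are assumptions, not part of the listing):

// Hypothetical usage: poll the receiver while recording.
String filename = receiver.GetRecordingFilename();
String error    = receiver.GetRecordingError();

if (!String.IsNullOrEmpty(error))
{
    Console.WriteLine($"Recording to '{filename}' reported an error: {error}");
}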
Code Example #3
        private void FindThreadProc()
        {
            // the size of an NDIlib.source_t, for pointer offsets
            int SourceSizeInBytes = Marshal.SizeOf(typeof(NDIlib.source_t));

            while (!_exitThread)
            {
                // Wait up to 500ms for sources to change
                if (NDIlib.find_wait_for_sources(_findInstancePtr, 500))
                {
                    uint   NumSources = 0;
                    IntPtr SourcesPtr = NDIlib.find_get_current_sources(_findInstancePtr, ref NumSources);

                    // convert each unmanaged ptr into a managed NDIlib.source_t
                    for (int i = 0; i < NumSources; i++)
                    {
                        // source ptr + (index * size of a source)
                        IntPtr p = IntPtr.Add(SourcesPtr, (i * SourceSizeInBytes));

                        // marshal it to a managed source and assign to our list
                        NDIlib.source_t src = (NDIlib.source_t)Marshal.PtrToStructure(p, typeof(NDIlib.source_t));

                        // .Net doesn't handle marshaling UTF-8 strings properly
                        String name = UTF.Utf8ToString(src.p_ndi_name);

                        // Add it to the list if not already in the list.
                        // We don't have to remove because NDI applications remember any sources seen during each run.
                        // They might be selected and come back when the connection is restored.
                        if (!_sourceList.Any(item => item.Name == name))
                        {
                            _sourceList.Enqueue(new Source(src));
                        }
                    }
                }
            }
        }
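FindThreadProc assumes a find instance already exists and that the loop runs on its own thread. A sketch of that lifecycle, assuming the wrapper's find_create_v2/find_destroy entry points and the _findInstancePtr/_exitThread fields used above (_findThread is a hypothetical Thread field):

// Sketch: create the find instance, then run FindThreadProc on a background thread.
NDIlib.find_create_t findSettings = new NDIlib.find_create_t()
{
    show_local_sources = true,         // also report sources running on this machine
    p_groups           = IntPtr.Zero,  // default groups
    p_extra_ips        = IntPtr.Zero   // no extra discovery addresses
};

_findInstancePtr = NDIlib.find_create_v2(ref findSettings);
if (_findInstancePtr != IntPtr.Zero)
{
    _exitThread = false;
    _findThread = new Thread(FindThreadProc) { IsBackground = true, Name = "NdiFindThread" };
    _findThread.Start();
}

// ... on shutdown: stop the loop before freeing the native instance
_exitThread = true;
_findThread.Join();
NDIlib.find_destroy(_findInstancePtr);
_findInstancePtr = IntPtr.Zero;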
Code Example #4
        // the receive thread runs through this loop until told to exit
        void ReceiveThreadProc()
        {
            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;


                    if (bufferSize != buffer01Size)
                    {
                        buffer0      = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                        buffer1      = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                        buffer01Size = bufferSize;
                    }


                    // Copy data
                    unsafe
                    {
                        byte *dst = (byte *)buffer0.ToPointer();
                        byte *src = (byte *)videoFrame.p_data.ToPointer();

                        for (int y = 0; y < yres; y++)
                        {
                            memcpy(dst, src, stride);
                            dst += stride;
                            src += stride;
                        }
                    }

                    // swap
                    IntPtr temp = buffer0;
                    buffer0 = buffer1;
                    buffer1 = temp;

                    ImagingPixelFormat pixFmt;
                    switch (videoFrame.FourCC)
                    {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        pixFmt = PixelFormat.B8G8R8A8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        pixFmt = PixelFormat.B8G8R8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        pixFmt = PixelFormat.R8G8B8A8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        pixFmt = PixelFormat.R8G8B8; break;

                    default:
                        pixFmt = PixelFormat.Unknown;            // TODO: need to handle other video formats which are currently unsupported by IImage
                        break;
                    }

                    var VideoFrameImage = buffer1.ToImage(bufferSize, xres, yres, pixFmt, videoFrame.FourCC.ToString());

                    videoFrames.OnNext(VideoFrameImage);

                    // free the received frame AFTER use!
                    // (its pixels were copied into buffer0/buffer1 above, so the native frame is no longer needed)
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;

                // Audio data
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit

                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    int channelStride = audioFrame.channel_stride_in_bytes;

                    var floatBuffer = ConvertByteArrayToFloat(audBuffer, channelStride);

                    float[] outBuffer = new float[512];

                    // Buffer.BlockCopy counts bytes, not elements, so copy 512 floats' worth of data
                    Buffer.BlockCopy(floatBuffer, 0, outBuffer, 0, 512 * sizeof(float));

                    audioOutSignal.Read(outBuffer, 0, 512);

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;


                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
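The pixel copy loop above calls a memcpy helper that .NET does not provide. One common declaration is a P/Invoke into the C runtime; this is an assumption, as the project may declare it differently or use Buffer.MemoryCopy instead:

// Assumed P/Invoke declaration matching the memcpy(dst, src, stride) call above.
[DllImport("msvcrt.dll", EntryPoint = "memcpy", CallingConvention = CallingConvention.Cdecl, SetLastError = false)]
static extern unsafe void* memcpy(void* dest, void* src, int count);

// On .NET 4.6+ / .NET Core the native call can be avoided entirely:
// Buffer.MemoryCopy(src, dst, stride, stride);   // (source, destination, destSizeInBytes, bytesToCopy)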
Code Example #5
File: ReceiverTexture.cs Project: sebllll/VL.IO.NDI
        /// <summary>
        /// the receive thread runs through this loop until told to exit
        /// </summary>
        void ReceiveThreadProc()
        {
            bool newVideo = true;

            using var deviceHandle = deviceProvider.GetHandle();
            var device = deviceHandle.Resource;

            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;


                    if (bufferSize != buffer01Size)
                    {
                        buffer0      = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                        buffer1      = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                        buffer01Size = bufferSize;
                    }

                    // Copy data
                    unsafe
                    {
                        byte *dst = (byte *)buffer0.ToPointer();
                        byte *src = (byte *)videoFrame.p_data.ToPointer();

                        for (int y = 0; y < yres; y++)
                        {
                            memcpy(dst, src, stride);
                            dst += stride;
                            src += stride;
                        }
                    }

                    // swap
                    IntPtr temp = buffer0;
                    buffer0 = buffer1;
                    buffer1 = temp;

                    SharpDX.DXGI.Format texFmt;
                    switch (videoFrame.FourCC)
                    {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        texFmt = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        texFmt = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        break;

                    default:
                        texFmt = SharpDX.DXGI.Format.Unknown;         // TODO: need to handle other video formats
                        break;
                    }

                    if (newVideo)     // first video frame since the loop started, so create a new texture
                    {
                        textureDesc = new Texture2DDescription()
                        {
                            Width             = xres,
                            Height            = yres,
                            MipLevels         = 1,
                            ArraySize         = 1,
                            Format            = texFmt,
                            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
                            Usage             = ResourceUsage.Default,
                            BindFlags         = BindFlags.ShaderResource,
                            CpuAccessFlags    = CpuAccessFlags.None,
                            OptionFlags       = ResourceOptionFlags.None
                        };

                        outputTexture = new Texture2D(device, textureDesc);

                        newVideo = false;
                    }

                    // upload the copied frame into the texture
                    // (UpdateSubresource copies from CPU memory, so no Map/Unmap pair is needed;
                    //  the DataBox carries the row pitch of the source data)
                    DataBox srcBox = new DataBox(buffer1, stride, bufferSize);
                    device.ImmediateContext.UpdateSubresource(srcBox, outputTexture, 0);

                    videoFrames.OnNext(outputTexture);


                    // free the received frame AFTER use!
                    // (its pixels were copied into buffer0/buffer1 above, so the native frame is no longer needed)
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;

                // audio is beyond the scope of this example
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    //// if the audio format changed, we need to reconfigure the audio device
                    //bool formatChanged = false;

                    //// make sure our format has been created and matches the incoming audio
                    //if (_waveFormat == null ||
                    //    _waveFormat.Channels != audioFrame.no_channels ||
                    //    _waveFormat.SampleRate != audioFrame.sample_rate)
                    //{
                    //    //// Create a WaveFormat that matches the incoming frames
                    //    //_waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);

                    //    formatChanged = true;
                    //}

                    //// set up our audio buffer if needed
                    //if (_bufferedProvider == null || formatChanged)
                    //{
                    //    _bufferedProvider = new BufferedWaveProvider(_waveFormat);
                    //    _bufferedProvider.DiscardOnBufferOverflow = true;
                    //}

                    //// set up our multiplexer used to mix down to 2 output channels)
                    //if (_multiplexProvider == null || formatChanged)
                    //{
                    //    _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2);
                    //}

                    //// set up our audio output device
                    //if (_wasapiOut == null || formatChanged)
                    //{
                    //    // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                    //    // This is close enough to show that audio is received and converted correctly.
                    //    _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                    //    _wasapiOut.Init(_multiplexProvider);
                    //    _wasapiOut.Volume = _volume;
                    //    _wasapiOut.Play();
                    //}

                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit
                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    //// push the byte[] buffer into the bufferedProvider for output
                    //_bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;

                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
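Both receive-thread variants assume that _recvInstancePtr was created beforehand and that ReceiveThreadProc runs on its own thread. A sketch of that setup, assuming the wrapper's recv_create_v3 entry point with the SDK's field names (sourceT and _receiveThread are hypothetical):

// Sketch: create a receive instance for a previously discovered source.
NDIlib.recv_create_v3_t recvSettings = new NDIlib.recv_create_v3_t()
{
    source_to_connect_to = sourceT,   // an NDIlib.source_t from the find list
    color_format         = NDIlib.recv_color_format_e.recv_color_format_BGRX_BGRA,  // matches the FourCC switches above
    bandwidth            = NDIlib.recv_bandwidth_e.recv_bandwidth_highest,
    allow_video_fields   = false
};

_recvInstancePtr = NDIlib.recv_create_v3(ref recvSettings);
if (_recvInstancePtr != IntPtr.Zero)
{
    _exitThread = false;
    _receiveThread = new Thread(ReceiveThreadProc) { IsBackground = true, Name = "NdiReceiveThread" };
    _receiveThread.Start();
}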
Code Example #6
        // Construct from NDIlib.source_t
        public Source(NDIlib.source_t source_t)
        {
            Name = UTF.Utf8ToString(source_t.p_ndi_name);
        }
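The constructor above implies a small wrapper class around NDIlib.source_t, whose Name property is what the find loop in Code Example #3 compares against. A sketch of what it might look like (the real project may keep additional state, such as the raw source_t for reconnection):

public class Source
{
    public String Name { get; private set; }

    // Construct from NDIlib.source_t
    public Source(NDIlib.source_t source_t)
    {
        Name = UTF.Utf8ToString(source_t.p_ndi_name);
    }

    public override string ToString() => Name;
}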