Example No. 1
        // This will re-enable routing if it was previously cleared.
        // It should not be needed otherwise, since FromSource changes update routing automatically.
        public void UpdateRouting()
        {
            // never started before?
            if (_routingInstancePtr == IntPtr.Zero)
            {
                CreateRouting();
                return;
            }

            // Sanity
            if (_selectedSource == null || String.IsNullOrEmpty(_selectedSource.Name))
            {
                Clear();
                return;
            }

            // a source_t to describe the source to connect to.
            NDIlib.source_t source_t = new NDIlib.source_t()
            {
                p_ndi_name = UTF.StringToUtf8(_selectedSource.Name)
            };

            if (!NDIlib.routing_change(_routingInstancePtr, ref source_t))
            {
                // free the memory we allocated with StringToUtf8
                Marshal.FreeHGlobal(source_t.p_ndi_name);

                throw new InvalidOperationException("Failed to change routing.");
            }

            // free the memory we allocated with StringToUtf8
            Marshal.FreeHGlobal(source_t.p_ndi_name);
        }
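
The UTF.StringToUtf8 helper used throughout these examples is not included in the snippets. Below is a minimal sketch of the usual pattern (encode to UTF-8, copy into an HGlobal buffer, append the NUL terminator); only the name and the calling convention come from the examples, the body is an assumption. It would live on the static UTF class with System.Runtime.InteropServices in scope, and the caller frees the result with Marshal.FreeHGlobal, as the method above does.

        // Sketch only: allocate an unmanaged, NUL-terminated UTF-8 copy of a managed string.
        // The caller must free the returned pointer with Marshal.FreeHGlobal.
        public static IntPtr StringToUtf8(String managedString)
        {
            if (managedString == null)
            {
                return IntPtr.Zero;
            }

            byte[] utf8 = System.Text.Encoding.UTF8.GetBytes(managedString);

            IntPtr buffer = Marshal.AllocHGlobal(utf8.Length + 1);
            Marshal.Copy(utf8, 0, buffer, utf8.Length);
            Marshal.WriteByte(buffer, utf8.Length, 0);   // terminating NUL

            return buffer;
        }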
Example No. 2
        // This will start recording. If the recorder was already recording, the message is ignored. A filename is passed in as a 'hint'. Since the recorder might
        // already be recording (or might not allow complete flexibility over its filename), the filename might or might not be used. If the filename is empty, or
        // not present, a name will be chosen automatically.
        public bool RecordingStart(String filenameHint = "")
        {
            if (!_canRecord || _recvInstancePtr == IntPtr.Zero)
            {
                return(false);
            }

            bool retVal = false;

            if (String.IsNullOrEmpty(filenameHint))
            {
                retVal = NDIlib.recv_recording_start(_recvInstancePtr, IntPtr.Zero);
            }
            else
            {
                // convert to an unmanaged UTF8 IntPtr
                IntPtr fileNamePtr = UTF.StringToUtf8(filenameHint);

                retVal = NDIlib.recv_recording_start(_recvInstancePtr, fileNamePtr);

                // don't forget to free it
                Marshal.FreeHGlobal(fileNamePtr);
            }

            return(retVal);
        }
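
A hypothetical call pattern for the method above; the receiver variable and the filename hint are assumptions for illustration, and the error lookup uses GetRecordingError from a later example.

        // Hypothetical usage; "receiver" is whatever instance exposes RecordingStart.
        if (receiver.RecordingStart("MyShow_Take01"))
        {
            // recording was requested; the filename hint may or may not be honoured
        }
        else
        {
            // recording is not supported, or the request failed
            String recordingError = receiver.GetRecordingError();
        }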
Example No. 3
        private void CreateRouting()
        {
            if (_routingInstancePtr != IntPtr.Zero)
            {
                NDIlib.routing_destroy(_routingInstancePtr);
                _routingInstancePtr = IntPtr.Zero;
            }

            // Sanity check
            if (_selectedSource == null || String.IsNullOrEmpty(_selectedSource.Name))
            {
                return;
            }

            // .Net interop doesn't handle UTF-8 strings, so do it manually
            // These must be freed later
            IntPtr sourceNamePtr = UTF.StringToUtf8(_routingName);

            IntPtr groupsNamePtr = IntPtr.Zero;

            // make a flat list of groups if needed
            if (_groups != null)
            {
                StringBuilder flatGroups = new StringBuilder();
                foreach (String group in _groups)
                {
                    flatGroups.Append(group);
                    if (group != _groups.Last())
                    {
                        flatGroups.Append(',');
                    }
                }

                groupsNamePtr = UTF.StringToUtf8(flatGroups.ToString());
            }

            // Create an NDI routing description
            NDIlib.routing_create_t createDesc = new NDIlib.routing_create_t()
            {
                p_ndi_name = sourceNamePtr,
                p_groups   = groupsNamePtr
            };

            // create the NDI routing instance
            _routingInstancePtr = NDIlib.routing_create(ref createDesc);

            // free the strings we allocated
            Marshal.FreeHGlobal(sourceNamePtr);
            Marshal.FreeHGlobal(groupsNamePtr);

            // did it succeed?
            if (_routingInstancePtr == IntPtr.Zero)
            {
                throw new InvalidOperationException("Failed to create routing instance.");
            }

            // update in case we have enough info to start routing
            UpdateRouting();
        }
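
The group flattening above builds the comma-separated list by hand and skips the separator by value comparison, which misplaces commas if a group name occurs twice. A sketch of the same step with String.Join, assuming _groups is an enumerable of strings as the code implies:

        // Equivalent, order-preserving flattening of the group list.
        IntPtr groupsNamePtr = IntPtr.Zero;
        if (_groups != null)
        {
            groupsNamePtr = UTF.StringToUtf8(String.Join(",", _groups));
        }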
Example No. 4
        public String GetRecordingError()
        {
            if (!_canRecord || _recvInstancePtr == IntPtr.Zero)
            {
                return(String.Empty);
            }

            IntPtr errorPtr = NDIlib.recv_recording_get_error(_recvInstancePtr);

            if (errorPtr == IntPtr.Zero)
            {
                return(String.Empty);
            }
            else
            {
                String error = UTF.Utf8ToString(errorPtr);

                // free it
                NDIlib.recv_free_string(_recvInstancePtr, errorPtr);

                return(error);
            }
        }
Example No. 5
        public String GetRecordingFilename()
        {
            if (!_canRecord || _recvInstancePtr == IntPtr.Zero)
            {
                return(String.Empty);
            }

            IntPtr filenamePtr = NDIlib.recv_recording_get_filename(_recvInstancePtr);

            if (filenamePtr == IntPtr.Zero)
            {
                return(String.Empty);
            }
            else
            {
                String filename = UTF.Utf8ToString(filenamePtr);

                // free it
                NDIlib.recv_free_string(_recvInstancePtr, filenamePtr);

                return(filename);
            }
        }
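
GetRecordingError and GetRecordingFilename share one pattern: call the native getter, convert the UTF-8 pointer, then release it with recv_free_string. A sketch of a shared helper; the helper name is an assumption, not part of the NDI API:

        // Hypothetical helper: convert a receiver-owned UTF-8 string and free it.
        private String ReadAndFreeRecvString(IntPtr nativeStr)
        {
            if (nativeStr == IntPtr.Zero)
            {
                return String.Empty;
            }

            String result = UTF.Utf8ToString(nativeStr);

            NDIlib.recv_free_string(_recvInstancePtr, nativeStr);

            return result;
        }

        // GetRecordingFilename would then reduce to:
        // return ReadAndFreeRecvString(NDIlib.recv_recording_get_filename(_recvInstancePtr));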
Example No. 6
        private void FindThreadProc()
        {
            // the size of an NDIlib.source_t, for pointer offsets
            int SourceSizeInBytes = Marshal.SizeOf(typeof(NDIlib.source_t));

            while (!_exitThread)
            {
                // Wait up to 500ms for sources to change
                if (NDIlib.find_wait_for_sources(_findInstancePtr, 500))
                {
                    uint   NumSources = 0;
                    IntPtr SourcesPtr = NDIlib.find_get_current_sources(_findInstancePtr, ref NumSources);

                    // convert each unmanaged ptr into a managed NDIlib.source_t
                    for (int i = 0; i < NumSources; i++)
                    {
                        // source ptr + (index * size of a source)
                        IntPtr p = IntPtr.Add(SourcesPtr, (i * SourceSizeInBytes));

                        // marshal it to a managed source and assign to our list
                        NDIlib.source_t src = (NDIlib.source_t)Marshal.PtrToStructure(p, typeof(NDIlib.source_t));

                        // .Net doesn't handle marshaling UTF-8 strings properly
                        String name = UTF.Utf8ToString(src.p_ndi_name);

                        // Add it to the list if not already in the list.
                        // We don't have to remove because NDI applications remember any sources seen during each run.
                        // They might be selected and come back when the connection is restored.
                        if (!_sourceList.Any(item => item.Name == name))
                        {
                            _sourceList.Enqueue(new Source(src));
                        }
                    }
                }
            }
        }
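
The find thread only enqueues sources it has not seen before. A hypothetical consumer-side accessor, assuming _sourceList is a ConcurrentQueue<Source> as the Enqueue call suggests:

        // Sketch: let callers drain newly discovered sources (assumes _sourceList is a ConcurrentQueue<Source>).
        public bool TryGetNextSource(out Source source)
        {
            return _sourceList.TryDequeue(out source);
        }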
Example No. 7
        public Finder(bool showLocalSources = false, String[] groups = null, String[] extraIps = null)
        {
            if (!NDIlib.initialize())
            {
                if (!NDIlib.is_supported_CPU())
                {
                    throw new InvalidOperationException("CPU incompatible with NDI.");
                }
                else
                {
                    throw new InvalidOperationException("Unable to initialize NDI.");
                }
            }

            //BindingOperations.EnableCollectionSynchronization(_sourceList, _sourceLock);

            IntPtr groupsNamePtr = IntPtr.Zero;

            // make a flat list of groups if needed
            if (groups != null)
            {
                StringBuilder flatGroups = new StringBuilder();
                foreach (String group in groups)
                {
                    flatGroups.Append(group);
                    if (group != groups.Last())
                    {
                        flatGroups.Append(',');
                    }
                }

                groupsNamePtr = UTF.StringToUtf8(flatGroups.ToString());
            }

            // This is also optional.
            // The list of additional IP addresses that exist that we should query for
            // sources on. For instance, if you want to find the sources on a remote machine
            // that is not on your local sub-net then you can put a comma-separated list of
            // those IP addresses here and those sources will be available locally even though
            // they are not mDNS discoverable. An example might be "12.0.0.8,13.0.12.8".
            // When none is specified (IntPtr.Zero) the registry is used.
            // Create a UTF-8 buffer from our string
            // Must use Marshal.FreeHGlobal() after use!
            // IntPtr extraIpsPtr = NDI.Common.StringToUtf8("12.0.0.8,13.0.12.8")
            IntPtr extraIpsPtr = IntPtr.Zero;

            // make a flat list of ip addresses as comma separated strings
            if (extraIps != null)
            {
                StringBuilder flatIps = new StringBuilder();
                foreach (String ipStr in extraIps)
                {
                    flatIps.Append(ipStr);
                    if (ipStr != extraIps.Last())
                    {
                        flatIps.Append(',');
                    }
                }

                extraIpsPtr = UTF.StringToUtf8(flatIps.ToString());
            }

            // how we want our find to operate
            NDIlib.find_create_t findDesc = new NDIlib.find_create_t()
            {
                p_groups           = groupsNamePtr,
                show_local_sources = showLocalSources,
                p_extra_ips        = extraIpsPtr
            };

            // create our find instance
            _findInstancePtr = NDIlib.find_create_v2(ref findDesc);

            // free our UTF-8 buffer if we created one
            if (groupsNamePtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(groupsNamePtr);
            }

            if (extraIpsPtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(extraIpsPtr);
            }

            // start up a thread to update on
            _findThread = new Thread(FindThreadProc)
            {
                IsBackground = true, Name = "NdiFindThread"
            };
            _findThread.Start();
        }
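
A hypothetical construction example; the group name and the extra IP address are purely illustrative:

        // Find sources in the "Cameras" group plus one host that is not mDNS-discoverable.
        var finder = new Finder(
            showLocalSources: true,
            groups:   new[] { "Cameras" },
            extraIps: new[] { "192.168.10.50" });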
Example No. 8
        public Sender(String sourceName, bool clockVideo = true, bool clockAudio = false, String[] groups = null, String failoverName = null)
        {
            if (String.IsNullOrEmpty(sourceName))
            {
                throw new ArgumentException("sourceName can not be null or empty.", sourceName);
            }

            if (!NDIlib.initialize())
            {
                if (!NDIlib.is_supported_CPU())
                {
                    throw new InvalidOperationException("CPU incompatible with NDI.");
                }
                else
                {
                    throw new InvalidOperationException("Unable to initialize NDI.");
                }
            }

            // .Net interop doesn't handle UTF-8 strings, so do it manually
            // These must be freed later
            IntPtr sourceNamePtr = UTF.StringToUtf8(sourceName);

            IntPtr groupsNamePtr = IntPtr.Zero;

            // make a flat list of groups if needed
            if (groups != null)
            {
                StringBuilder flatGroups = new StringBuilder();
                foreach (String group in groups)
                {
                    flatGroups.Append(group);
                    if (group != groups.Last())
                    {
                        flatGroups.Append(',');
                    }
                }

                groupsNamePtr = UTF.StringToUtf8(flatGroups.ToString());
            }

            // Create an NDI source description
            NDIlib.send_create_t createDesc = new NDIlib.send_create_t()
            {
                p_ndi_name  = sourceNamePtr,
                p_groups    = groupsNamePtr,
                clock_video = clockVideo,
                clock_audio = clockAudio
            };

            // create the NDI send instance
            _sendInstancePtr = NDIlib.send_create(ref createDesc);

            // free the strings we allocated
            Marshal.FreeHGlobal(sourceNamePtr);
            Marshal.FreeHGlobal(groupsNamePtr);

            // did it succeed?
            if (_sendInstancePtr == IntPtr.Zero)
            {
                throw new InvalidOperationException("Failed to create send instance.");
            }

            if (!String.IsNullOrEmpty(failoverName))
            {
                // .Net interop doesn't handle UTF-8 strings, so do it manually
                // These must be freed later
                IntPtr failoverNamePtr = UTF.StringToUtf8(failoverName);

                NDIlib.source_t failoverDesc = new NDIlib.source_t()
                {
                    p_ndi_name    = failoverNamePtr,
                    p_url_address = IntPtr.Zero
                };

                NDIlib.send_set_failover(_sendInstancePtr, ref failoverDesc);

                // free the strings we allocated
                Marshal.FreeHGlobal(failoverNamePtr);
            }
        }
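
A hypothetical construction example for the sender; the source and failover names are illustrative only:

        // Create a video-clocked sender with a failover source configured.
        var sender = new Sender(
            sourceName:   "Studio A - Program",
            clockVideo:   true,
            clockAudio:   false,
            groups:       null,
            failoverName: "Studio B - Program");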
Example No. 9
        // the receive thread runs though this loop until told to exit
        void ReceiveThreadProc()
        {
            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;


                    if (bufferSize != buffer01Size)
                    {
                        buffer0      = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                        buffer1      = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                        buffer01Size = bufferSize;
                    }


                    // Copy data
                    unsafe
                    {
                        byte *dst = (byte *)buffer0.ToPointer();
                        byte *src = (byte *)videoFrame.p_data.ToPointer();

                        for (int y = 0; y < yres; y++)
                        {
                            memcpy(dst, src, stride);
                            dst += stride;
                            src += stride;
                        }
                    }

                    // swap
                    IntPtr temp = buffer0;
                    buffer0 = buffer1;
                    buffer1 = temp;

                    ImagingPixelFormat pixFmt;
                    switch (videoFrame.FourCC)
                    {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        pixFmt = PixelFormat.B8G8R8A8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        pixFmt = PixelFormat.B8G8R8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        pixFmt = PixelFormat.R8G8B8A8; break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        pixFmt = PixelFormat.R8G8B8; break;

                    default:
                        pixFmt = PixelFormat.Unknown;            // TODO: need to handle other video formats which are currently unsupported by IImage
                        break;
                    }

                    var VideoFrameImage = buffer1.ToImage(bufferSize, xres, yres, pixFmt, videoFrame.FourCC.ToString());

                    videoFrames.OnNext(VideoFrameImage);

                    // free received frames AFTER use!
                    // the frame data has already been copied into our own buffer above, so it is safe to free here.
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;

                // Audio data
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit

                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    int channelStride = audioFrame.channel_stride_in_bytes;

                    var floatBuffer = ConvertByteArrayToFloat(audBuffer, channelStride);

                    float[] outBuffer = new float[512];

                    Buffer.BlockCopy(floatBuffer, 0, outBuffer, 0, 512);

                    audioOutSignal.Read(outBuffer, 0, 512);

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;


                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
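
memcpy in the copy loop above is an external helper that is not defined in these snippets (commonly a msvcrt P/Invoke). Since the loop copies yres consecutive lines of stride bytes, the same copy can be done with Buffer.MemoryCopy in one call; a sketch, assuming the frame data is laid out contiguously at line_stride_in_bytes exactly as the loop itself assumes:

                    // Equivalent single copy of the whole frame (yres lines of stride bytes each).
                    unsafe
                    {
                        byte* src = (byte*)videoFrame.p_data.ToPointer();
                        byte* dst = (byte*)buffer0.ToPointer();

                        Buffer.MemoryCopy(src, dst, bufferSize, bufferSize);
                    }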
Example No. 10
        /// <summary>
        /// connect to an NDI source in our Dictionary by name
        /// </summary>
        /// <param name="source"></param>
        /// <param name="colorFormat"></param>
        /// <param name="bandwidth"></param>
        /// <param name="allowVideoFields"></param>
        public void Connect(Source source,
                            NDIlib.recv_color_format_e colorFormat = NDIlib.recv_color_format_e.recv_color_format_BGRX_BGRA,
                            NDIlib.recv_bandwidth_e bandwidth      = NDIlib.recv_bandwidth_e.recv_bandwidth_highest,
                            bool allowVideoFields = false)
        {
            //if (System.ComponentModel.DesignerProperties.GetIsInDesignMode(this))
            //    return;

            if (String.IsNullOrEmpty(ReceiverName))
            {
                throw new ArgumentException("sourceName can not be null or empty.", ReceiverName);
            }

            // just in case we're already connected
            Disconnect();

            // Sanity
            if (source == null || String.IsNullOrEmpty(source.Name))
            {
                return;
            }

            // a source_t to describe the source to connect to.
            NDIlib.source_t source_t = new NDIlib.source_t()
            {
                p_ndi_name = UTF.StringToUtf8(source.Name)
            };

            // make a description of the receiver we want
            NDIlib.recv_create_v3_t recvDescription = new NDIlib.recv_create_v3_t()
            {
                // the source we selected
                source_to_connect_to = source_t,

                // we want BGRA frames for this example
                color_format = colorFormat,

                // we want full quality - for small previews or limited bandwidth, choose lowest
                bandwidth = bandwidth,

                // let NDIlib deinterlace for us if needed
                allow_video_fields = allowVideoFields,

                // The name of the NDI receiver to create. This is a NULL-terminated UTF-8 string and should be
                // the name of the receive channel that you have. This is in many ways symmetric with the name of
                // senders, so this might be "Channel 1" on your system.
                p_ndi_recv_name = UTF.StringToUtf8(ReceiverName)
            };

            // create a new instance connected to this source
            _recvInstancePtr = NDIlib.recv_create_v3(ref recvDescription);

            // free the memory we allocated with StringToUtf8
            Marshal.FreeHGlobal(source_t.p_ndi_name);
            Marshal.FreeHGlobal(recvDescription.p_ndi_recv_name);

            // did it work?
            System.Diagnostics.Debug.Assert(_recvInstancePtr != IntPtr.Zero, "Failed to create NDI receive instance.");

            if (_recvInstancePtr != IntPtr.Zero)
            {
                // We are now going to mark this source as being on program output for tally purposes (but not on preview)
                SetTallyIndicators(true, false);

                // start up a thread to receive on
                _receiveThread = new Thread(ReceiveThreadProc)
                {
                    IsBackground = true, Name = "NdiExampleReceiveThread"
                };
                _receiveThread.Start();
            }
        }
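
A hypothetical usage of Connect; the source would come from the Finder above, and the lower bandwidth setting just illustrates the option mentioned in the comment about small previews (the enum member name follows the wrapper's naming pattern and is an assumption):

        // Hypothetical usage: connect to a discovered source with a low-bandwidth preview stream.
        receiver.Connect(
            source,
            colorFormat:      NDIlib.recv_color_format_e.recv_color_format_BGRX_BGRA,
            bandwidth:        NDIlib.recv_bandwidth_e.recv_bandwidth_lowest,
            allowVideoFields: false);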
Example No. 11
        /// <summary>
        /// the receive thread runs though this loop until told to exit
        /// </summary>
        void ReceiveThreadProc()
        {
            bool newVideo = true;

            using var deviceHandle = deviceProvider.GetHandle();
            var device = deviceHandle.Resource;

            while (!_exitThread && _recvInstancePtr != IntPtr.Zero)
            {
                // The descriptors
                NDIlib.video_frame_v2_t videoFrame    = new NDIlib.video_frame_v2_t();
                NDIlib.audio_frame_v2_t audioFrame    = new NDIlib.audio_frame_v2_t();
                NDIlib.metadata_frame_t metadataFrame = new NDIlib.metadata_frame_t();

                switch (NDIlib.recv_capture_v2(_recvInstancePtr, ref videoFrame, ref audioFrame, ref metadataFrame, 1000))
                {
                // No data
                case NDIlib.frame_type_e.frame_type_none:
                    // No data received
                    break;

                // frame settings - check for extended functionality
                case NDIlib.frame_type_e.frame_type_status_change:
                    // check for PTZ
                    IsPtz = NDIlib.recv_ptz_is_supported(_recvInstancePtr);

                    // Check for recording
                    IsRecordingSupported = NDIlib.recv_recording_is_supported(_recvInstancePtr);

                    // Check for a web control URL
                    // We must free this string ptr if we get one.
                    IntPtr webUrlPtr = NDIlib.recv_get_web_control(_recvInstancePtr);
                    if (webUrlPtr == IntPtr.Zero)
                    {
                        WebControlUrl = String.Empty;
                    }
                    else
                    {
                        // convert to managed String
                        WebControlUrl = UTF.Utf8ToString(webUrlPtr);

                        // Don't forget to free the string ptr
                        NDIlib.recv_free_string(_recvInstancePtr, webUrlPtr);
                    }

                    break;

                // Video data
                case NDIlib.frame_type_e.frame_type_video:

                    // if not enabled, just discard
                    // this can also occasionally happen when changing sources
                    if (!_videoEnabled || videoFrame.p_data == IntPtr.Zero)
                    {
                        // always free received frames
                        NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                        break;
                    }

                    // get all our info so that we can free the frame
                    int yres = (int)videoFrame.yres;
                    int xres = (int)videoFrame.xres;

                    // quick and dirty aspect ratio correction for non-square pixels - SD 4:3, 16:9, etc.
                    double dpiX = 96.0 * (videoFrame.picture_aspect_ratio / ((double)xres / (double)yres));

                    int stride     = (int)videoFrame.line_stride_in_bytes;
                    int bufferSize = yres * stride;


                    if (bufferSize != buffer01Size)
                    {
                        buffer0      = Marshal.ReAllocCoTaskMem(buffer0, bufferSize);
                        buffer1      = Marshal.ReAllocCoTaskMem(buffer1, bufferSize);
                        buffer01Size = bufferSize;
                    }

                    // Copy data
                    unsafe
                    {
                        byte *dst = (byte *)buffer0.ToPointer();
                        byte *src = (byte *)videoFrame.p_data.ToPointer();

                        for (int y = 0; y < yres; y++)
                        {
                            memcpy(dst, src, stride);
                            dst += stride;
                            src += stride;
                        }
                    }

                    // swap
                    IntPtr temp = buffer0;
                    buffer0 = buffer1;
                    buffer1 = temp;

                    SharpDX.DXGI.Format texFmt;
                    switch (videoFrame.FourCC)
                    {
                    case NDIlib.FourCC_type_e.FourCC_type_BGRA:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_BGRX:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBA:
                        texFmt = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        break;

                    case NDIlib.FourCC_type_e.FourCC_type_RGBX:
                        texFmt = SharpDX.DXGI.Format.B8G8R8A8_UNorm;
                        break;

                    default:
                        texFmt = SharpDX.DXGI.Format.Unknown;         // TODO: need to handle other video formats
                        break;
                    }

                    if (newVideo)     // this is the first video frame received, so create a new texture
                    {
                        textureDesc = new Texture2DDescription()
                        {
                            Width             = xres,
                            Height            = yres,
                            MipLevels         = 1,
                            ArraySize         = 1,
                            Format            = texFmt,
                            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
                            Usage             = ResourceUsage.Default,
                            BindFlags         = BindFlags.ShaderResource,
                            CpuAccessFlags    = CpuAccessFlags.None,
                            OptionFlags       = ResourceOptionFlags.None
                        };

                        outputTexture = new Texture2D(device, textureDesc);

                        newVideo = false;
                    }

                    // upload the copied frame into the texture
                    // UpdateSubresource needs the row pitch (bytes per line) and the slice size
                    DataBox srcBox = new DataBox(buffer1, stride, bufferSize);
                    device.ImmediateContext.UpdateSubresource(srcBox, outputTexture, 0);

                    videoFrames.OnNext(outputTexture);


                    // free received frames AFTER use!
                    // the frame data has already been uploaded to the texture above, so it is safe to free here.
                    NDIlib.recv_free_video_v2(_recvInstancePtr, ref videoFrame);

                    break;

                // audio is beyond the scope of this example
                case NDIlib.frame_type_e.frame_type_audio:

                    // if no audio or disabled, nothing to do
                    if (!_audioEnabled || audioFrame.p_data == IntPtr.Zero || audioFrame.no_samples == 0)
                    {
                        // always free received frames
                        NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                        break;
                    }

                    //// if the audio format changed, we need to reconfigure the audio device
                    //bool formatChanged = false;

                    //// make sure our format has been created and matches the incoming audio
                    //if (_waveFormat == null ||
                    //    _waveFormat.Channels != audioFrame.no_channels ||
                    //    _waveFormat.SampleRate != audioFrame.sample_rate)
                    //{
                    //    //// Create a wave format that matches the incoming frames
                    //    //_waveFormat = WaveFormat.CreateIeeeFloatWaveFormat((int)audioFrame.sample_rate, (int)audioFrame.no_channels);

                    //    formatChanged = true;
                    //}

                    //// set up our audio buffer if needed
                    //if (_bufferedProvider == null || formatChanged)
                    //{
                    //    _bufferedProvider = new BufferedWaveProvider(_waveFormat);
                    //    _bufferedProvider.DiscardOnBufferOverflow = true;
                    //}

                    //// set up our multiplexer used to mix down to 2 output channels)
                    //if (_multiplexProvider == null || formatChanged)
                    //{
                    //    _multiplexProvider = new MultiplexingWaveProvider(new List<IWaveProvider>() { _bufferedProvider }, 2);
                    //}

                    //    // set up our audio output device
                    //    if (_wasapiOut == null || formatChanged)
                    //    {
                    //        // We can't guarantee audio sync or buffer fill, that's beyond the scope of this example.
                    //        // This is close enough to show that audio is received and converted correctly.
                    //        _wasapiOut = new WasapiOut(NAudio.CoreAudioApi.AudioClientShareMode.Shared, 50);
                    //        _wasapiOut.Init(_multiplexProvider);
                    //        _wasapiOut.Volume = _volume;
                    //        _wasapiOut.Play();
                    //    }

                    // we're working in bytes, so take the size of a 32 bit sample (float) into account
                    int sizeInBytes = (int)audioFrame.no_samples * (int)audioFrame.no_channels * sizeof(float);

                    // NAudio is expecting interleaved audio and NDI uses planar.
                    // create an interleaved frame and convert from the one we received
                    NDIlib.audio_frame_interleaved_32f_t interleavedFrame = new NDIlib.audio_frame_interleaved_32f_t()
                    {
                        sample_rate = audioFrame.sample_rate,
                        no_channels = audioFrame.no_channels,
                        no_samples  = audioFrame.no_samples,
                        timecode    = audioFrame.timecode
                    };

                    // we need a managed byte array to add to buffered provider
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    interleavedFrame.p_data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    // There is a matching version of this that converts to interleaved 16 bit audio frames if you need 16 bit
                    NDIlib.util_audio_to_interleaved_32f_v2(ref audioFrame, ref interleavedFrame);

                    // release the pin on the byte[]
                    // never try to access p_data after the byte[] has been unpinned!
                    // that IntPtr will no longer be valid.
                    handle.Free();

                    //// push the byte[] buffer into the bufferedProvider for output
                    //_bufferedProvider.AddSamples(audBuffer, 0, sizeInBytes);

                    // free the frame that was received
                    NDIlib.recv_free_audio_v2(_recvInstancePtr, ref audioFrame);

                    break;

                // Metadata
                case NDIlib.frame_type_e.frame_type_metadata:

                    // UTF-8 strings must be converted for use - length includes the terminating zero
                    //String metadata = Utf8ToString(metadataFrame.p_data, metadataFrame.length-1);

                    //System.Diagnostics.Debug.Print(metadata);

                    // free frames that were received
                    NDIlib.recv_free_metadata(_recvInstancePtr, ref metadataFrame);
                    break;
                }
            }
        }
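
The texture above is created only for the first video frame; if the source can change resolution or pixel format at runtime, the description needs to be compared against the incoming frame and the texture recreated. A sketch of that check, reusing the fields from the loop above (whether the source actually changes format is an assumption):

                    // Recreate the output texture whenever the incoming frame stops matching it.
                    if (outputTexture == null ||
                        textureDesc.Width  != xres ||
                        textureDesc.Height != yres ||
                        textureDesc.Format != texFmt)
                    {
                        outputTexture?.Dispose();

                        textureDesc = new Texture2DDescription()
                        {
                            Width             = xres,
                            Height            = yres,
                            MipLevels         = 1,
                            ArraySize         = 1,
                            Format            = texFmt,
                            SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
                            Usage             = ResourceUsage.Default,
                            BindFlags         = BindFlags.ShaderResource,
                            CpuAccessFlags    = CpuAccessFlags.None,
                            OptionFlags       = ResourceOptionFlags.None
                        };

                        outputTexture = new Texture2D(device, textureDesc);
                    }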
Example No. 12
 // Construct from NDIlib.source_t
 public Source(NDIlib.source_t source_t)
 {
     Name = UTF.Utf8ToString(source_t.p_ndi_name);
 }
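
The UTF.Utf8ToString counterpart used here and in the receive paths is likewise not included in the snippets. A minimal sketch, assuming the input is a NUL-terminated UTF-8 string in unmanaged memory; only the name comes from the examples:

 // Sketch only: read a NUL-terminated UTF-8 string from unmanaged memory.
 public static String Utf8ToString(IntPtr nativeUtf8)
 {
     if (nativeUtf8 == IntPtr.Zero)
     {
         return String.Empty;
     }

     // find the terminating NUL
     int length = 0;
     while (Marshal.ReadByte(nativeUtf8, length) != 0)
     {
         length++;
     }

     byte[] buffer = new byte[length];
     Marshal.Copy(nativeUtf8, buffer, 0, length);

     return System.Text.Encoding.UTF8.GetString(buffer);
 }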