Example #1
        public void Stop()
        {
            logger.Debug("ScreenReceiver::Stop()");

            if (rtpReceiver != null)
            {
                rtpReceiver.RtpPacketReceived -= RtpReceiver_RtpPacketReceived;
                rtpReceiver.Stop();
            }

            if (decoder != null)
            {
                decoder.Close();
                decoder = null;
            }

            if (processor != null)
            {
                processor.Close();
                processor = null;
            }

            if (sharedTexture != null)
            {
                sharedTexture.Dispose();
                sharedTexture = null;
            }
            if (device != null)
            {
                device.Dispose();
                device = null;
            }

            // Statistic.UnregisterCounter(receiverStats);
        }
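The null-check / Dispose / null-assign pattern above repeats for every resource. A minimal sketch of a generic helper that would collapse those branches; the DisposeAndNull name is hypothetical and not part of the original code (decoder and processor expose Close() rather than Dispose(), so they stay as written):

        // hypothetical helper, not part of the original code
        private static void DisposeAndNull<T>(ref T resource) where T : class, IDisposable
        {
            if (resource != null)
            {
                resource.Dispose();
                resource = null;
            }
        }

        // usage sketch: DisposeAndNull(ref sharedTexture); DisposeAndNull(ref device);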
Example #2
        public void Close()
        {
            logger.Debug("VideoCaptureSource::Close()");

            if (mediaSource != null)
            {
                //mediaSource?.Shutdown();

                mediaSource.Dispose();
                mediaSource = null;
            }

            if (sourceReader != null)
            {
                sourceReader.Dispose();
                sourceReader = null;
            }

            if (device != null)
            {
                device.Dispose();
                device = null;
            }

            if (SharedTexture != null)
            {
                SharedTexture.Dispose();
                SharedTexture = null;
            }

            if (texture != null)
            {
                texture.Dispose();
                texture = null;
            }

            if (processor != null)
            {
                processor.Close();
                processor = null;
            }
        }
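The commented-out mediaSource?.Shutdown() hints at the usual Media Foundation teardown order: a media source is normally shut down before it is released. A hedged sketch of that branch, assuming the SharpDX MediaSource.Shutdown() wrapper is available in the build used here:

            if (mediaSource != null)
            {
                try
                {
                    mediaSource.Shutdown();   // break Media Foundation's internal references first
                }
                catch (Exception ex)
                {
                    logger.Error(ex);         // same logger call used elsewhere in these examples
                }

                mediaSource.Dispose();
                mediaSource = null;
            }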
Example #3
        private void CleanUp()
        {
            if (sourceReaderCallback != null)
            {
                sourceReaderCallback.OnReadSample -= SourceReaderCallback_OnReadSample;
                sourceReaderCallback.OnFlush      -= SourceReaderCallback_OnFlush;
            }

            if (sourceReader != null)
            {
                //sourceReader.Flush(SourceReaderIndex.FirstVideoStream);

                sourceReader.Dispose();
                sourceReader = null;
            }

            if (device != null)
            {
                device.Dispose();
                device = null;
            }

            if (SharedTexture != null)
            {
                SharedTexture.Dispose();
                SharedTexture = null;
            }

            if (stagingTexture != null)
            {
                stagingTexture.Dispose();
                stagingTexture = null;
            }

            if (processor != null)
            {
                processor.Close();
                processor = null;
            }
        }
Example #4
        public void Setup(object pars)
        {
            logger.Debug("VideoCaptureSource::Setup()");

            if (State != CaptureState.Closed)
            {
                throw new InvalidOperationException("Invalid capture state " + State);
            }

            UvcDevice captureParams = pars as UvcDevice;
            if (captureParams == null)
            {
                throw new ArgumentException("Expected capture parameters of type " + nameof(UvcDevice), nameof(pars));
            }

            var deviceId = captureParams.DeviceId;

            try
            {
                int adapterIndex = 0;
                using (var dxgiFactory = new SharpDX.DXGI.Factory1())
                {
                    using (var adapter = dxgiFactory.GetAdapter1(adapterIndex))
                    {
                        var deviceCreationFlags = //DeviceCreationFlags.Debug |
                                                  DeviceCreationFlags.VideoSupport |
                                                  DeviceCreationFlags.BgraSupport;

                        device = new Device(adapter, deviceCreationFlags);

                        using (var multiThread = device.QueryInterface<SharpDX.Direct3D11.Multithread>())
                        {
                            multiThread.SetMultithreadProtected(true);
                        }
                    }
                }

                sourceReader = CreateSourceReaderByDeviceId(deviceId);

                if (sourceReader == null)
                {
                    throw new Exception("Unable to create media source reader " + deviceId);
                }

                var mediaType = sourceReader.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);

                logger.Debug("------------------CurrentMediaType-------------------");
                logger.Debug(MfTool.LogMediaType(mediaType));

                srcSize = MfTool.GetFrameSize(mediaType);

                var destSize = captureParams.Resolution;

                if (destSize.IsEmpty)
                {
                    destSize = srcSize;
                }

                var subtype = mediaType.Get(MediaTypeAttributeKeys.Subtype);


                mediaType?.Dispose();


                SharedTexture = new Texture2D(device,
                                              new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.None,
                    BindFlags         = BindFlags.RenderTarget | BindFlags.ShaderResource,
                    Format            = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                    Width             = destSize.Width,
                    Height            = destSize.Height,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Default,
                    OptionFlags       = ResourceOptionFlags.Shared,
                });

                stagingTexture = new Texture2D(device,
                                               new Texture2DDescription
                {
                    CpuAccessFlags = CpuAccessFlags.Read,
                    //BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
                    BindFlags         = BindFlags.None,
                    Format            = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                    Width             = destSize.Width,
                    Height            = destSize.Height,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging,
                    OptionFlags       = ResourceOptionFlags.None,
                });


                processor = new MfVideoProcessor(null);
                var inputArgs = new MfVideoArgs
                {
                    Width  = srcSize.Width,
                    Height = srcSize.Height,
                    Format = subtype,//VideoFormatGuids.NV12,
                };


                var outputArgs = new MfVideoArgs
                {
                    Width  = destSize.Width,
                    Height = destSize.Height,
                    Format = VideoFormatGuids.Argb32,
                };

                processor.Setup(inputArgs, outputArgs);
                processor.SetMirror(VideoProcessorMirror.MirrorVertical);

                state = CaptureState.Initialized;
            }
            catch (Exception ex)
            {
                logger.Error(ex);
                LastError = ex;
                errorCode = (int)SharedTypes.ErrorCode.NotInitialized;

                CleanUp();

                throw;
            }
        }
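A hedged usage sketch for the Setup(object) overload above. It assumes UvcDevice is a plain settings class whose DeviceId and Resolution members (both read in the snippet) are settable, that Resolution is a System.Drawing.Size, and that the hosting VideoCaptureSource class (named in the log message) has a parameterless constructor; the device id is a placeholder:

        // hypothetical caller; adjust types and the device id to the real project
        var source = new VideoCaptureSource();
        source.Setup(new UvcDevice
        {
            DeviceId   = "<device symbolic link>",              // placeholder
            Resolution = new System.Drawing.Size(1280, 720),    // an empty Size falls back to the source frame size
        });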
Example #5
            public void Start1()
            {
                var flags = DeviceCreationFlags.VideoSupport |
                            DeviceCreationFlags.BgraSupport |
                            DeviceCreationFlags.Debug;

                var device = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware, flags);

                using (var multiThread = device.QueryInterface<SharpDX.Direct3D11.Multithread>())
                {
                    multiThread.SetMultithreadProtected(true);
                }


                System.Drawing.Bitmap bmp        = new System.Drawing.Bitmap(@"D:\Temp\4.bmp");
                Texture2D             rgbTexture = DxTool.GetTexture(bmp, device);

                var bufTexture = new Texture2D(device,
                                               new Texture2DDescription
                {
                    // Format = Format.NV12,
                    Format            = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                    Width             = 1920,
                    Height            = 1080,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1 },
                });

                device.ImmediateContext.CopyResource(rgbTexture, bufTexture);

                var processor  = new MfVideoProcessor(device);
                var inProcArgs = new MfVideoArgs
                {
                    Width  = 1920,
                    Height = 1080,
                    Format = SharpDX.MediaFoundation.VideoFormatGuids.Argb32,
                };



                var outProcArgs = new MfVideoArgs
                {
                    Width  = 1920,
                    Height = 1080,
                    Format = SharpDX.MediaFoundation.VideoFormatGuids.NV12,                    //.Argb32,
                };

                processor.Setup(inProcArgs, outProcArgs);
                processor.Start();


                var rgbSample = MediaFactory.CreateVideoSampleFromSurface(null);

                // Create the media buffer from the texture
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out var mediaBuffer);

                using (var buffer2D = mediaBuffer.QueryInterface<Buffer2D>())
                {
                    mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
                }

                rgbSample.AddBuffer(mediaBuffer);

                rgbSample.SampleTime     = 0;
                rgbSample.SampleDuration = 0;

                var result = processor.ProcessSample(rgbSample, out var nv12Sample);

                Task.Run(() =>
                {
                    Stopwatch sw = new Stopwatch();
                    int fps      = 60;
                    int interval = (int)(1000.0 / fps);

                    int _count = 1;

                    long globalTime = 0;


                    while (true)
                    {
                        if (result)
                        {
                            globalTime += sw.ElapsedMilliseconds;
                            sw.Restart();


                            nv12Sample.SampleTime     = MfTool.SecToMfTicks((globalTime / 1000.0));
                            nv12Sample.SampleDuration = MfTool.SecToMfTicks(((int)interval / 1000.0));

                            //sample.SampleTime = MfTool.SecToMfTicks((globalTime / 1000.0));
                            //sample.SampleDuration = MfTool.SecToMfTicks(((int)interval / 1000.0));

                            SampleReady?.Invoke(nv12Sample);


                            var msec = sw.ElapsedMilliseconds;

                            var delay = interval - msec;
                            if (delay < 0)
                            {
                                delay = 1;
                            }

                            // var delay = 1;
                            Thread.Sleep((int)delay);
                            var elapsedMilliseconds = sw.ElapsedMilliseconds;
                            globalTime += elapsedMilliseconds;
                            _count++;
                        }
                        else
                        {
                            // ProcessSample failed: sleep instead of spinning the CPU in a tight loop
                            Thread.Sleep(interval);
                        }

                        //nv12Sample?.Dispose();

                        //Thread.Sleep(30);
                    }
                });
            }
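A hedged consumer sketch for the SampleReady event raised in the pacing loop above, assuming it is a simple delegate/event taking the Sample (the sampleSource instance name is hypothetical). Note that the loop re-emits the same nv12Sample instance on every tick, so a subscriber should not dispose it:

                // hypothetical subscriber
                sampleSource.SampleReady += nv12 =>
                {
                    // encode or copy the NV12 data here; do not dispose the shared sample
                };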
Example #6
        public void Setup(int deviceIndex = 0)
        {
            logger.Debug("VideoCaptureSource::Setup()");

            Activate[] activates = null;
            using (var attributes = new MediaAttributes())
            {
                MediaFactory.CreateAttributes(attributes, 1);
                attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);

                activates = MediaFactory.EnumDeviceSources(attributes);
            }

            if (activates == null || activates.Length == 0)
            {
                logger.Error("SourceTypeVideoCapture not found");
                Console.ReadKey();
                return;
            }

            foreach (var activate in activates)
            {
                Console.WriteLine("---------------------------------------------");
                var friendlyName = activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
                var isHwSource   = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
                //var maxBuffers = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
                var symbolicLink = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

                logger.Info("FriendlyName " + friendlyName + "\r\n" +
                            "isHwSource " + isHwSource + "\r\n" +
                            //"maxBuffers " + maxBuffers +
                            "symbolicLink " + symbolicLink);
            }


            var currentActivator = activates[deviceIndex];

            mediaSource = currentActivator.ActivateObject<MediaSource>();

            foreach (var a in activates)
            {
                a.Dispose();
            }

            using (var mediaAttributes = new MediaAttributes(IntPtr.Zero))
            {
                MediaFactory.CreateAttributes(mediaAttributes, 2);
                mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);


                //var devMan = new DXGIDeviceManager();
                //devMan.ResetDevice(device);

                //mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);


                //MediaFactory.CreateSourceReaderFromMediaSource(mediaSource, mediaAttributes, sourceReader);

                sourceReader = new SourceReader(mediaSource, mediaAttributes);
            }

            Console.WriteLine("------------------CurrentMediaType-------------------");
            var mediaType = sourceReader.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);

            Console.WriteLine(MfTool.LogMediaType(mediaType));

            var frameSize = MfTool.GetFrameSize(mediaType);
            var subtype   = mediaType.Get(MediaTypeAttributeKeys.Subtype);


            mediaType?.Dispose();

            //Device device = null;
            int adapterIndex = 0;

            using (var dxgiFactory = new SharpDX.DXGI.Factory1())
            {
                using (var adapter = dxgiFactory.GetAdapter1(adapterIndex))
                {
                    device = new Device(adapter,
                                        //DeviceCreationFlags.Debug |
                                        DeviceCreationFlags.VideoSupport |
                                        DeviceCreationFlags.BgraSupport);

                    using (var multiThread = device.QueryInterface<SharpDX.Direct3D11.Multithread>())
                    {
                        multiThread.SetMultithreadProtected(true);
                    }
                }
            }


            SharedTexture = new Texture2D(device,
                                          new Texture2DDescription
            {
                CpuAccessFlags = CpuAccessFlags.None,
                BindFlags      = BindFlags.RenderTarget | BindFlags.ShaderResource,
                Format         = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                Width          = frameSize.Width,
                Height         = frameSize.Height,

                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Default,
                //OptionFlags = ResourceOptionFlags.GdiCompatible//ResourceOptionFlags.None,
                OptionFlags = ResourceOptionFlags.Shared,
            });

            texture = new Texture2D(device,
                                    new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.Read,
                BindFlags         = BindFlags.None,
                Format            = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                Width             = frameSize.Width,
                Height            = frameSize.Height,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Staging,
                OptionFlags       = ResourceOptionFlags.None,
            });


            processor = new MfVideoProcessor(null);
            var inProcArgs = new MfVideoArgs
            {
                Width  = frameSize.Width,
                Height = frameSize.Height,
                // Format = VideoFormatGuids.Rgb24,
                Format = subtype,//VideoFormatGuids.NV12,
            };


            var outProcArgs = new MfVideoArgs
            {
                Width  = frameSize.Width,
                Height = frameSize.Height,
                Format = VideoFormatGuids.Argb32,
                //Format = VideoFormatGuids.Rgb32,//VideoFormatGuids.Argb32,
            };

            processor.Setup(inProcArgs, outProcArgs);


            //processor.SetMirror(VideoProcessorMirror.MirrorHorizontal);
            processor.SetMirror(VideoProcessorMirror.MirrorVertical);
        }
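With the source reader and video processor configured above, frames are typically pulled synchronously and converted to ARGB32 for the shared texture. A minimal read-loop sketch under that assumption; the ReadSample overload shape and the capturing flag are assumptions and may need adjusting to the SharpDX version in use:

        // hypothetical read loop; 'capturing' is an assumed control flag
        private void ReadLoopSketch()
        {
            while (capturing)
            {
                var sample = sourceReader.ReadSample(SourceReaderIndex.FirstVideoStream,
                                                     SourceReaderControlFlags.None,
                                                     out int actualIndex,
                                                     out SourceReaderFlags readerFlags,
                                                     out long timestamp);
                if (sample == null)
                {
                    continue;   // stream tick or gap without payload
                }

                try
                {
                    // convert to ARGB32 with the processor configured above
                    if (processor.ProcessSample(sample, out var argbSample))
                    {
                        // copy argbSample into SharedTexture / texture here
                        argbSample?.Dispose();
                    }
                }
                finally
                {
                    sample.Dispose();
                }
            }
        }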
Example #7
        //public IntPtr hWnd = IntPtr.Zero;

        public void Setup(VideoEncoderSettings inputPars, VideoEncoderSettings outputPars, NetworkSettings networkPars)
        {
            logger.Debug("ScreenReceiver::Setup(...)");
            var inputArgs = new MfVideoArgs
            {
                Width     = inputPars.Resolution.Width,
                Height    = inputPars.Resolution.Height,
                FrameRate = MfTool.PackToLong(inputPars.FrameRate),
            };

            var outputArgs = new MfVideoArgs
            {
                Width  = outputPars.Resolution.Width,
                Height = outputPars.Resolution.Height,

                FrameRate = MfTool.PackToLong(outputPars.FrameRate),
            };


            int adapterIndex = 0;

            using (var dxgiFactory = new SharpDX.DXGI.Factory1())
            {
                using (var adapter = dxgiFactory.GetAdapter1(adapterIndex))
                {
                    device = new Device(adapter,
                                        //DeviceCreationFlags.Debug |
                                        DeviceCreationFlags.VideoSupport |
                                        DeviceCreationFlags.BgraSupport);

                    using (var multiThread = device.QueryInterface<SharpDX.Direct3D11.Multithread>())
                    {
                        multiThread.SetMultithreadProtected(true);
                    }
                }
            }

            sharedTexture = new Texture2D(device,
                                          new Texture2DDescription
            {
                CpuAccessFlags = CpuAccessFlags.None,
                BindFlags      = BindFlags.RenderTarget | BindFlags.ShaderResource,
                Format         = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                Width          = outputArgs.Width,  //640,//texture.Description.Width,
                Height         = outputArgs.Height, //480,//texture.Description.Height,

                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Default,
                //OptionFlags = ResourceOptionFlags.GdiCompatible//ResourceOptionFlags.None,
                OptionFlags = ResourceOptionFlags.Shared,
            });

            //ImageProvider = new D3DImageProvider(dispatcher);

            //decoder = new DXVADecoder(IntPtr.Zero);

            decoder = new MfH264Decoder(device);

            decoder.Setup(inputArgs);


            var decoderType  = decoder.OutputMediaType;
            var decFormat    = decoderType.Get(MediaTypeAttributeKeys.Subtype);
            var decFrameSize = MfTool.GetFrameSize(decoderType);


            processor = new MfVideoProcessor(device);
            var inProcArgs = new MfVideoArgs
            {
                Width  = decFrameSize.Width,
                Height = decFrameSize.Height,
                Format = decFormat,
            };



            var outProcArgs = new MfVideoArgs
            {
                Width  = outputArgs.Width,
                Height = outputArgs.Height,
                Format = VideoFormatGuids.Argb32,
            };

            processor.Setup(inProcArgs, outProcArgs);


            h264Session = new H264Session();

            if (networkPars.TransportMode == TransportMode.Tcp)
            {
                rtpReceiver = new RtpTcpReceiver(h264Session);
            }
            else if (networkPars.TransportMode == TransportMode.Udp)
            {
                rtpReceiver = new RtpUdpReceiver(h264Session);
            }
            else
            {
                throw new Exception("Unsupported transport mode: " + networkPars.TransportMode);
            }

            h264Session.SSRC = networkPars.SSRC;

            rtpReceiver.Open(networkPars);
            rtpReceiver.RtpPacketReceived += RtpReceiver_RtpPacketReceived;
        }
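sharedTexture is created with ResourceOptionFlags.Shared, so a renderer running on a different D3D11 device can open the same surface through its DXGI shared handle. A minimal sketch of that consumer side; renderDevice stands in for a hypothetical second SharpDX.Direct3D11.Device:

            // hypothetical consumer of the shared texture
            using (var dxgiResource = sharedTexture.QueryInterface<SharpDX.DXGI.Resource>())
            {
                IntPtr handle = dxgiResource.SharedHandle;

                using (var rendererView = renderDevice.OpenSharedResource<Texture2D>(handle))
                {
                    // bind rendererView as a shader resource and present it
                }
            }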