Code example #1
public void TestConstructor()
{
    // A freshly constructed frame should expose a valid native buffer
    ColorToDepthFrameData data = new ColorToDepthFrameData();
    bool pass = data.DataPointer != IntPtr.Zero;
    data.Dispose();
    Assert.AreEqual(true, pass);
}
Code example #2
public void TestSize()
{
    ColorToDepthFrameData data = new ColorToDepthFrameData();
    // One 8-byte entry per pixel of a 1920 x 1080 color frame
    int expected = 1920 * 1080 * 8;
    bool pass = data.SizeInBytes == expected;
    data.Dispose();
    Assert.AreEqual(true, pass);
}
Code example #3
public void TestDisposeAccess()
{
    ColorToDepthFrameData data = new ColorToDepthFrameData();
    data.Dispose();

    // Accessing the pointer after Dispose should throw an exception
    var pointer = data.DataPointer;
}
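The test above relies on the framework to surface the exception. In MSTest that intent is usually made explicit with an ExpectedException attribute; below is a minimal sketch, assuming MSTest and assuming the library throws ObjectDisposedException (the exact exception type is not stated in the source):

using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
// plus the kgp namespace that declares ColorToDepthFrameData

[TestClass]
public class ColorToDepthFrameDataDisposeTests
{
    // Hypothetical attribute-based variant of TestDisposeAccess above.
    [TestMethod]
    [ExpectedException(typeof(ObjectDisposedException))]
    public void DataPointer_AfterDispose_Throws()
    {
        ColorToDepthFrameData data = new ColorToDepthFrameData();
        data.Dispose();

        var pointer = data.DataPointer; // should throw here
    }
}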
Code example #4
public void TestMultipleDispose()
{
    ColorToDepthFrameData data = new ColorToDepthFrameData();
    data.Dispose();
    // Second call to Dispose should do nothing
    data.Dispose();

    Assert.AreEqual(IntPtr.Zero, data.DataPointer);
}
Code example #5
public void TestCopy()
{
    // 'device' is expected to be a RenderDevice provided by the test fixture
    using (ColorToDepthFrameData frame = new ColorToDepthFrameData())
    {
        using (DynamicColorToDepthTexture texture = new DynamicColorToDepthTexture(device))
        {
            texture.Copy(device.ImmediateContext, frame);
        }
    }
}
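The test above uses a device field that is not declared in the snippet; presumably the test class creates a single RenderDevice shared by its tests. A minimal sketch of such a fixture, assuming MSTest (the class and member names are illustrative, not taken from the repository):

using Microsoft.VisualStudio.TestTools.UnitTesting;
// plus the kgp and SharpDX namespaces used by the examples

[TestClass]
public class DynamicColorToDepthTextureTests
{
    // Hypothetical shared fixture providing the 'device' used by TestCopy.
    private static RenderDevice device;

    [ClassInitialize]
    public static void Setup(TestContext context)
    {
        // Same creation flags as the Program.cs sample in code example #8 below.
        device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
    }

    [ClassCleanup]
    public static void Teardown()
    {
        device.Dispose();
    }
}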
Code example #6
public void TestDisposedSize()
{
    ColorToDepthFrameData data = new ColorToDepthFrameData();
    data.Dispose();
    Assert.AreEqual(0, data.SizeInBytes);
}
Code example #7
public void TestDispose()
{
    ColorToDepthFrameData data = new ColorToDepthFrameData();
    data.Dispose();
    Assert.AreEqual(IntPtr.Zero, data.DataPointer);
}
Code example #8
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect background subtraction sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            PixelShader depthPixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "FilterDepthView.fx", "PS_Sample");
            PixelShader rgbPixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "FilterRGBView.fx", "PS_Sample");

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool swapMode = false;

            bool uploadColor = false;
            bool uploadBodyIndex = false;

            //We need color and body index for subtraction
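            // The FrameReceived handlers below only store the latest frame and set an upload flag;
            // the actual GPU copies are performed inside RenderLoop.Run further down.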
            ColorRGBAFrameData colorData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorData = args.FrameData; uploadColor = true; };

            BodyIndexFrameData bodyIndexData = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexData = args.FrameData; uploadBodyIndex = true; };

            bool uploadColorToDepth = false;
            bool uploadDepthToColor = false;
            ColorToDepthFrameData colorToDepthData = new ColorToDepthFrameData();
            DepthToColorFrameData depthToColorData = new DepthToColorFrameData();
            KinectSensorDepthFrameProvider depthProvider = new KinectSensorDepthFrameProvider(sensor);
            depthProvider.FrameReceived += (sender, args) =>
            {
                if (!swapMode)
                {
                    colorToDepthData.Update(sensor.CoordinateMapper, args.DepthData);
                    uploadColorToDepth = true;
                }
                else
                {
                    depthToColorData.Update(sensor.CoordinateMapper, args.DepthData);
                    uploadDepthToColor = true;
                }
            };

            DynamicColorToDepthTexture colorToDepthTexture = new DynamicColorToDepthTexture(device);
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } if (args.KeyCode == Keys.Space) { swapMode = !swapMode; } };

            RenderLoop.Run(form, () =>
            {
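                // Each frame: upload any frame data flagged by the handlers above, then draw a
                // full-screen triangle sampling the color, body index and the selected mapping texture.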
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, colorData);
                    uploadColor = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context, bodyIndexData);
                    uploadBodyIndex = false;
                }

                if (uploadColorToDepth)
                {
                    colorToDepthTexture.Copy(context, colorToDepthData);
                    uploadColorToDepth = false;
                }

                if (uploadDepthToColor)
                {
                    depthToColorTexture.Copy(context, depthToColorData);
                    uploadDepthToColor = false;
                }

                ShaderResourceView view = swapMode ? depthToColorTexture.ShaderView : colorToDepthTexture.ShaderView;
                PixelShader shader = swapMode ? depthPixelShader : rgbPixelShader;

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTriVS(context);

                context.Context.PixelShader.Set(shader);
                context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);
                //Note: make sure to use normalized view as we use untyped resource here
                context.Context.PixelShader.SetShaderResource(1, bodyIndexTexture.NormalizedView);
                context.Context.PixelShader.SetShaderResource(2, view);

                context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            depthProvider.Dispose();
            colorToDepthData.Dispose();
            depthToColorData.Dispose();
            colorToDepthTexture.Dispose();
            depthToColorTexture.Dispose();

            colorTexture.Dispose();
            colorProvider.Dispose();

            // bodyIndexData may still be null if no body index frame was ever received
            if (bodyIndexData != null) { bodyIndexData.Dispose(); }
            bodyIndexProvider.Dispose();

            depthPixelShader.Dispose();
            rgbPixelShader.Dispose();

            sensor.Close();
        }
Code example #9
/// <summary>
/// Copies frame data from the CPU to the GPU.
/// </summary>
/// <remarks>Use the immediate context here; do not use a deferred context
/// unless you really know what you are doing.</remarks>
/// <param name="context">Device context</param>
/// <param name="frame">Frame data</param>
public void Copy(DeviceContext context, ColorToDepthFrameData frame)
{
    this.texture.Upload(context, frame.DataPointer, frame.SizeInBytes);
}
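For completeness, a minimal call site in the spirit of code examples #5 and #8 above; this is only a sketch and assumes the caller already owns a RenderDevice named device and has obtained depth data from the sensor:

// Sketch: upload the color-to-depth mapping using the immediate context,
// as the remarks above recommend.
using (var frame = new ColorToDepthFrameData())
using (var texture = new DynamicColorToDepthTexture(device))
{
    // frame.Update(sensor.CoordinateMapper, depthData); // once a depth frame is available
    texture.Copy(device.ImmediateContext, frame);
}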