Code example #1
 public void TestConstrutor()
 {
     CameraRGBFrameData data = new CameraRGBFrameData();
     bool pass = data.DataPointer != IntPtr.Zero;
     data.Dispose();
     Assert.AreEqual(true, pass);
 }
Code example #2
 public void TestSize()
 {
     CameraRGBFrameData data = new CameraRGBFrameData();
     int expected = 512 * 424 * 12;
     bool pass = data.SizeInBytes == expected;
     data.Dispose();
     Assert.AreEqual(pass, true);
 }
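The hard-coded expected value is the Kinect v2 depth resolution (512 × 424 pixels) multiplied by the size of a CameraSpacePoint (three 4-byte floats, 12 bytes). Below is a minimal variant of the same test that derives that factor instead of hard-coding it; the method name and the use of Marshal.SizeOf are illustrative and not part of the original suite:

 // Hypothetical variant, not part of the kgp test suite: derives the expected
 // size from Marshal.SizeOf instead of hard-coding the factor 12.
 public void TestSizeComputed()
 {
     CameraRGBFrameData data = new CameraRGBFrameData();
     // CameraSpacePoint holds X, Y, Z floats, so this evaluates to 12 bytes.
     int bytesPerPoint = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Microsoft.Kinect.CameraSpacePoint));
     int expected = 512 * 424 * bytesPerPoint; // Kinect v2 depth resolution
     bool pass = data.SizeInBytes == expected;
     data.Dispose();
     Assert.AreEqual(true, pass);
 }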
Code example #3
        public void TestDisposeAccess()
        {
            CameraRGBFrameData data = new CameraRGBFrameData();
            data.Dispose();

            //Should throw exception
            var pointer = data.DataPointer;
        }
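The comment indicates the test relies on the property throwing once the frame is disposed, presumably via an expected-exception attribute that the listing omits. A sketch of an equivalent explicit assertion, assuming MSTest v2 and assuming the exception is an ObjectDisposedException (neither assumption is confirmed by this listing):

        // Sketch only: explicit form of the expected-exception check above.
        public void TestDisposeAccessExplicit()
        {
            CameraRGBFrameData data = new CameraRGBFrameData();
            data.Dispose();

            // Assumed exception type; adjust if the implementation throws something else.
            Assert.ThrowsException<ObjectDisposedException>(() =>
            {
                var pointer = data.DataPointer;
            });
        }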
Code example #4
        public void TestMultipleDispose()
        {
            CameraRGBFrameData data = new CameraRGBFrameData();
            data.Dispose();
            //Second call to dispose should do nothing
            data.Dispose();

            Assert.AreEqual(data.DataPointer, IntPtr.Zero);
        }
Code example #5
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColoredPointCloudView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColoredPointCloudView.fx", "PS");

            DX11NullInstancedDrawer nulldrawer = new DX11NullInstancedDrawer();
            nulldrawer.VertexCount = Consts.DepthWidth;
            nulldrawer.InstanceCount = Consts.DepthHeight;
            DX11NullGeometry nullGeom = new DX11NullGeometry(device, nulldrawer);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadRgb = false;

            CameraRGBFrameData cameraFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            DepthToColorFrameData depthToColorFrame = new DepthToColorFrameData();
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { cameraFrame.Update(sensor.CoordinateMapper, args.DepthData); depthToColorFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            //Get coordinate map + rgb
            ColorRGBAFrameData colorFrame = new ColorRGBAFrameData();
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorFrame = args.FrameData; uploadRgb = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, cameraFrame);
                    depthToColorTexture.Copy(context.Context, depthToColorFrame);
                    uploadCamera = false;
                }

                if (uploadRgb)
                {
                    colorTexture.Copy(context.Context, colorFrame);
                    uploadRgb = false;
                }

                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, colorTexture.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, depthToColorTexture.ShaderView);

                context.Context.VertexShader.SetSampler(0, device.SamplerStates.LinearClamp);

                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                nullGeom.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            cameraTexture.Dispose();

            provider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            colorTexture.Dispose();
            colorProvider.Dispose();

            depthToColorFrame.Dispose();
            depthToColorTexture.Dispose();
        }
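The cbCamera struct uploaded through ConstantBuffer<cbCamera> is not shown in this listing. A plausible definition, given that the sample writes two SharpDX matrices and transposes them before upload (HLSL constant buffers default to column-major packing, while SharpDX.Matrix is row-major); the layout below is an assumption, not the kgp source, and it presumes using directives for SharpDX and System.Runtime.InteropServices:

        // Assumed definition of cbCamera: two 4x4 matrices laid out sequentially,
        // mapping onto a 128-byte HLSL cbuffer.
        [StructLayout(LayoutKind.Sequential)]
        struct cbCamera
        {
            public Matrix Projection; // bytes 0..63
            public Matrix View;       // bytes 64..127
        }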
Code example #6
 /// <summary>
 /// Copies frame data to GPU
 /// </summary>
 /// <param name="context">Device context</param>
 /// <param name="frame">Frame data</param>
 public void Copy(DeviceContext context, CameraRGBFrameData frame)
 {
     this.texture.Upload(context, frame.DataPointer, frame.SizeInBytes);
 }
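Copy simply forwards the raw pointer and byte count to the texture's Upload helper. A sketch of how such an upload is typically done on a dynamic Direct3D 11 resource with plain SharpDX calls; this is an illustration, not the kgp implementation, and it ignores row-pitch handling for 2D textures:

 // Sketch only: fill a dynamic D3D11 resource from CPU memory via map/discard.
 void UploadSketch(DeviceContext context, SharpDX.Direct3D11.Resource resource, IntPtr source, int sizeInBytes)
 {
     // Note: a real 2D texture upload must copy row by row using box.RowPitch.
     DataBox box = context.MapSubresource(resource, 0, MapMode.WriteDiscard, MapFlags.None);
     SharpDX.Utilities.CopyMemory(box.DataPointer, source, sizeInBytes);
     context.UnmapSubresource(resource, 0);
 }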
Code example #7
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple filtered point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            ComputeShader computeShader = ShaderCompiler.CompileFromFile<ComputeShader>(device, "PointCloudFilter.fx", "CS_Filter");

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "PointCloudJointView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "PointCloudJointView.fx", "PS");

            BodyCameraPositionBuffer positionBuffer = new BodyCameraPositionBuffer(device);
            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, ColorTable);

            DX11NullGeometry nullGeom = new DX11NullGeometry(device);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;
            InstancedIndirectBuffer indirectDrawBuffer = new InstancedIndirectBuffer(device);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadBodyIndex = false;
            bool uploadBody = false;

            CameraRGBFrameData rgbFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { rgbFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            BodyIndexFrameData bodyIndexFrame = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexFrame = args.FrameData; uploadBodyIndex = true; };

            AppendPointCloudBuffer pointCloudBuffer = new AppendPointCloudBuffer(device);

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyFrameProvider = new KinectSensorBodyFrameProvider(sensor);
            bodyFrameProvider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; uploadBody = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, rgbFrame);
                    uploadCamera = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context.Context, bodyIndexFrame);
                    uploadBodyIndex = false;
                }

                if (uploadBody)
                {
                    positionBuffer.Copy(context, bodyFrame.TrackedOnly().ClosestBodies());
                    uploadBody = false;
                }

                //Prepare compute shader
                context.Context.ComputeShader.Set(computeShader);
                context.Context.ComputeShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.ComputeShader.SetShaderResource(1, bodyIndexTexture.RawView); //Set raw view here, we do not sample

                context.Context.ComputeShader.SetUnorderedAccessView(0, pointCloudBuffer.UnorderedView, 0); //Don't forget to set count to 0

                context.Context.Dispatch(Consts.DepthWidth / 8, Consts.DepthHeight / 8, 1); //No iDivUp here, since it's not needed
                context.Context.ComputeShader.SetUnorderedAccessView(0, null); //Make runtime happy, and if we don't unbind we can't set as srv
                context.Context.CopyStructureCount(indirectDrawBuffer.ArgumentBuffer, 0, pointCloudBuffer.UnorderedView);

                //Draw filter buffer
                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, pointCloudBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, positionBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                context.Context.DrawInstancedIndirect(indirectDrawBuffer.ArgumentBuffer, 0);

                context.Context.VertexShader.SetShaderResource(0, null); //Make runtime happy

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            cameraBuffer.Dispose();
            cameraTexture.Dispose();
            bodyIndexTexture.Dispose();

            provider.Dispose();
            bodyIndexProvider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            positionBuffer.Dispose();
            colorTableBuffer.Dispose();

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();
        }
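The dispatch comment above refers to iDivUp, the usual integer ceiling division used to size compute dispatches when the frame size is not a multiple of the thread-group size; it is unnecessary here because 512 and 424 are both divisible by 8. A typical definition, shown only for reference (the helper itself is not part of this listing):

        // Common helper for compute dispatch sizing: ceil(a / b) in integer arithmetic.
        static int iDivUp(int a, int b)
        {
            return (a + b - 1) / b;
        }

        // e.g. context.Context.Dispatch(iDivUp(Consts.DepthWidth, 8), iDivUp(Consts.DepthHeight, 8), 1);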
Code example #8
 public void TestDisposedSize()
 {
     CameraRGBFrameData data = new CameraRGBFrameData();
     data.Dispose();
     Assert.AreEqual(data.SizeInBytes, 0);
 }
Code example #9
 public void TestDispose()
 {
     CameraRGBFrameData data = new CameraRGBFrameData();
     data.Dispose();
     Assert.AreEqual(data.DataPointer, IntPtr.Zero);
 }
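Taken together, the disposal tests pin down the expected behaviour: Dispose releases the unmanaged buffer, resets DataPointer to IntPtr.Zero and SizeInBytes to 0, and is safe to call more than once, while TestDisposeAccess additionally expects property access to throw afterwards. Below is a sketch of a Dispose method that satisfies the reset and idempotence requirements, assuming the buffer was allocated with Marshal.AllocHGlobal; the field names are illustrative, not taken from the kgp source:

 // Illustrative sketch only; field names are assumptions.
 public void Dispose()
 {
     if (this.dataPointer == IntPtr.Zero)
     {
         return; // second call is a no-op (TestMultipleDispose)
     }

     System.Runtime.InteropServices.Marshal.FreeHGlobal(this.dataPointer);
     this.dataPointer = IntPtr.Zero; // TestDispose / TestMultipleDispose
     this.sizeInBytes = 0;           // TestDisposedSize
 }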