Code example #1
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect multiple hd faces projected to rgb");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedTextureHdFaceView.fx", "VS_Simple");
            VertexShader vertexShaderIndexed = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedTextureHdFaceView.fx", "VS_Indexed");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ProjectedTextureHdFaceView.fx", "PS");

            int maxFaceCount = Consts.MaxBodyCount;
            int faceVertexCount = (int)Microsoft.Kinect.Face.FaceModel.VertexCount;

            var vertRgbTempBuffer = new ColorSpacePoint[faceVertexCount];
            ColorSpacePoint[] facePoints = new ColorSpacePoint[faceVertexCount * maxFaceCount];

            DX11StructuredBuffer lookupBuffer = DX11StructuredBuffer.CreateDynamic<uint>(device, maxFaceCount);

            //Note: since we use instancing here, we only need an index buffer for a single face
            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);

            DynamicRgbSpaceFaceStructuredBuffer faceRgbBuffer = new DynamicRgbSpaceFaceStructuredBuffer(device, maxFaceCount);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool invalidateFace = false;

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            BodyTrackingProcessor bodyTracker = new BodyTrackingProcessor();
            MultipleHdFaceProcessor multiFace = new MultipleHdFaceProcessor(sensor, bodyTracker, maxFaceCount);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            bool uploadColor = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { currentData = args.FrameData; uploadColor = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyTracker.Next(args.FrameData);
            };

            multiFace.OnFrameResultsChanged += (sender, args) =>
            {
                invalidateFace = true;
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (invalidateFace)
                {
                    int offset = 0;
                    foreach (var data in multiFace.CurrentResults)
                    {
                        var vertices = data.FaceModel.CalculateVerticesForAlignment(data.FaceAlignment).ToArray();
                        sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgbTempBuffer);
                        Array.Copy(vertRgbTempBuffer, 0, facePoints, offset, faceVertexCount);
                        offset += faceVertexCount;
                    }
                    faceRgbBuffer.Copy(context, facePoints, multiFace.CurrentResults.Count * faceVertexCount);
                    invalidateFace = false;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, currentData);
                    uploadColor = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                context.RenderTargetStack.Push(swapChain);

                context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;
                context.Context.OutputMerger.BlendState = device.BlendStates.Disabled;
                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                if (multiFace.CurrentResults.Count > 0)
                {
                    context.Context.VertexShader.SetShaderResource(0, faceRgbBuffer.ShaderView);
                    context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);
                    context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);

                    if (multiFace.CurrentResults.Count > 1)
                    {
                        uint[] buffer = new uint[multiFace.CurrentResults.Count];
                        for (uint i = 0; i < multiFace.CurrentResults.Count; i++)
                        {
                            buffer[i] = (uint)((i + 1) % multiFace.CurrentResults.Count);
                        }
                        lookupBuffer.WriteData(context, buffer);

                        context.Context.VertexShader.Set(vertexShaderIndexed);
                        context.Context.VertexShader.SetShaderResource(1, lookupBuffer.ShaderView);
                    }
                    else
                    {
                        context.Context.VertexShader.Set(vertexShader);
                    }

                    context.Context.PixelShader.Set(pixelShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.DrawInstanced(context, multiFace.CurrentResults.Count);
                }

                context.RenderTargetStack.Pop();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            faceIndexBuffer.Dispose();
            faceRgbBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();
            vertexShaderIndexed.Dispose();

            lookupBuffer.Dispose();

            sensor.Close();
        }
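
The lookup buffer written just before the instanced draw maps face i to face (i + 1) % count, so when two or more faces are tracked each face is textured with another face's projected colors (a face swap). For a straight projection where every face samples its own coordinates, an identity table is enough; a minimal sketch, reusing the lookupBuffer.WriteData call shown above:

// Identity lookup: instance i samples its own projected coordinates
// (sketch only; assumes the same lookupBuffer and context as in the sample).
uint[] identity = new uint[multiFace.CurrentResults.Count];
for (uint i = 0; i < identity.Length; i++)
{
    identity[i] = i;
}
lookupBuffer.WriteData(context, identity);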
Code example #2
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple filtered point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            ComputeShader computeShader = ShaderCompiler.CompileFromFile<ComputeShader>(device, "PointCloudFilter.fx", "CS_Filter");

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "PointCloudJointView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "PointCloudJointView.fx", "PS");

            BodyCameraPositionBuffer positionBuffer = new BodyCameraPositionBuffer(device);
            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, ColorTable);

            DX11NullGeometry nullGeom = new DX11NullGeometry(device);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;
            InstancedIndirectBuffer indirectDrawBuffer = new InstancedIndirectBuffer(device);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadBodyIndex = false;
            bool uploadBody = false;

            CameraRGBFrameData rgbFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { rgbFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            BodyIndexFrameData bodyIndexFrame = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexFrame = args.FrameData; uploadBodyIndex = true; };

            AppendPointCloudBuffer pointCloudBuffer = new AppendPointCloudBuffer(device);

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyFrameProvider = new KinectSensorBodyFrameProvider(sensor);
            bodyFrameProvider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; uploadBody = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, rgbFrame);
                    uploadCamera = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context.Context, bodyIndexFrame);
                    uploadBodyIndex = false;
                }

                if (uploadBody)
                {
                    positionBuffer.Copy(context, bodyFrame.TrackedOnly().ClosestBodies());
                    uploadBody = false;
                }

                //Prepare compute shader
                context.Context.ComputeShader.Set(computeShader);
                context.Context.ComputeShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.ComputeShader.SetShaderResource(1, bodyIndexTexture.RawView); //Set raw view here, we do not sample

                context.Context.ComputeShader.SetUnorderedAccessView(0, pointCloudBuffer.UnorderedView, 0); //Initial count of 0 resets the append buffer counter

                context.Context.Dispatch(Consts.DepthWidth / 8, Consts.DepthHeight / 8, 1); //No iDivUp here, since it's not needed
                context.Context.ComputeShader.SetUnorderedAccessView(0, null); //Unbind the UAV; the buffer cannot be read as an SRV while it is still bound as a UAV
                context.Context.CopyStructureCount(indirectDrawBuffer.ArgumentBuffer, 0, pointCloudBuffer.UnorderedView);

                //Draw filter buffer
                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, pointCloudBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, positionBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                context.Context.DrawInstancedIndirect(indirectDrawBuffer.ArgumentBuffer, 0);

                context.Context.VertexShader.SetShaderResource(0, null); //Make runtime happy

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            cameraBuffer.Dispose();
            cameraTexture.Dispose();
            bodyIndexTexture.Dispose();

            provider.Dispose();
            bodyIndexProvider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            positionBuffer.Dispose();
            colorTableBuffer.Dispose();

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();
        }
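
The Dispatch call divides the Kinect v2 depth frame (512 x 424) by the 8 x 8 thread-group size, which divides evenly, hence the "No iDivUp here" comment. For image sizes that are not a multiple of the group size, a divide-and-round-up helper is the usual fix; a hypothetical sketch (DivUp is not part of the sample or, as far as this listing shows, the library):

// Hypothetical helper: integer division rounded up, so the dispatch covers
// the whole image even when its size is not a multiple of the group size.
static int DivUp(int count, int groupSize)
{
    return (count + groupSize - 1) / groupSize;
}

// Usage sketch: context.Context.Dispatch(DivUp(width, 8), DivUp(height, 8), 1);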
Code example #3
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple hd face sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "HdFaceView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "HdFaceView.fx", "PS");

            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);
            DynamicHdFaceStructuredBuffer faceVertexBuffer = new DynamicHdFaceStructuredBuffer(device, 1);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f*0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 0.5f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool doUpload = false;

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceModel currentFaceModel = new FaceModel();
            FaceAlignment currentFaceAlignment = new FaceAlignment();

            SingleHdFaceProcessor hdFaceProcessor = new SingleHdFaceProcessor(sensor);
            hdFaceProcessor.HdFrameReceived += (sender, args) => { currentFaceModel = args.FaceModel; currentFaceAlignment = args.FaceAlignment; doUpload = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    hdFaceProcessor.AssignBody(body);
                }
                else
                {
                    hdFaceProcessor.Suspend();
                }
            };

            context.Context.Rasterizer.State = device.RasterizerStates.WireFrame;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var vertices = currentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment).ToArray();
                    faceVertexBuffer.Copy(context, vertices);
                    doUpload = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                if (hdFaceProcessor.IsValid)
                {
                    context.RenderTargetStack.Push(swapChain);
                    context.Context.VertexShader.SetShaderResource(0, faceVertexBuffer.ShaderView);
                    context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                    //Draw lines
                    context.Context.PixelShader.Set(pixelShader);
                    context.Context.VertexShader.Set(vertexShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.Draw(context, 1);
                    context.RenderTargetStack.Pop();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            faceIndexBuffer.Dispose();
            faceVertexBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();

            hdFaceProcessor.Dispose();
            sensor.Close();
        }
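
The two Transpose calls are there because SharpDX matrices are row-major while HLSL constant buffers default to column-major packing, so the matrices are transposed once on the CPU before upload. The cbCamera type itself comes from the library; its layout is presumably just the two matrices, roughly like this sketch (an assumption, not the actual definition):

// Assumed layout for a camera constant buffer holding the two matrices
// uploaded above (hypothetical; the real cbCamera is defined by the library).
// Requires System.Runtime.InteropServices for StructLayout.
[StructLayout(LayoutKind.Sequential)]
struct cbCameraSketch
{
    public SharpDX.Matrix Projection; // transposed on the CPU so the HLSL side
    public SharpDX.Matrix View;       // can keep the default column-major packing
}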
Code example #4
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect simple pilot sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //Allows drawing with Direct2D on top of the swap chain
            var context2d = new SharpDX.Direct2D1.DeviceContext(swapChain.Texture.QueryInterface<SharpDX.DXGI.Surface>());
            //Release the extra reference added by QueryInterface
            Marshal.Release(swapChain.Texture.NativePointer);

            var textFormat = new SharpDX.DirectWrite.TextFormat(device.DWriteFactory, "Arial", 16.0f);

            var blackBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.Black);
            var whiteBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.White);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;

            KinectSensorColorRGBAFrameProvider provider = new KinectSensorColorRGBAFrameProvider(sensor);
            DynamicColorRGBATextureProcessor colorProcessor = new DynamicColorRGBATextureProcessor(provider, device);

            KinectPilotProcessor pilot = KinectPilotProcessor.Default;

            KinectSensorBodyFrameProvider bodyFrameProvider = new KinectSensorBodyFrameProvider(sensor);
            bodyFrameProvider.FrameReceived += (sender, args) =>
            {
                var body = args.FrameData.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    pilot.Process(body.GetJointTable());
                }
            };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                colorProcessor.Update(context);

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorProcessor.Texture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();

                context2d.BeginDraw();

                var rect = new SharpDX.RectangleF(0, 0, 200, 130);
                context2d.FillRectangle(rect, blackBrush);
                context2d.DrawText("Elevation: " + pilot.Elevation, textFormat, rect, whiteBrush);
                rect.Top += 30;
                context2d.DrawText("Steering Y: " + pilot.SteeringY, textFormat, rect, whiteBrush);
                rect.Top += 30;
                context2d.DrawText("Steering Z: " + pilot.SterringZ, textFormat, rect, whiteBrush);
                rect.Top += 30;
                context2d.DrawText("Push: " + pilot.Push, textFormat, rect, whiteBrush);
                context2d.EndDraw();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProcessor.Dispose();
            provider.Dispose();

            sensor.Close();
        }
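
The overlay repeats the same DrawText / rect.Top += 30 sequence for each telemetry value. A small local helper makes the pattern explicit; a sketch that reuses context2d, textFormat and whiteBrush from the sample:

// Sketch: draw one "label + value" line and advance the layout rectangle,
// equivalent to the repeated DrawText calls above.
void DrawTelemetryLine(ref SharpDX.RectangleF rect, string label, object value)
{
    context2d.DrawText(label + value, textFormat, rect, whiteBrush);
    rect.Top += 30;
}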
Code example #5
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Camera Joint sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            DX11DepthStencil depthStencil = new DX11DepthStencil(device, swapChain.Width, swapChain.Height, eDepthFormat.d24s8);

            //The input signature is needed to validate the cube's input layout, so use the compile overload that returns it
            SharpDX.D3DCompiler.ShaderSignature signature;
            VertexShader vertexShader = ShaderCompiler.CompileFromFile(device, "CameraJointView.fx", "VS_Color", out signature);
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "CameraJointView.fx", "PS_Color");

            VertexShader vertexShaderLine = ShaderCompiler.CompileFromFile<VertexShader>(device, "CameraJointView.fx", "VS");
            PixelShader pixelShaderLine = ShaderCompiler.CompileFromFile<PixelShader>(device, "CameraJointView.fx", "PS_White");

            JointTableIndexBuffer indexBuffer = new JointTableIndexBuffer(device, 6);

            DX11IndexedGeometry cube = device.Primitives.Box(new Box()
            {
                Size = new Vector3(0.05f)
            });
            DX11InstancedIndexedDrawer drawer = new DX11InstancedIndexedDrawer();
            cube.AssignDrawer(drawer);

            InputLayout layout;
            var bc = new ShaderBytecode(signature);
            cube.ValidateLayout(bc, out layout);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            Color4[] statusColor = new Color4[]
            {
                Color.Red,
                Color.Yellow,
                Color.Green
            };

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f, 1.3f, 0.1f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, statusColor);

            bool doQuit = false;
            bool doUpload = false;

            int bodyCount = 0;
            KinectBody[] bodyFrame = null;
            BodyCameraPositionBuffer positionBuffer = new BodyCameraPositionBuffer(device);
            BodyJointStatusBuffer statusBuffer = new BodyJointStatusBuffer(device);

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            context.Context.OutputMerger.DepthStencilState = device.DepthStencilStates.LessReadWrite;
            context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var tracked = bodyFrame.TrackedOnly();
                    bodyCount = tracked.Count();

                    positionBuffer.Copy(context, tracked);
                    statusBuffer.Copy(context, tracked);
                    drawer.InstanceCount = tracked.Count() * Microsoft.Kinect.Body.JointCount;
                }

                context.RenderTargetStack.Push(depthStencil, false,swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                depthStencil.Clear(context);

                /*The position buffer and camera constant buffer are shared by both passes
                 * and stay in the same slots, so we bind them only once*/
                context.Context.VertexShader.SetShaderResource(0, positionBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                //Draw lines
                context.Context.PixelShader.Set(pixelShaderLine);
                context.Context.VertexShader.Set(vertexShaderLine);

                //Attach index buffer, null topology since we fetch
                indexBuffer.AttachWithLayout(context);
                indexBuffer.Draw(context,bodyCount);

                //Draw cubes
                cube.Bind(context, layout);
                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(1, statusBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);

                cube.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            depthStencil.Dispose();
            context.Dispose();
            device.Dispose();

            positionBuffer.Dispose();
            statusBuffer.Dispose();
            colorTableBuffer.Dispose();

            cameraBuffer.Dispose();

            provider.Dispose();
            cube.Dispose();
            layout.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();

            pixelShaderLine.Dispose();
            vertexShaderLine.Dispose();
            indexBuffer.Dispose();

            sensor.Close();
        }
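
The projection hard-codes a 1.3 aspect ratio. Deriving it from the swap chain keeps the joints undistorted for other window sizes; a sketch that reuses the camera and cameraBuffer objects from the sample:

// Sketch: derive the aspect ratio from the swap chain instead of the
// hard-coded 1.3f (same cbCamera / ConstantBuffer setup as above).
float aspect = (float)swapChain.Width / (float)swapChain.Height;
camera.Projection = Matrix.PerspectiveFovLH(1.57f, aspect, 0.1f, 100.0f);
camera.Projection.Transpose();
cameraBuffer.Update(context, ref camera);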
Code example #6
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect hd face projected to rgb");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedHdFaceView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ProjectedHdFaceView.fx", "PS");

            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);
            DynamicRgbSpaceFaceStructuredBuffer faceRgbBuffer = new DynamicRgbSpaceFaceStructuredBuffer(device, 1);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool doUpload = false;

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceModel currentFaceModel = new FaceModel();
            FaceAlignment currentFaceAlignment = new FaceAlignment();

            SingleHdFaceProcessor hdFaceProcessor = new SingleHdFaceProcessor(sensor);
            hdFaceProcessor.HdFrameReceived += (sender, args) => { currentFaceModel = args.FaceModel; currentFaceAlignment = args.FaceAlignment; doUpload = true; };

            bool uploadColor = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { currentData = args.FrameData; uploadColor = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    hdFaceProcessor.AssignBody(body);
                }
                else
                {
                    hdFaceProcessor.Suspend();
                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var vertices = currentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment).ToArray();
                    var vertRgb = new ColorSpacePoint[vertices.Length];
                    sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgb);

                    faceRgbBuffer.Copy(context, vertRgb);
                    doUpload = false;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, currentData);
                    uploadColor = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                context.RenderTargetStack.Push(swapChain);

                context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;
                context.Context.OutputMerger.BlendState = device.BlendStates.Disabled;
                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                if (hdFaceProcessor.IsValid)
                {
                    context.Context.Rasterizer.State = device.RasterizerStates.WireFrame;
                    context.Context.OutputMerger.BlendState = device.BlendStates.AlphaBlend;
                    context.Context.VertexShader.SetShaderResource(0, faceRgbBuffer.ShaderView);

                    //Draw lines
                    context.Context.PixelShader.Set(pixelShader);
                    context.Context.VertexShader.Set(vertexShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.Draw(context, 1);

                }

                context.RenderTargetStack.Pop();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            faceIndexBuffer.Dispose();
            faceRgbBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();

            sensor.Close();
        }
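
Unlike the multi-face sample, this one allocates a fresh ColorSpacePoint array on every face update. The vertex count of the HD face model is fixed, so the buffer can be hoisted out of the render loop; a sketch using the same FaceModel.VertexCount constant as in the multi-face sample:

// Sketch: allocate the color-space buffer once, before RenderLoop.Run.
ColorSpacePoint[] vertRgb = new ColorSpacePoint[(int)Microsoft.Kinect.Face.FaceModel.VertexCount];

// ...then inside the doUpload branch, reuse it instead of allocating per frame:
sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgb);
faceRgbBuffer.Copy(context, vertRgb);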
Code example #7
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect RGB Joint sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //The input signature is needed to validate the circle's input layout, so use the compile overload that returns it
            SharpDX.D3DCompiler.ShaderSignature signature;
            VertexShader vertexShader = ShaderCompiler.CompileFromFile(device, "ColorJointView.fx", "VS_Color", out signature);
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColorJointView.fx", "PS_Color");

            DX11IndexedGeometry circle = device.Primitives.Segment(new Segment()
            {
                Resolution = 32
            });
            DX11InstancedIndexedDrawer drawer = new DX11InstancedIndexedDrawer();
            circle.AssignDrawer(drawer);

            InputLayout layout;
            var bc = new ShaderBytecode(signature);
            circle.ValidateLayout(bc, out layout);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            Color4[] statusColor = new Color4[]
            {
                Color.Red,
                Color.Yellow,
                Color.Green
            };

            //Note: constant buffers must be at least 16 bytes, so we use a Vector4 instead of a Vector2
            SharpDX.Vector4 jointSize = new SharpDX.Vector4(0.04f,0.07f,0.0f,1.0f);
            ConstantBuffer<SharpDX.Vector4> cbSize = new ConstantBuffer<SharpDX.Vector4>(device);
            cbSize.Update(context, ref jointSize);

            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, statusColor);

            bool doQuit = false;
            bool doUpload = false;
            bool uploadImage = false;

            KinectBody[] bodyFrame = null;
            BodyColorPositionBuffer positionBuffer = new BodyColorPositionBuffer(device);
            BodyJointStatusBuffer statusBuffer = new BodyJointStatusBuffer(device);

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; doUpload = true; };

            ColorRGBAFrameData rgbFrame = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { rgbFrame = args.FrameData; uploadImage = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var tracked = bodyFrame.TrackedOnly();
                    var colorSpace = tracked.Select(kb => new ColorSpaceKinectJoints(kb, sensor.CoordinateMapper));

                    positionBuffer.Copy(context, colorSpace);
                    statusBuffer.Copy(context, tracked);
                    drawer.InstanceCount = colorSpace.Count() * Microsoft.Kinect.Body.JointCount;
                }

                if (uploadImage)
                {
                    colorTexture.Copy(context, rgbFrame);
                }

                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                circle.Bind(context, layout);

                context.Context.PixelShader.Set(pixelShader);
                context.Context.VertexShader.Set(vertexShader);
                context.Context.VertexShader.SetShaderResource(0, positionBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, statusBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cbSize.Buffer);

                circle.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            positionBuffer.Dispose();
            statusBuffer.Dispose();
            colorTableBuffer.Dispose();

            cbSize.Dispose();

            provider.Dispose();
            circle.Dispose();
            layout.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();

            sensor.Close();
        }
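
The Vector4 above carries only two meaningful values (the 0.04 and 0.07 joint radii); it is a Vector4 purely because Direct3D constant buffers must be sized in multiples of 16 bytes. An explicit struct makes the padding visible; a sketch (hypothetical type, not part of the library, requires System.Runtime.InteropServices):

// Hypothetical explicit layout for the joint-size constant buffer:
// 8 bytes of payload padded up to the 16-byte constant buffer minimum.
[StructLayout(LayoutKind.Sequential, Size = 16)]
struct cbJointSize
{
    public float RadiusX; // 0.04f in the sample
    public float RadiusY; // 0.07f in the sample
    // the remaining 8 bytes are padding
}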
Code example #8
File: Program.cs  Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect color sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //Allows drawing with Direct2D on top of the swap chain
            var context2d = new SharpDX.Direct2D1.DeviceContext(swapChain.Texture.QueryInterface<SharpDX.DXGI.Surface>());
            //Release the extra reference added by QueryInterface
            Marshal.Release(swapChain.Texture.NativePointer);

            var whiteBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.White);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyProvider = new KinectSensorBodyFrameProvider(sensor);

            bool doQuit = false;
            bool doUpload = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider provider = new KinectSensorColorRGBAFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { currentData = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceFrameResult frameResult = null;
            SingleFaceProcessor faceProcessor = new SingleFaceProcessor(sensor);
            faceProcessor.FaceResultAcquired += (sender, args) => { frameResult = args.FrameResult; };

            Func<PointF, Vector2> map = new Func<PointF, Vector2>((p) =>
            {
                float x = p.X / 1920.0f * (float)swapChain.Width;
                float y = p.Y / 1080.0f * (float)swapChain.Height;
                return new Vector2(x,y);
            });

            Func<float,float, Vector2> mapxy = new Func<float,float, Vector2>((px,py) =>
            {
                float x = px / 1920.0f * (float)swapChain.Width;
                float y = py / 1080.0f * (float)swapChain.Height;
                return new Vector2(x,y);
            });

            bodyProvider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    faceProcessor.AssignBody(body);
                }
                else
                {
                    faceProcessor.Suspend();
                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    colorTexture.Copy(context, currentData);
                }

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();

                if (frameResult != null)
                {
                    context2d.BeginDraw();
                    var colorBound = frameResult.FaceBoundingBoxInColorSpace;
                    RectangleF rect = new RectangleF();
                    Vector2 topLeft = mapxy(colorBound.Left, colorBound.Top);
                    Vector2 bottomRight = mapxy(colorBound.Right, colorBound.Bottom);
                    rect.Top = topLeft.Y;
                    rect.Bottom = bottomRight.Y;
                    rect.Left = topLeft.X;
                    rect.Right = bottomRight.X;

                    context2d.DrawRectangle(rect, whiteBrush, 3.0f);

                    foreach (PointF point in frameResult.FacePointsInColorSpace.Values)
                    {
                        var ellipse = new SharpDX.Direct2D1.Ellipse()
                        {
                            Point = map(point),
                            RadiusX = 5,
                            RadiusY = 5
                        };

                        context2d.FillEllipse(ellipse, whiteBrush);
                    }

                    context2d.EndDraw();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorTexture.Dispose();
            provider.Dispose();

            bodyProvider.Dispose();
            faceProcessor.Dispose();

            whiteBrush.Dispose();
            context2d.Dispose();

            sensor.Close();
        }
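
The two mapping lambdas hard-code the 1920x1080 color resolution. The Kinect v2 SDK exposes that size through the sensor's color frame source, so it can be read at runtime instead; a sketch replacing the map lambda (FrameDescription.Width/Height are standard Kinect v2 API):

// Sketch: read the color frame size from the sensor instead of hard-coding it.
var colorDesc = sensor.ColorFrameSource.FrameDescription;
Func<PointF, Vector2> map = p => new Vector2(
    p.X / colorDesc.Width * swapChain.Width,
    p.Y / colorDesc.Height * swapChain.Height);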