Code Example #1
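 // Creating the texture on a valid device should yield a shader resource view with a non-zero native pointer.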
 public void TestCreate()
 {
     using (DynamicColorRGBATexture texture = new DynamicColorRGBATexture(device))
     {
         Assert.AreNotEqual(texture.ShaderView.NativePointer, IntPtr.Zero);
     }
 }
Code Example #2
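 // Copies a freshly allocated color frame into the dynamic texture through the device's immediate context.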
 public void TestCopy()
 {
     using (ColorRGBAFrameData frame = new ColorRGBAFrameData())
     {
         using (DynamicColorRGBATexture texture = new DynamicColorRGBATexture(device))
         {
             texture.Copy(device.ImmediateContext, frame);
         }
     }
 }
Code Example #3
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect color sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool doUpload = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider provider = new KinectSensorColorRGBAFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { currentData = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
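                // Per frame: if a new color frame has arrived, upload it to the dynamic texture, then draw it with a full-screen triangle and present.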
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    colorTexture.Copy(context, currentData);
                    doUpload = false;
                }

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorTexture.Dispose();
            provider.Dispose();

            sensor.Close();
        }
Code Example #4
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="colorFrameProvider">Color frame provider</param>
        /// <param name="device">Direct3D11 device</param>
        public DynamicColorRGBATextureProcessor(IColorRGBAFrameProvider colorFrameProvider, Device device)
        {
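            // Validate the arguments, keep the provider, subscribe to its frames, and allocate the dynamic color texture.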
            if (colorFrameProvider == null)
                throw new ArgumentNullException("colorFrameProvider");
            if (device == null)
                throw new ArgumentNullException("device");

            this.colorFrameProvider = colorFrameProvider;
            this.colorFrameProvider.FrameReceived += FrameReceived;
            this.colorTexture = new DynamicColorRGBATexture(device);
        }
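
A minimal wiring sketch (not from the repository): it assumes an opened KinectSensor named sensor and a Direct3D11 Device named device, as in the Program.cs samples below, and that KinectSensorColorRGBAFrameProvider implements IColorRGBAFrameProvider.

        // Hypothetical usage sketch; sensor and device are assumed to exist as in the samples in this listing.
        var colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
        var textureProcessor = new DynamicColorRGBATextureProcessor(colorProvider, device);
        // Per the constructor above, the processor subscribes to colorProvider.FrameReceived
        // and owns a DynamicColorRGBATexture built on the supplied device.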
Code Example #5
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect multiple hd faces projected to rgb");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedTextureHdFaceView.fx", "VS_Simple");
            VertexShader vertexShaderIndexed = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedTextureHdFaceView.fx", "VS_Indexed");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ProjectedTextureHdFaceView.fx", "PS");

            int maxFaceCount = Consts.MaxBodyCount;
            int faceVertexCount = (int)Microsoft.Kinect.Face.FaceModel.VertexCount;

            var vertRgbTempBuffer = new ColorSpacePoint[faceVertexCount];
            ColorSpacePoint[] facePoints = new ColorSpacePoint[faceVertexCount * maxFaceCount];

            DX11StructuredBuffer lookupBuffer = DX11StructuredBuffer.CreateDynamic<uint>(device, maxFaceCount);

            //Note: since we use instancing here, we only need an index buffer for a single face
            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);

            DynamicRgbSpaceFaceStructuredBuffer faceRgbBuffer = new DynamicRgbSpaceFaceStructuredBuffer(device, maxFaceCount);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool invalidateFace = false;

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            BodyTrackingProcessor bodyTracker = new BodyTrackingProcessor();
            MultipleHdFaceProcessor multiFace = new MultipleHdFaceProcessor(sensor, bodyTracker, maxFaceCount);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            bool uploadColor = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { currentData = args.FrameData; uploadColor = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyTracker.Next(args.FrameData);
            };

            multiFace.OnFrameResultsChanged += (sender, args) =>
            {
                invalidateFace = true;
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (invalidateFace)
                {
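                    // For each tracked face, compute the model vertices for the current alignment, map them into color space, and pack them one face after another into facePoints before uploading.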
                    int offset = 0;
                    foreach (var data in multiFace.CurrentResults)
                    {
                        var vertices = data.FaceModel.CalculateVerticesForAlignment(data.FaceAlignment).ToArray();
                        sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgbTempBuffer);
                        Array.Copy(vertRgbTempBuffer, 0, facePoints, offset, faceVertexCount);
                        offset += faceVertexCount;
                    }
                    faceRgbBuffer.Copy(context, facePoints, multiFace.CurrentResults.Count * faceVertexCount);
                    invalidateFace = false;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, currentData);
                    uploadColor = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                context.RenderTargetStack.Push(swapChain);

                context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;
                context.Context.OutputMerger.BlendState = device.BlendStates.Disabled;
                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                if (multiFace.CurrentResults.Count > 0)
                {
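                    // Overlay the projected face meshes on top of the color image; with more than one face, a lookup buffer is filled and bound for the indexed vertex shader to remap instances.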
                    context.Context.VertexShader.SetShaderResource(0, faceRgbBuffer.ShaderView);
                    context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);
                    context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);

                    if (multiFace.CurrentResults.Count > 1)
                    {
                        uint[] buffer = new uint[multiFace.CurrentResults.Count];
                        for (uint i = 0; i < multiFace.CurrentResults.Count; i++)
                        {
                            buffer[i] = (uint)((i + 1) % multiFace.CurrentResults.Count);
                        }
                        lookupBuffer.WriteData(context, buffer);

                        context.Context.VertexShader.Set(vertexShaderIndexed);
                        context.Context.VertexShader.SetShaderResource(1, lookupBuffer.ShaderView);
                    }
                    else
                    {
                        context.Context.VertexShader.Set(vertexShader);
                    }

                    context.Context.PixelShader.Set(pixelShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.DrawInstanced(context, multiFace.CurrentResults.Count);
                }

                context.RenderTargetStack.Pop();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            faceIndexBuffer.Dispose();
            faceRgbBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();
            vertexShaderIndexed.Dispose();

            lookupBuffer.Dispose();

            sensor.Close();
        }
Code Example #6
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColoredPointCloudView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColoredPointCloudView.fx", "PS");

            DX11NullInstancedDrawer nulldrawer = new DX11NullInstancedDrawer();
            nulldrawer.VertexCount = Consts.DepthWidth;
            nulldrawer.InstanceCount = Consts.DepthHeight;
            DX11NullGeometry nullGeom = new DX11NullGeometry(device, nulldrawer);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadRgb = false;

            CameraRGBFrameData cameraFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            DepthToColorFrameData depthToColorFrame = new DepthToColorFrameData();
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { cameraFrame.Update(sensor.CoordinateMapper, args.DepthData); depthToColorFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            //Get coordinate map + rgb
            ColorRGBAFrameData colorFrame = new ColorRGBAFrameData();
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorFrame = args.FrameData; uploadRgb = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
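                // Per frame: upload the camera-space, depth-to-color and color textures when new data is flagged, then draw one point per depth pixel as a colored point cloud.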
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, cameraFrame);
                    depthToColorTexture.Copy(context.Context, depthToColorFrame);
                    uploadCamera = false;
                }

                if (uploadRgb)
                {
                    colorTexture.Copy(context.Context, colorFrame);
                    uploadRgb = false;
                }

                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, colorTexture.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, depthToColorTexture.ShaderView);

                context.Context.VertexShader.SetSampler(0, device.SamplerStates.LinearClamp);

                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                nullGeom.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            cameraTexture.Dispose();

            provider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            colorTexture.Dispose();
            colorProvider.Dispose();

            depthToColorFrame.Dispose();
            depthToColorTexture.Dispose();
        }
Code Example #7
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect background subtraction sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            PixelShader depthPixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "FilterDepthView.fx", "PS_Sample");
            PixelShader rgbPixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "FilterRGBView.fx", "PS_Sample");

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool swapMode = false;

            bool uploadColor = false;
            bool uploadBodyIndex = false;

            //We need color and body index for subtraction
            ColorRGBAFrameData colorData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorData = args.FrameData; uploadColor = true; };

            BodyIndexFrameData bodyIndexData = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexData = args.FrameData; uploadBodyIndex = true; };

            bool uploadColorToDepth = false;
            bool uploadDepthToColor = false;
            ColorToDepthFrameData colorToDepthData = new ColorToDepthFrameData();
            DepthToColorFrameData depthToColorData = new DepthToColorFrameData();
            KinectSensorDepthFrameProvider depthProvider = new KinectSensorDepthFrameProvider(sensor);
            depthProvider.FrameReceived += (sender, args) =>
            {
                if (!swapMode)
                {
                    colorToDepthData.Update(sensor.CoordinateMapper, args.DepthData);
                    uploadColorToDepth = true;
                }
                else
                {
                    depthToColorData.Update(sensor.CoordinateMapper, args.DepthData);
                    uploadDepthToColor = true;
                }
            };

            DynamicColorToDepthTexture colorToDepthTexture = new DynamicColorToDepthTexture(device);
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } if (args.KeyCode == Keys.Space) { swapMode = !swapMode; } };

            RenderLoop.Run(form, () =>
            {
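                // Per frame: upload whichever frames arrived, then let the selected pixel shader composite the color and body index textures using the chosen coordinate mapping.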
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, colorData);
                    uploadColor = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context, bodyIndexData);
                    uploadBodyIndex = false;
                }

                if (uploadColorToDepth)
                {
                    colorToDepthTexture.Copy(context, colorToDepthData);
                    uploadColorToDepth = false;
                }

                if (uploadDepthToColor)
                {
                    depthToColorTexture.Copy(context, depthToColorData);
                    uploadDepthToColor = false;
                }

                ShaderResourceView view = swapMode ? depthToColorTexture.ShaderView : colorToDepthTexture.ShaderView;
                PixelShader shader = swapMode ? depthPixelShader : rgbPixelShader;

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTriVS(context);

                context.Context.PixelShader.Set(shader);
                context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);
                //Note: make sure to use normalized view as we use untyped resource here
                context.Context.PixelShader.SetShaderResource(1, bodyIndexTexture.NormalizedView);
                context.Context.PixelShader.SetShaderResource(2, view);

                context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            depthProvider.Dispose();
            colorToDepthData.Dispose();
            depthToColorData.Dispose();
            colorToDepthTexture.Dispose();
            depthToColorTexture.Dispose();

            colorTexture.Dispose();
            colorProvider.Dispose();

            bodyIndexData.Dispose();
            bodyIndexProvider.Dispose();

            depthPixelShader.Dispose();
            rgbPixelShader.Dispose();

            sensor.Close();
        }
Code Example #8
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect hd face sample with color map");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColorHdFaceView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColorHdFaceView.fx", "PS");

            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);
            DynamicHdFaceStructuredBuffer faceVertexBuffer = new DynamicHdFaceStructuredBuffer(device, 1);
            DynamicRgbSpaceFaceStructuredBuffer faceRgbBuffer = new DynamicRgbSpaceFaceStructuredBuffer(device, 1);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 0.5f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool doUpload = false;

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceModel currentFaceModel = new FaceModel();
            FaceAlignment currentFaceAlignment = new FaceAlignment();

            SingleHdFaceProcessor hdFaceProcessor = new SingleHdFaceProcessor(sensor);
            hdFaceProcessor.HdFrameReceived += (sender, args) => { currentFaceModel = args.FaceModel; currentFaceAlignment = args.FaceAlignment; doUpload = true; };

            bool uploadColor = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { currentData = args.FrameData; uploadColor = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    hdFaceProcessor.AssignBody(body);
                }
                else
                {
                    hdFaceProcessor.Suspend();
                }
            };

            context.Context.Rasterizer.State = device.RasterizerStates.WireFrame;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
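                    // Recompute the face vertices for the latest alignment, map them into color space, and upload both structured buffers.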
                    var vertices = currentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment).ToArray();
                    var vertRgb = new ColorSpacePoint[vertices.Length];
                    sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgb);

                    faceVertexBuffer.Copy(context, vertices);
                    faceRgbBuffer.Copy(context, vertRgb);
                    doUpload = false;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, currentData);
                    uploadColor = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                if (hdFaceProcessor.IsValid)
                {
                    context.RenderTargetStack.Push(swapChain);
                    context.Context.VertexShader.SetShaderResource(0, faceVertexBuffer.ShaderView);
                    context.Context.VertexShader.SetShaderResource(1, faceRgbBuffer.ShaderView);
                    context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                    context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);
                    context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);

                    //Draw lines
                    context.Context.PixelShader.Set(pixelShader);
                    context.Context.VertexShader.Set(vertexShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.Draw(context, 1);
                    context.RenderTargetStack.Pop();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            cameraBuffer.Dispose();
            faceIndexBuffer.Dispose();
            faceVertexBuffer.Dispose();
            faceRgbBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();

            sensor.Close();
        }
Code Example #9
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect RGB Joint sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColorJointView.fx", "VS");
            SharpDX.D3DCompiler.ShaderSignature signature;
            VertexShader vertexShader = ShaderCompiler.CompileFromFile(device, "ColorJointView.fx", "VS_Color", out signature);
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColorJointView.fx", "PS_Color");

            DX11IndexedGeometry circle = device.Primitives.Segment(new Segment()
            {
                Resolution = 32
            });
            DX11InstancedIndexedDrawer drawer = new DX11InstancedIndexedDrawer();
            circle.AssignDrawer(drawer);

            InputLayout layout;
            var bc = new ShaderBytecode(signature);
            circle.ValidateLayout(bc, out layout);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            Color4[] statusColor = new Color4[]
            {
                Color.Red,
                Color.Yellow,
                Color.Green
            };

            //Note: a cbuffer must be at least 16 bytes, so we use a Vector4 instead of a Vector2
            SharpDX.Vector4 jointSize = new SharpDX.Vector4(0.04f,0.07f,0.0f,1.0f);
            ConstantBuffer<SharpDX.Vector4> cbSize = new ConstantBuffer<SharpDX.Vector4>(device);
            cbSize.Update(context, ref jointSize);

            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, statusColor);

            bool doQuit = false;
            bool doUpload = false;
            bool uploadImage = false;

            KinectBody[] bodyFrame = null;
            BodyColorPositionBuffer positionBuffer = new BodyColorPositionBuffer(device);
            BodyJointStatusBuffer statusBuffer = new BodyJointStatusBuffer(device);

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; doUpload = true; };

            ColorRGBAFrameData rgbFrame = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { rgbFrame = args.FrameData; uploadImage = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
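                    // Map the tracked bodies' joints into color space and upload joint positions and tracking states; one circle instance is drawn per joint.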
                    var tracked = bodyFrame.TrackedOnly();
                    var colorSpace = tracked.Select(kb => new ColorSpaceKinectJoints(kb, sensor.CoordinateMapper));

                    positionBuffer.Copy(context, colorSpace);
                    statusBuffer.Copy(context, tracked);
                    drawer.InstanceCount = colorSpace.Count() * Microsoft.Kinect.Body.JointCount;
                    doUpload = false;
                }

                if (uploadImage)
                {
                    colorTexture.Copy(context, rgbFrame);
                    uploadImage = false;
                }

                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                circle.Bind(context, layout);

                context.Context.PixelShader.Set(pixelShader);
                context.Context.VertexShader.Set(vertexShader);
                context.Context.VertexShader.SetShaderResource(0, positionBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, statusBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cbSize.Buffer);

                circle.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            positionBuffer.Dispose();
            statusBuffer.Dispose();
            colorTableBuffer.Dispose();

            cbSize.Dispose();

            provider.Dispose();
            circle.Dispose();
            layout.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();

            sensor.Close();
        }
Code Example #10
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple filtered point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            ComputeShader computeShader = ShaderCompiler.CompileFromFile<ComputeShader>(device, "ColoredPointCloudFilter.fx", "CS_Filter");

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColoredPointCloudView.fx", "VS_Indirect");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColoredPointCloudView.fx", "PS");

            DX11NullGeometry nullGeom = new DX11NullGeometry(device);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;
            InstancedIndirectBuffer indirectDrawBuffer = new InstancedIndirectBuffer(device);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadBodyIndex = false;
            bool uploadRgb = false;

            CameraRGBFrameData rgbFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);
            DepthToColorFrameData depthToColorFrame = new DepthToColorFrameData();
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { rgbFrame.Update(sensor.CoordinateMapper, args.DepthData); depthToColorFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            BodyIndexFrameData bodyIndexFrame = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexFrame = args.FrameData; uploadBodyIndex = true; };

            //Get coordinate map + rgb
            ColorRGBAFrameData colorFrame = new ColorRGBAFrameData();
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorFrame = args.FrameData; uploadRgb = true; };

            CounterPointCloudBuffer pointCloudBuffer = new CounterPointCloudBuffer(device);
            ColorPointCloudBuffer colorBuffer = new ColorPointCloudBuffer(device);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, rgbFrame);
                    depthToColorTexture.Copy(context.Context, depthToColorFrame);
                    uploadCamera = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context.Context, bodyIndexFrame);
                    uploadBodyIndex = false;
                }

                if (uploadRgb)
                {
                    colorTexture.Copy(context.Context, colorFrame);
                    uploadRgb = false;
                }

                //Prepare compute shader
                context.Context.ComputeShader.Set(computeShader);
                context.Context.ComputeShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.ComputeShader.SetShaderResource(1, bodyIndexTexture.RawView); //Set raw view here, we do not sample
                context.Context.ComputeShader.SetShaderResource(2, colorTexture.ShaderView);
                context.Context.ComputeShader.SetShaderResource(3, depthToColorTexture.ShaderView);

                context.Context.ComputeShader.SetSampler(0, device.SamplerStates.LinearClamp);

                context.Context.ComputeShader.SetUnorderedAccessView(0, pointCloudBuffer.UnorderedView, 0); //Don't forget to set count to 0
                context.Context.ComputeShader.SetUnorderedAccessView(1, colorBuffer.UnorderedView);

                context.Context.Dispatch(Consts.DepthWidth / 8, Consts.DepthHeight / 8, 1); //No iDivUp here, since it's not needed
                context.Context.ComputeShader.SetUnorderedAccessView(0, null); //Make runtime happy, and if we don't unbind we can't set as srv
                context.Context.ComputeShader.SetUnorderedAccessView(1, null);
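                // Copy the UAV's hidden structure counter into the indirect-args buffer so DrawInstancedIndirect later draws exactly the filtered points.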
                context.Context.CopyStructureCount(indirectDrawBuffer.ArgumentBuffer, 0, pointCloudBuffer.UnorderedView);

                //Draw filter buffer
                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, pointCloudBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, colorBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                context.Context.DrawInstancedIndirect(indirectDrawBuffer.ArgumentBuffer, 0);

                //Make runtime happy
                context.Context.VertexShader.SetShaderResource(0, null);
                context.Context.VertexShader.SetShaderResource(1, null);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            cameraTexture.Dispose();
            bodyIndexTexture.Dispose();

            provider.Dispose();
            bodyIndexProvider.Dispose();

            pointCloudBuffer.Dispose();
            colorBuffer.Dispose();

            colorTexture.Dispose();
            colorProvider.Dispose();

            depthToColorFrame.Dispose();
            depthToColorTexture.Dispose();

            computeShader.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();
        }
Code Example #11
File: Program.cs Project: semihguresci/kgp
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect color sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //Allows drawing with Direct2D on top of the swap chain
            var context2d = new SharpDX.Direct2D1.DeviceContext(swapChain.Texture.QueryInterface<SharpDX.DXGI.Surface>());
            //Call Release on the texture since QueryInterface does an AddRef
            Marshal.Release(swapChain.Texture.NativePointer);

            var whiteBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.White);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyProvider = new KinectSensorBodyFrameProvider(sensor);

            bool doQuit = false;
            bool doUpload = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider provider = new KinectSensorColorRGBAFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { currentData = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceFrameResult frameResult = null;
            SingleFaceProcessor faceProcessor = new SingleFaceProcessor(sensor);
            faceProcessor.FaceResultAcquired += (sender, args) => { frameResult = args.FrameResult; };

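            // Helpers that scale 1920x1080 color-space coordinates to the swap chain's current size for the Direct2D overlay.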
            Func<PointF, Vector2> map = new Func<PointF, Vector2>((p) =>
            {
                float x = p.X / 1920.0f * (float)swapChain.Width;
                float y = p.Y / 1080.0f * (float)swapChain.Height;
                return new Vector2(x,y);
            });

            Func<float,float, Vector2> mapxy = new Func<float,float, Vector2>((px,py) =>
            {
                float x = px / 1920.0f * (float)swapChain.Width;
                float y = py / 1080.0f * (float)swapChain.Height;
                return new Vector2(x,y);
            });

            bodyProvider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    faceProcessor.AssignBody(body);
                }
                else
                {
                    faceProcessor.Suspend();
                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    colorTexture.Copy(context, currentData);
                    doUpload = false;
                }

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();

                if (frameResult != null)
                {
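                    // Draw the face bounding box and facial points with Direct2D on top of the rendered color image.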
                    context2d.BeginDraw();
                    var colorBound = frameResult.FaceBoundingBoxInColorSpace;
                    RectangleF rect = new RectangleF();
                    Vector2 topLeft = mapxy(colorBound.Left, colorBound.Top);
                    Vector2 bottomRight = mapxy(colorBound.Right, colorBound.Bottom);
                    rect.Top = topLeft.Y;
                    rect.Bottom = bottomRight.Y;
                    rect.Left = topLeft.X;
                    rect.Right = bottomRight.X;

                    context2d.DrawRectangle(rect, whiteBrush, 3.0f);

                    foreach (PointF point in frameResult.FacePointsInColorSpace.Values)
                    {
                        var ellipse = new SharpDX.Direct2D1.Ellipse()
                        {
                            Point = map(point),
                            RadiusX = 5,
                            RadiusY = 5
                        };

                        context2d.FillEllipse(ellipse, whiteBrush);
                    }

                    context2d.EndDraw();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorTexture.Dispose();
            provider.Dispose();

            bodyProvider.Dispose();
            faceProcessor.Dispose();

            whiteBrush.Dispose();
            context2d.Dispose();

            sensor.Close();
        }
Code Example #12
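 // Constructs the texture with a null device to exercise the constructor's argument validation.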
 public void TestNullDevice()
 {
     using (DynamicColorRGBATexture texture = new DynamicColorRGBATexture(null))
     {
     }
 }