Example #1
 public override Boolean Validate(RenderDevice device)
 {
     var accuracy = PointerAccuracy.Fine;
     var desired = accuracy;
     //Nothing yet, so we assume we have a headless browser
     return desired == PointerAccuracy.None;
 }
Example #2
 public override Boolean Validate(RenderDevice device)
 {
     var interlace = false;
     var desired = interlace;
     var available = device.IsInterlaced;
     return desired == available;
 }
Example #3
 public override Boolean Validate(RenderDevice device)
 {
     var grid = false;
     var desired = grid;
     var available = device.IsGrid;
     return desired == available;
 }
Example #4
        public override void Render(RenderDevice renderer, GameTime gameTime)
        {
            renderer.GraphicsDevice.ClearColor = Color.Black;
            
            tiles.ForEach(x => x.Render(renderer, gameTime));

            //renderer.DrawTexture(grassTexture2D, new Vector2(20, 20), Color.White);
        }
Example #5
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect color sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool doUpload = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider provider = new KinectSensorColorRGBAFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { currentData = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    colorTexture.Copy(context, currentData);
                    doUpload = false;
                }

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorTexture.Dispose();
            provider.Dispose();

            sensor.Close();
        }
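The pattern above keeps the Kinect event handler cheap: FrameReceived only stores the latest frame and raises a flag, and the actual GPU upload happens inside the RenderLoop.Run body on the rendering thread. Below is a minimal sketch of that handoff, with a hypothetical byte[] frame standing in for ColorRGBAFrameData:

using System;
using System.Threading;

static class LatestFrameHandoff
{
    // Stand-ins for the sample's currentData/doUpload pair.
    static volatile bool doUpload;
    static byte[] currentData;

    static void Main()
    {
        // Producer: plays the role of provider.FrameReceived, firing on its own thread.
        var producer = new Thread(() =>
        {
            for (int i = 0; i < 5; i++)
            {
                currentData = new byte[] { (byte)i };
                doUpload = true;              // publish "a new frame is ready"
                Thread.Sleep(30);
            }
        });
        producer.Start();

        // Consumer: plays the role of the RenderLoop.Run body, polling once per frame
        // and uploading each frame at most once.
        for (int frame = 0; frame < 20; frame++)
        {
            if (doUpload)
            {
                Console.WriteLine($"Uploading frame {currentData[0]}");
                doUpload = false;
            }
            Thread.Sleep(10);
        }
        producer.Join();
    }
}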
Example #6
        public override Boolean Validate(RenderDevice device)
        {
            var state = ScriptingState.None;
            var options = device.Options;
            var available = ScriptingState.None;

            if (options != null && options.IsScripting())
                available = device.DeviceType == RenderDevice.Kind.Screen ? ScriptingState.Enabled : ScriptingState.InitialOnly;

            return state == available;
        }
Example #7
        public override Boolean Validate(RenderDevice device)
        {
            var ratio = Tuple.Create(1f, 1f);
            var desired = ratio.Item1 / ratio.Item2;
            var available = (Single)device.ViewPortWidth / (Single)device.ViewPortHeight;

            if (IsMaximum)
                return available <= desired;
            else if (IsMinimum)
                return available >= desired;
            
            return desired == available;
        }
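The IsMinimum/IsMaximum branching in the example above recurs throughout the Validate overrides that follow: a min- prefixed feature passes when the available value is at least the desired one, a max- prefixed feature when it is at most the desired one, and the plain feature only on an exact match. A minimal standalone sketch of that comparison, with illustrative names not tied to any particular library:

using System;

static class MediaComparison
{
    // Mirrors the min-/plain/max- comparison used by the Validate overrides in these examples.
    static bool Satisfies(float available, float desired, bool isMinimum, bool isMaximum)
    {
        if (isMaximum)
            return available <= desired;   // e.g. "max-aspect-ratio"
        if (isMinimum)
            return available >= desired;   // e.g. "min-aspect-ratio"
        return available == desired;       // exact match for the plain feature
    }

    static void Main()
    {
        float available = 1920f / 1080f;   // what the device reports
        float desired   = 16f / 9f;        // what the media query asks for
        Console.WriteLine(Satisfies(available, desired, isMinimum: true, isMaximum: false)); // True
    }
}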
Example #8
        public override Boolean Validate(RenderDevice device)
        {
            var res = new Resolution(72f, Resolution.Unit.Dpi);
            var desired = res.To(Resolution.Unit.Dpi);
            var available = (Single)device.Resolution;

            if (IsMaximum)
                return available <= desired;
            else if (IsMinimum)
                return available >= desired;

            return desired == available;
        }
Example #9
        public override Boolean Validate(RenderDevice device)
        {
            var color = 1;
            var desired = color;
            var available = Math.Pow(device.ColorBits, 2);

            if (IsMaximum)
                return available <= desired;
            else if (IsMinimum)
                return available >= desired;

            return desired == available;
        }
Example #10
        public override Boolean Validate(RenderDevice device)
        {
            var frequency = UpdateFrequency.Normal;
            var desired = frequency;
            var available = device.Frequency;

            if (available >= 30)
                return desired == UpdateFrequency.Normal;
            else if (available > 0)
                return desired == UpdateFrequency.Slow;

            return desired == UpdateFrequency.None;
        }
Example #11
        public override Boolean Validate(RenderDevice device)
        {
            var value = 1f;
            var desired = value;
            var available = device.Resolution / 96f;

            if (IsMaximum)
                return available <= desired;
            else if (IsMinimum)
                return available >= desired;

            return desired == available;
        }
Example #12
        public override Boolean Validate(RenderDevice device)
        {
            var length = Length.Zero;
            var desired = length.ToPixel();
            var available = (Single)device.ViewPortHeight;

            if (IsMaximum)
                return available <= desired;
            else if (IsMinimum)
                return available >= desired;

            return desired == available;
        }
Example #13
        public override Boolean Validate(RenderDevice device)
        {
            var index = 0;
            var desired = index;
            var available = device.ColorBits;

            if (IsMaximum)
                return available <= desired;
            else if (IsMinimum)
                return available >= desired;

            return desired == available;
        }
Example #14
    private void UpdateModelViewMatrix() // called whenever the cameraAngle and cameraDistance properties change
    {
        #region Updating the model-view matrix ---------------------------------------------------------
        RenderDevice.AddScheduleTask((gl, s) => {
            gl.MatrixMode(OpenGL.GL_MODELVIEW);
            gl.LoadIdentity();

            gl.Translate(Shift.X, Shift.Y, 0.0);
            gl.Rotate(Rotation.X, 1.0, 0.0, 0.0);
            gl.Rotate(Rotation.Y, 0.0, 1.0, 0.0);
            gl.Rotate(Rotation.Z, 0.0, 0.0, 1.0);
            gl.Translate(Position.X, Position.Y, Position.Z);
            gl.Scale(Scale.X, Scale.Y, Scale.Z);
        });
        #endregion
    }
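In the fixed-function pipeline each gl.Translate/gl.Rotate/gl.Scale call post-multiplies the current matrix, so the last call in the block (gl.Scale) is the first transform applied to a vertex and the opening gl.Translate(Shift...) is applied last. A rough, non-authoritative equivalent of that composition using System.Numerics (row-vector convention, with illustrative values for Shift, Rotation, Position and Scale):

using System;
using System.Numerics;

static class ModelMatrixSketch
{
    static void Main()
    {
        // Illustrative stand-ins for Shift, Rotation (degrees), Position and Scale.
        var shift    = new Vector3(1f, 2f, 0f);
        var rotation = new Vector3(30f, 45f, 60f);
        var position = new Vector3(0f, 0f, -5f);
        var scale    = new Vector3(2f, 2f, 2f);

        float Rad(float deg) => deg * MathF.PI / 180f;

        // System.Numerics uses row vectors (v' = v * M), so the leftmost matrix is the
        // transform applied first. The fixed-function sequence above applies Scale first
        // and the Shift translation last, which corresponds to:
        Matrix4x4 model =
            Matrix4x4.CreateScale(scale) *
            Matrix4x4.CreateTranslation(position) *
            Matrix4x4.CreateRotationZ(Rad(rotation.Z)) *
            Matrix4x4.CreateRotationY(Rad(rotation.Y)) *
            Matrix4x4.CreateRotationX(Rad(rotation.X)) *
            Matrix4x4.CreateTranslation(shift.X, shift.Y, 0f);

        Console.WriteLine(model);
    }
}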
Example #15
        public override Boolean Validate(RenderDevice device)
        {
            var desired   = _index;
            var available = device.MonochromeBits;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #16
        public override Boolean Validate(RenderDevice device)
        {
            var desired   = _color;
            var available = Math.Pow(device.ColorBits, 2);

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #17
        public override Boolean Validate(RenderDevice device)
        {
            var desired   = (Single)_ratio.Item1 / (Single)_ratio.Item2;
            var available = (Single)device.DeviceWidth / (Single)device.DeviceHeight;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #18
        public override Boolean Validate(RenderDevice device)
        {
            var desired   = _res.To(Resolution.Unit.Dpi);
            var available = (Single)device.Resolution;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #19
 private void UpdateDisplayMode() // called whenever the cameraAngle and cameraDistance properties change
 {
     #region Updating the model-view matrix ---------------------------------------------------------
     RenderDevice.AddScheduleTask((gl, s) =>
     {
         if (Wireframe)
         {
             gl.PolygonMode(OpenGL.GL_FRONT_AND_BACK, OpenGL.GL_LINE);
         }
         else
         {
             gl.PolygonMode(OpenGL.GL_FRONT_AND_BACK, OpenGL.GL_FILL);
         }
     });
     #endregion
 }
Example #20
        public override Boolean Validate(RenderDevice device)
        {
            var desired   = _length.ToPixel();
            var available = (Single)device.DeviceWidth;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #21
        public override Boolean Validate(RenderDevice device)
        {
            var ratio     = Tuple.Create(1f, 1f);
            var desired   = ratio.Item1 / ratio.Item2;
            var available = device.ViewPortWidth / (Single)device.ViewPortHeight;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #22
        public override Boolean Validate(RenderDevice device)
        {
            var frequency = UpdateFrequency.Normal;
            var desired   = frequency;
            var available = device.Frequency;

            if (available >= 30)
            {
                return(desired == UpdateFrequency.Normal);
            }
            else if (available > 0)
            {
                return(desired == UpdateFrequency.Slow);
            }

            return(desired == UpdateFrequency.None);
        }
Example #23
        public override Boolean Validate(RenderDevice device)
        {
            var value     = 1f;
            var desired   = value;
            var available = device.Resolution / 96f;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #24
        public override Boolean Validate(RenderDevice device)
        {
            var length    = Length.Zero;
            var desired   = length.ToPixel();
            var available = (Single)device.ViewPortHeight;

            if (IsMaximum)
            {
                return(available <= desired);
            }
            else if (IsMinimum)
            {
                return(available >= desired);
            }

            return(desired == available);
        }
Example #25
        // This updates the visual sector
        public void Update(RenderDevice graphics)
        {
            int numverts = 0;
            int v        = 0;

            // Trash geometry buffer
            if (geobuffer != null)
            {
                geobuffer.Dispose();
            }
            geobuffer = null;

            // Count the number of vertices there are
            foreach (VisualGeometry g in allgeometry)
            {
                if (g.Vertices != null)
                {
                    numverts += g.Vertices.Length;
                }
            }

            // Any vertices?
            if (numverts > 0)
            {
                // Make a new buffer
                geobuffer = new VertexBuffer();
                graphics.SetBufferData(geobuffer, numverts, VertexFormat.World);

                // Fill the buffer
                foreach (VisualGeometry g in allgeometry)
                {
                    if ((g.Vertices != null) && (g.Vertices.Length > 0))
                    {
                        graphics.SetBufferSubdata(geobuffer, v, g.Vertices);
                        g.VertexOffset = v;
                        v += g.Vertices.Length;
                    }
                }
            }

            this.sector.UpdateFogColor();             //mxd

            // Done
            updategeo = false;
        }
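Update builds the sector geometry in two passes: first count every vertex so a single buffer of the right size can be allocated, then copy each geometry block at a running offset and record that offset in g.VertexOffset. The same two-pass packing, stripped of the engine types (all names here are illustrative):

using System;
using System.Collections.Generic;

static class TwoPassFill
{
    static void Main()
    {
        // Stand-ins for the VisualGeometry.Vertices arrays: several blocks of arbitrary length.
        var blocks = new List<float[]> { new float[3], new float[5], new float[2] };

        // Pass 1: count, so one buffer can be allocated up front.
        int total = 0;
        foreach (var b in blocks) total += b.Length;

        // Pass 2: copy each block at a running offset and remember where it starts.
        var buffer  = new float[total];
        var offsets = new int[blocks.Count];
        int v = 0;
        for (int i = 0; i < blocks.Count; i++)
        {
            Array.Copy(blocks[i], 0, buffer, v, blocks[i].Length);
            offsets[i] = v;          // analogous to g.VertexOffset = v
            v += blocks[i].Length;
        }

        Console.WriteLine($"Packed {total} vertices into one buffer.");
    }
}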
Example #26
    protected override void OnMainWindowLoad(object sender, EventArgs args)
    {
        // Example of changing the appearance of the controls (optional code)
        base.RenderDevice.BufferBackCol = 0xB0;
        base.ValueStorage.Font          = new Font("Arial", 12f);
        //base.ValueStorage.ForeColor = Color.Firebrick;
        base.ValueStorage.RowHeight = 30;
        //base.ValueStorage.BackColor = Color.BlanchedAlmond;
        //base.MainWindow.BackColor = Color.DarkGoldenrod;
        base.ValueStorage.RightColWidth = 50;
        base.VSPanelWidth = 350;
        //base.VSPanelLeft = true;
        base.MainWindow.Size = new Size(960, 640);

        // Mouse control with the left and right mouse buttons held down
        base.RenderDevice.MouseMoveWithLeftBtnDown += (s, e) =>
        {
            ShiftX += e.MovDeltaX;
            ShiftY -= e.MovDeltaY;
        };
        base.RenderDevice.MouseMoveWithRightBtnDown += (s, e) =>
        {
            //Rotation.X -= e.MovDeltaY;
            //Rotation.Y += e.MovDeltaX;
            Rotation = new DVector3(Rotation.X - e.MovDeltaY, Rotation.Y + e.MovDeltaX, Rotation.Z);
        };

        // Keyboard controls
        RenderDevice.HotkeyRegister(Keys.Up, (s, e) => ++ ShiftY);
        RenderDevice.HotkeyRegister(Keys.Down, (s, e) => -- ShiftY);
        RenderDevice.HotkeyRegister(Keys.Left, (s, e) => -- ShiftX);
        RenderDevice.HotkeyRegister(Keys.Right, (s, e) => ++ ShiftX);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Up, (s, e) => ShiftY    += 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Down, (s, e) => ShiftY  -= 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Left, (s, e) => ShiftX  -= 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Right, (s, e) => ShiftX += 10);

        InitialWidth  = base.RenderDevice.Width;
        InitialHeight = base.RenderDevice.Height;

        ComputeObject();
        //Cube();
        //Tetrahedron();
        //Prism();
    }
Example #27
        /// <summary>
        ///     Renders the object.
        /// </summary>
        /// <param name="renderer">The Renderer.</param>
        public void Render(RenderDevice renderer)
        {
            if (!Visible) return;

            if (!InstructionFlag)
            {
                renderer.DrawTexture(_texture, _position, _opacity);
                Vector2 dim = renderer.MeasureString(Instruction, _font);
                renderer.DrawString(Instruction, _font, new Vector2(_position.X - dim.X/2 + 28, _position.Y - 30),
                    Color.White);
            }
            else
            {
                renderer.DrawTexture(_texture, _position, _opacity);
                Vector2 dim = renderer.MeasureString(Instruction2, _font);
                renderer.DrawString(Instruction2, _font, new Vector2(_position.X - dim.X/2 + 28, _position.Y - 30),
                    Color.White);
            }
        }
Example #28
        public GUI(string textureName, string defaultFontName, int defaultFontSize)
        {
            PlatformManager = new PlatformManager(CreateWxPlatformManager());

            Engine = new Engine(PlatformManager);
            Engine.Init();

            // Search for the location of the main assets folder.
            string assetsPath;
            if (!SearchForAssetsDirectory(out assetsPath))
                throw new Exception("Editor assets were not found");

            Archive = new ArchiveVirtual();
            Archive.MountDirectories(assetsPath, Allocator.GetHeap());

            ResourceManager = Engine.ResourceManager;
            ResourceManager.Archive = Archive;

            WindowManager = Engine.WindowManager;

            MainWindow = CreateWindow();
            var context = MainWindow.CreateContext(new RenderContextSettings());

            MainView = MainWindow.CreateView();
            MainView.ClearColor = Color.Red;

            MainWindow.MakeCurrent();
            context.Init();

            RenderDevice = Engine.RenderDevice;
            RenderDevice.ActiveView = MainView;

            var options = new ResourceLoadOptions {Name = textureName, AsynchronousLoad = false};
            var imageHandle = ResourceManager.LoadResource<Image>(options);
            if (imageHandle.Id == 0)
                return;

            Renderer = new GwenRenderer();
            Skin = new TexturedSkin(Renderer, imageHandle, new Flood.GUI.Font(defaultFontName, defaultFontSize));

            if (!Directory.Exists(StoredValuesDirectory))
                Directory.CreateDirectory(StoredValuesDirectory);
        }
Example #29
        /// <summary>
        ///     Renders the game.
        /// </summary>
        /// <param name="renderer">The Renderer.</param>
        /// <param name="gameTime">The GameTime.</param>
        public override void OnRendering(RenderDevice renderer, GameTime gameTime)
        {
            renderer.Begin();
            DrawBackground(renderer);
            DrawRenderHint(renderer);
            if (_gameStarted)
            {
                DrawPipes(renderer);
                DrawPlayer(renderer);
                DrawScore(renderer);
            }

            if (!_gameStarted || _isDead)
            {
                _instructions.InstructionFlag = _isDead;
                DrawInstructions(renderer);
            }

            renderer.End();
        }
Example #30
 // Token: 0x06000072 RID: 114 RVA: 0x0000C8BC File Offset: 0x0000AABC
 protected override void Draw(GameTime gameTime)
 {
     if (this.displayTitleUpdateMessage)
     {
         EngineCallbacks.ShowLiveUpdateMessage();
     }
     if (StageSystem.stageMode == 2)
     {
         EngineCallbacks.ProcessMainLoop();
     }
     if (RenderDevice.highResMode == 0)
     {
         RenderDevice.FlipScreen();
     }
     else
     {
         RenderDevice.FlipScreenHRes();
     }
     base.Draw(gameTime);
 }
Example #31
    // ---------------------------------------------------------------------------------
    // ---------------------------------------------------------------------------------
    // --- Part 2: Initialization of the application's data, controls and behavior
    // ---------------------------------------------------------------------------------


    // If any data initialization is needed at application startup, it can be implemented
    // in this event override, which is called once before the application window is shown
    protected override void OnMainWindowLoad(object sender, EventArgs args)
    {
        // The generated application has two main controls:
        // base.RenderDevice - the left part of the screen, used for drawing
        // base.ValueStorage - the right panel for displaying and editing properties

        // Example of changing the appearance of the controls (optional code)
        base.RenderDevice.BufferBackCol = 0xB0;
        base.ValueStorage.Font          = new Font("Arial", 12f);
        base.ValueStorage.ForeColor     = Color.Firebrick;
        base.ValueStorage.RowHeight     = 30;
        base.ValueStorage.BackColor     = Color.BlanchedAlmond;
        base.MainWindow.BackColor       = Color.DarkGoldenrod;
        base.ValueStorage.RightColWidth = 50;
        base.VSPanelWidth    = 300;
        base.VSPanelLeft     = true;
        base.MainWindow.Size = new Size(960, 640);


        // Mouse control with the left and right mouse buttons held down
        base.RenderDevice.MouseMoveWithRightBtnDown += (s, e) => {
            ShiftX += e.MovDeltaX;
            ShiftY -= e.MovDeltaY;
        };
        base.RenderDevice.MouseMoveWithLeftBtnDown += (s, e) => {
            ShiftX += 10 * e.MovDeltaX;
            ShiftY -= 10 * e.MovDeltaY;
        };

        // Keyboard controls
        RenderDevice.HotkeyRegister(Keys.Up, (s, e) => ++ ShiftY);
        RenderDevice.HotkeyRegister(Keys.Down, (s, e) => -- ShiftY);
        RenderDevice.HotkeyRegister(Keys.Left, (s, e) => -- ShiftX);
        RenderDevice.HotkeyRegister(Keys.Right, (s, e) => ++ ShiftX);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Up, (s, e) => ShiftY    += 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Down, (s, e) => ShiftY  -= 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Left, (s, e) => ShiftX  -= 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Right, (s, e) => ShiftX += 10);

        // ... compute any parameters or perform any additional initialization here if needed
    }
Example #32
        /// <summary>
        ///     Renders the object.
        /// </summary>
        /// <param name="renderer">The Renderer.</param>
        public void Render(RenderDevice renderer)
        {
            if (!Visible)
            {
                return;
            }

            if (!InstructionFlag)
            {
                renderer.DrawTexture(_texture, _position, _opacity);
                Vector2 dim = renderer.MeasureString(Instruction, _font);
                renderer.DrawString(Instruction, _font, new Vector2(_position.X - dim.X / 2 + 28, _position.Y - 30),
                                    Color.White);
            }
            else
            {
                renderer.DrawTexture(_texture, _position, _opacity);
                Vector2 dim = renderer.MeasureString(Instruction2, _font);
                renderer.DrawString(Instruction2, _font, new Vector2(_position.X - dim.X / 2 + 28, _position.Y - 30),
                                    Color.White);
            }
        }
Example #33
    private void UpdateModelViewMatrix() // called whenever the cameraAngle and cameraDistance properties change
    {
        #region Updating the model-view matrix ---------------------------------------------------------
        RenderDevice.AddScheduleTask((gl, s) =>
        {
            gl.MatrixMode(OpenGL.GL_MODELVIEW);
            var deg2rad = Math.PI / 180; // The camera rotates, not the object itself
            double phi  = deg2rad * cameraAngle.X;
            double teta = deg2rad * cameraAngle.Y;
            double psi  = deg2rad * cameraAngle.Z;
            // rotation matrices about the coordinate axes
            DMatrix3 RX = new DMatrix3(1, 0, 0,
                                       0, Math.Cos(phi), -Math.Sin(phi),
                                       0, Math.Sin(phi), Math.Cos(phi));

            DMatrix3 RY = new DMatrix3(Math.Cos(teta), 0, Math.Sin(teta),
                                       0, 1, 0,
                                       -Math.Sin(teta), 0, Math.Cos(teta));

            DMatrix3 RZ = new DMatrix3(Math.Cos(psi), -Math.Sin(psi), 0,
                                       Math.Sin(psi), Math.Cos(psi), 0,
                                       0, 0, 1);
            var cameraTransform   = (RX * RY) * RZ;
            var cameraPosition    = cameraTransform * new DVector3(0, 0, cameraDistance);
            var cameraUpDirection = cameraTransform * new DVector3(0, 1, 0);
            // World matrix (transforms the local coordinate system into world coordinates)
            mMatrix = DMatrix4.Identity; // no transformations are applied to the object
            // View matrix (transition from the world coordinate system to the camera coordinate system)
            vMatrix = LookAt(DMatrix4.Identity, cameraPosition, DVector3.Zero, cameraUpDirection);
            // ModelView matrix
            var mvMatrix = vMatrix * mMatrix;
            gl.LoadMatrix(mvMatrix.ToArray(true));


            //gl.Rotate(45, 1f, 0f, 0);
            //gl.Rotate(-45, 0f, 1f, 0);
        });
        #endregion
    }
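The lambda above rotates the camera rather than the object: the Euler angles spin a default eye position and up vector, and a look-at matrix is then built toward the origin. A compact sketch of the same idea with System.Numerics; the composition order follows its row-vector convention and is illustrative rather than a one-to-one port of the DMatrix3 code:

using System;
using System.Numerics;

static class OrbitCameraSketch
{
    static void Main()
    {
        // Illustrative stand-ins for cameraAngle (in degrees) and cameraDistance.
        var cameraAngle = new Vector3(30f, 45f, 0f);
        float cameraDistance = 5f;

        float Rad(float deg) => deg * MathF.PI / 180f;

        // Rotate the camera, not the object: spin the default eye position and up vector
        // by the Euler angles, then build a view matrix that looks back at the origin.
        Matrix4x4 rotation = Matrix4x4.CreateRotationX(Rad(cameraAngle.X)) *
                             Matrix4x4.CreateRotationY(Rad(cameraAngle.Y)) *
                             Matrix4x4.CreateRotationZ(Rad(cameraAngle.Z));

        Vector3 eye = Vector3.Transform(new Vector3(0f, 0f, cameraDistance), rotation);
        Vector3 up  = Vector3.Transform(Vector3.UnitY, rotation);

        Matrix4x4 view = Matrix4x4.CreateLookAt(eye, Vector3.Zero, up);
        Console.WriteLine(view);
    }
}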
Example #34
        public static IDxTexture2D LoadFromFile(RenderDevice device, string path, CancellationToken ct, IProgress <bool> started = null)
        {
            if (ct.IsCancellationRequested)
            {
                throw new OperationCanceledException();
            }

            if (started != null)
            {
                started.Report(true);
            }
            //Thread.Sleep(4000);

            IDxTexture2D texture = TextureLoader.LoadFromFile(device, path);

            if (ct.IsCancellationRequested)
            {
                texture.Dispose();
                throw new OperationCanceledException();
            }
            return(texture);
        }
Example #35
        public EditorBase()
        {
            PlatformManager = new PlatformManager(CreateWxPlatformManager());

            Engine = new Engine(PlatformManager);
            Engine.Init();

            // Search for the location of the main assets folder.
            string assetsPath;
            if (!SearchForAssetsDirectory(out assetsPath))
                throw new Exception("Editor assets were not found");

            Archive = new ArchiveVirtual();
            Archive.MountDirectories(assetsPath, Allocator.GetHeap());

            ResourceManager = Engine.ResourceManager;
            ResourceManager.Archive = Archive;

            EditorWindow = new EditorWindow();

            WindowManager = Engine.WindowManager;

            MainWindow = CreateWindow();
            var context = MainWindow.CreateContext(new RenderContextSettings());

            MainView = MainWindow.CreateView();
            MainView.ClearColor = Color.Red;

            MainWindow.Idle += Update;
            MainWindow.Render += Render;
            MainWindow.TargetResize += Resize;
            MainWindow.Show(true);

            MainWindow.MakeCurrent();
            context.Init();

            RenderDevice = Engine.RenderDevice;
        }
Example #36
        private Texture GetTexture(RenderDevice device)
        {
            if (_textureSource == null)
            {
                return(null);
            }

            uint width  = Width;
            uint height = Height;

            if (_renderTarget == null /* ||
                                       * _renderTarget.Texture.Width != width ||
                                       * _renderTarget.Texture.Height != height*/)
            {
                _renderTarget = device.CreateRenderTarget("MediaPlayer", width, height, 1, false);
            }

            if (_stream != null)
            {
                if (HasNewFrame(_state))
                {
                    Tile tile = new Tile();
                    tile.X      = 0;
                    tile.Y      = 0;
                    tile.Width  = width;
                    tile.Height = height;
                    Tile[] tiles = new Tile[] { tile };

                    device.SetRenderTarget(_renderTarget);

                    RenderFrame(_state);

                    device.ResolveRenderTarget(_renderTarget, tiles);
                }
            }

            return(_renderTarget.Texture);
        }
Example #37
        /// <summary>
        ///     Renders the object.
        /// </summary>
        /// <param name="renderer">The Renderer.</param>
        public void Render(RenderDevice renderer)
        {
            lock (_lockObj)
            {
                foreach (Pipe pipe in _pipes)
                {
                    if (pipe.Position.X > -60) //only draw if it's in our view
                    {
                        renderer.DrawTexture(_pipeBody,
                                             new Rectangle(pipe.Position.X, pipe.Position.Y, 44, pipe.TopPipeHeight), Opacity);
                        renderer.DrawTexture(_pipeBottom,
                                             new Vector2(pipe.Position.X - 1, pipe.Position.Y + pipe.TopPipeHeight), Opacity);

                        renderer.DrawTexture(_pipeBody,
                                             new Rectangle(pipe.Position.X, pipe.BottomPipeY, 44, pipe.BottomPipeHeight), Opacity);

                        renderer.DrawTexture(_pipeBody, new Vector2(pipe.Position.X, pipe.BottomPipeY), Opacity);

                        renderer.DrawTexture(_pipeTop, new Vector2(pipe.Position.X - 1, pipe.BottomPipeY - 22), Opacity);
                    }
                }
            }
        }
Example #38
        internal void Init(Flood.Window window, RenderDevice renderDevice,
            RenderContext renderContext)
        {
            nativeWindow = window;
            device = renderDevice;

            if (renderContext == null)
            {
                RenderContext = window.CreateContext(new RenderContextSettings());
                window.MakeCurrent();
                RenderContext.Init();
            }
            else
            {
                RenderContext = renderContext;
                nativeWindow.Context = RenderContext;
            }

            view = window.CreateView();

            window.Render += Render;
            window.TargetResize += Resize;
        }
Example #39
        /// <summary>
        /// Validates the given medium against the provided rendering device.
        /// </summary>
        /// <param name="device">The current render device.</param>
        /// <returns>True if the constraints are satisfied, otherwise false.</returns>
        public Boolean Validate(RenderDevice device)
        {
            if (!String.IsNullOrEmpty(Type) && Types.Contains(Type) == IsInverse)
            {
                return(false);
            }

            if (IsInvalid(device, Keywords.Screen, RenderDevice.Kind.Screen) ||
                IsInvalid(device, Keywords.Speech, RenderDevice.Kind.Speech) ||
                IsInvalid(device, Keywords.Print, RenderDevice.Kind.Printer))
            {
                return(false);
            }

            foreach (var feature in _features)
            {
                if (feature.Validate(device) == IsInverse)
                {
                    return(false);
                }
            }

            return(true);
        }
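The early return above hinges on comparing Types.Contains(Type) against IsInverse: a plain medium fails the check when its type is not in the known set, while a negated ("not") medium fails when it is. A small sketch of just that boolean check, with hypothetical names:

using System;
using System.Collections.Generic;

static class MediumTypeCheckSketch
{
    // Mirrors the "Types.Contains(Type) == IsInverse" early return above: a plain
    // medium fails when its type is unknown, a negated ("not") medium fails when
    // the type is recognized.
    static bool PassesTypeCheck(string type, HashSet<string> knownTypes, bool isInverse)
    {
        if (!String.IsNullOrEmpty(type) && knownTypes.Contains(type) == isInverse)
            return false;

        return true;
    }

    static void Main()
    {
        var known = new HashSet<string> { "screen", "print", "speech" };
        Console.WriteLine(PassesTypeCheck("screen", known, isInverse: false)); // True
        Console.WriteLine(PassesTypeCheck("screen", known, isInverse: true));  // False
    }
}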
Example #40
        public Application()
        {
            PlatformManager = new PlatformManager(CreateWxPlatformManager());

            Engine = new Engine(PlatformManager);
            Engine.Init();

            // Search for the location of the main assets folder.
            string assetsPath;
            if (!SearchForAssetsDirectory(out assetsPath))
                throw new Exception("Editor assets were not found");

            Archive = new ArchiveVirtual();
            Archive.MountDirectories(assetsPath, Allocator.GetHeap());

            ResourceManager = Engine.ResourceManager;
            ResourceManager.Archive = Archive;

            NativeWindowManager = Engine.WindowManager;

            RenderDevice = Engine.RenderDevice;

            ModuleManager = new ModuleManager();

            RemotingManager = new RemotingManager(ModuleManager);

            ModuleManager.Init(RemotingManager.ServiceManager);

            //Initiate global services
            WindowManager = new Windows.WindowManager(this);

            RemotingManager.ServiceManager.GetCreateImplementation<IWindowManager>(WindowManager);

            // FIXME: Introduce a better system to get at global objects (dependency injection?)
            RemotingManager.ServiceManager.Application = this;
        }
Example #41
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Camera Joint sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            DX11DepthStencil depthStencil = new DX11DepthStencil(device, swapChain.Width, swapChain.Height, eDepthFormat.d24s8);

            //VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColorJointView.fx", "VS");
            SharpDX.D3DCompiler.ShaderSignature signature;
            VertexShader vertexShader = ShaderCompiler.CompileFromFile(device, "CameraJointView.fx", "VS_Color", out signature);
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "CameraJointView.fx", "PS_Color");

            VertexShader vertexShaderLine = ShaderCompiler.CompileFromFile<VertexShader>(device, "CameraJointView.fx", "VS");
            PixelShader pixelShaderLine = ShaderCompiler.CompileFromFile<PixelShader>(device, "CameraJointView.fx", "PS_White");

            JointTableIndexBuffer indexBuffer = new JointTableIndexBuffer(device, 6);

            DX11IndexedGeometry cube = device.Primitives.Box(new Box()
            {
                Size = new Vector3(0.05f)
            });
            DX11InstancedIndexedDrawer drawer = new DX11InstancedIndexedDrawer();
            cube.AssignDrawer(drawer);

            InputLayout layout;
            var bc = new ShaderBytecode(signature);
            cube.ValidateLayout(bc, out layout);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            Color4[] statusColor = new Color4[]
            {
                Color.Red,
                Color.Yellow,
                Color.Green
            };

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f, 1.3f, 0.1f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, statusColor);

            bool doQuit = false;
            bool doUpload = false;

            int bodyCount = 0;
            KinectBody[] bodyFrame = null;
            BodyCameraPositionBuffer positionBuffer = new BodyCameraPositionBuffer(device);
            BodyJointStatusBuffer statusBuffer = new BodyJointStatusBuffer(device);

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            context.Context.OutputMerger.DepthStencilState = device.DepthStencilStates.LessReadWrite;
            context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var tracked = bodyFrame.TrackedOnly();
                    bodyCount = tracked.Count();

                    positionBuffer.Copy(context, tracked);
                    statusBuffer.Copy(context, tracked);
                    drawer.InstanceCount = tracked.Count() * Microsoft.Kinect.Body.JointCount;
                }

                context.RenderTargetStack.Push(depthStencil, false,swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                depthStencil.Clear(context);

                /*Position buffer and cbuffers are the same data and in same slot,
                 * so we bind them only once*/
                context.Context.VertexShader.SetShaderResource(0, positionBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                //Draw lines
                context.Context.PixelShader.Set(pixelShaderLine);
                context.Context.VertexShader.Set(vertexShaderLine);

                //Attach index buffer, null topology since we fetch
                indexBuffer.AttachWithLayout(context);
                indexBuffer.Draw(context,bodyCount);

                //Draw cubes
                cube.Bind(context, layout);
                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(1, statusBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);

                cube.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            depthStencil.Dispose();
            context.Dispose();
            device.Dispose();

            positionBuffer.Dispose();
            statusBuffer.Dispose();
            colorTableBuffer.Dispose();

            cameraBuffer.Dispose();

            provider.Dispose();
            cube.Dispose();
            layout.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();

            pixelShaderLine.Dispose();
            vertexShaderLine.Dispose();
            indexBuffer.Dispose();

            sensor.Close();
        }
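The Projection.Transpose() and View.Transpose() calls before cameraBuffer.Update are there because SharpDX lays matrices out row-major on the CPU while HLSL constant buffers default to column-major packing; transposing once on the CPU keeps the shader code unchanged. A minimal sketch of that preparation step, reusing only calls that already appear in the sample (the CameraConstants struct name is illustrative, standing in for cbCamera):

using System;
using SharpDX;

// Illustrative CPU-side camera block, mirroring the cbCamera struct used in the sample.
struct CameraConstants
{
    public Matrix Projection;
    public Matrix View;
}

static class CameraUploadSketch
{
    static CameraConstants Build()
    {
        var camera = new CameraConstants
        {
            Projection = Matrix.PerspectiveFovLH(1.57f, 1.3f, 0.1f, 100.0f),
            View = Matrix.Translation(0.0f, 0.0f, 2.0f)
        };

        // SharpDX matrices are row-major on the CPU, while HLSL constant buffers
        // default to column-major packing, so transpose once before the struct is
        // copied into the constant buffer (cameraBuffer.Update in the sample).
        camera.Projection.Transpose();
        camera.View.Transpose();
        return camera;
    }

    static void Main()
    {
        Console.WriteLine(Build().Projection);
    }
}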
Example #42
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect depth local stream sample");

            RenderDevice  device    = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context   = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            PixelShader pixelShaderRaw = ShaderCompiler.CompileFromFile <PixelShader>(device, "DepthToWorld.fx", "PS_Raw");

            KinectSensor sensor = KinectSensor.GetDefault();

            sensor.Open();

            KinectFrameServer frameServer = new KinectFrameServer(32000, sensor);

            KinectFrameClient frameClient = new KinectFrameClient(IPAddress.Parse("127.0.0.1"), 32000);

            RayTableTexture     rayTable     = RayTableTexture.FromCoordinateMapper(device, sensor.CoordinateMapper);
            RenderCameraTexture renderCamera = new RenderCameraTexture(device);

            frameClient.Connect();


            bool doQuit = false;


            bool uploadDepth = false;
            bool uploadBody  = false;

            int mode = 0; //0 = body index, 1 = depth, 2 = world

            DepthFrameData      depthData = null;
            DynamicDepthTexture depth     = new DynamicDepthTexture(device);

            IDepthFrameProvider networkDepth = (IDepthFrameProvider)frameClient;

            networkDepth.FrameReceived += (sender, args) => { depthData = args.DepthData; uploadDepth = true; };

            BodyIndexFrameData      bodyIndexData    = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);

            IBodyIndexFrameProvider networkBody = (IBodyIndexFrameProvider)frameClient;

            networkBody.FrameReceived += (sender, args) => { bodyIndexData = args.FrameData; uploadBody = true; };


            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape)
                                                {
                                                    doQuit = true;
                                                }
                                                if (args.KeyCode == Keys.Space)
                                                {
                                                    mode++; if (mode > 2)
                                                    {
                                                        mode = 0;
                                                    }
                                                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadDepth)
                {
                    depth.Copy(context, depthData);
                    uploadDepth = false;

                    if (mode == 2)
                    {
                        //Convert depth to world
                        context.Context.OutputMerger.SetRenderTargets(renderCamera.RenderView);
                        device.Primitives.ApplyFullTriVS(context);
                        context.Context.PixelShader.Set(pixelShaderRaw);
                        context.Context.PixelShader.SetShaderResource(0, depth.RawView);

                        context.Context.PixelShader.SetShaderResource(1, rayTable.ShaderView);

                        device.Primitives.FullScreenTriangle.Draw(context);
                        context.RenderTargetStack.Apply();
                    }
                }

                if (uploadBody)
                {
                    bodyIndexTexture.Copy(context, bodyIndexData);
                    uploadBody = false;
                }

                context.RenderTargetStack.Push(swapChain);

                if (mode == 0)
                {
                    device.Primitives.ApplyFullTri(context, bodyIndexTexture.NormalizedView);
                }
                else if (mode == 1)
                {
                    device.Primitives.ApplyFullTri(context, depth.NormalizedView);
                }
                else
                {
                    device.Primitives.ApplyFullTri(context, renderCamera.ShaderView);
                }

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            depth.Dispose();
            bodyIndexTexture.Dispose();
            frameClient.Stop();
            frameServer.Dispose();

            rayTable.Dispose();
            renderCamera.Dispose();

            pixelShaderRaw.Dispose();

            sensor.Close();
        }
Example #43
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect multiple hd faces projected to rgb");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedTextureHdFaceView.fx", "VS_Simple");
            VertexShader vertexShaderIndexed = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedTextureHdFaceView.fx", "VS_Indexed");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ProjectedTextureHdFaceView.fx", "PS");

            int maxFaceCount = Consts.MaxBodyCount;
            int faceVertexCount = (int)Microsoft.Kinect.Face.FaceModel.VertexCount;

            var vertRgbTempBuffer = new ColorSpacePoint[faceVertexCount];
            ColorSpacePoint[] facePoints = new ColorSpacePoint[faceVertexCount * maxFaceCount];

            DX11StructuredBuffer lookupBuffer = DX11StructuredBuffer.CreateDynamic<uint>(device, maxFaceCount);

            //Note : since in this case we use instancing, we only need a buffer for a single face
            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);

            DynamicRgbSpaceFaceStructuredBuffer faceRgbBuffer = new DynamicRgbSpaceFaceStructuredBuffer(device, maxFaceCount);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool invalidateFace = false;

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);
            BodyTrackingProcessor bodyTracker = new BodyTrackingProcessor();
            MultipleHdFaceProcessor multiFace = new MultipleHdFaceProcessor(sensor, bodyTracker, maxFaceCount);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            bool uploadColor = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { currentData = args.FrameData; uploadColor = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyTracker.Next(args.FrameData);
            };

            multiFace.OnFrameResultsChanged += (sender, args) =>
            {
                invalidateFace = true;
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (invalidateFace)
                {
                    int offset = 0;
                    foreach (var data in multiFace.CurrentResults)
                    {
                        var vertices = data.FaceModel.CalculateVerticesForAlignment(data.FaceAlignment).ToArray();
                        sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgbTempBuffer);
                        Array.Copy(vertRgbTempBuffer, 0, facePoints, offset, faceVertexCount);
                        offset += faceVertexCount;
                    }
                    faceRgbBuffer.Copy(context, facePoints, multiFace.CurrentResults.Count * faceVertexCount);
                    invalidateFace = false;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, currentData);
                    uploadColor = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                context.RenderTargetStack.Push(swapChain);

                context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;
                context.Context.OutputMerger.BlendState = device.BlendStates.Disabled;
                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                if (multiFace.CurrentResults.Count > 0)
                {
                    context.Context.VertexShader.SetShaderResource(0, faceRgbBuffer.ShaderView);
                    context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);
                    context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);

                    if (multiFace.CurrentResults.Count > 1)
                    {
                        uint[] buffer = new uint[multiFace.CurrentResults.Count];
                        for (uint i = 0; i < multiFace.CurrentResults.Count; i++)
                        {
                            buffer[i] = (uint)((i + 1) % multiFace.CurrentResults.Count);
                        }
                        lookupBuffer.WriteData(context, buffer);

                        context.Context.VertexShader.Set(vertexShaderIndexed);
                        context.Context.VertexShader.SetShaderResource(1, lookupBuffer.ShaderView);
                    }
                    else
                    {
                        context.Context.VertexShader.Set(vertexShader);
                    }

                    context.Context.PixelShader.Set(pixelShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.DrawInstanced(context, multiFace.CurrentResults.Count);
                }

                context.RenderTargetStack.Pop();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            faceIndexBuffer.Dispose();
            faceRgbBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();
            vertexShaderIndexed.Dispose();

            lookupBuffer.Dispose();

            sensor.Close();
        }
Example #44
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect background subtraction sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            PixelShader depthPixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "FilterDepthView.fx", "PS_Sample");
            PixelShader rgbPixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "FilterRGBView.fx", "PS_Sample");

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool swapMode = false;

            bool uploadColor = false;
            bool uploadBodyIndex = false;

            //We need color and body index for subtraction
            ColorRGBAFrameData colorData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorData = args.FrameData; uploadColor = true; };

            BodyIndexFrameData bodyIndexData = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexData = args.FrameData; uploadBodyIndex = true; };

            bool uploadColorToDepth = false;
            bool uploadDepthToColor = false;
            ColorToDepthFrameData colorToDepthData = new ColorToDepthFrameData();
            DepthToColorFrameData depthToColorData = new DepthToColorFrameData();
            KinectSensorDepthFrameProvider depthProvider = new KinectSensorDepthFrameProvider(sensor);
            depthProvider.FrameReceived += (sender, args) =>
            {
                if (!swapMode)
                {
                    colorToDepthData.Update(sensor.CoordinateMapper, args.DepthData);
                    uploadColorToDepth = true;
                }
                else
                {
                    depthToColorData.Update(sensor.CoordinateMapper, args.DepthData);
                    uploadDepthToColor = true;
                }
            };

            DynamicColorToDepthTexture colorToDepthTexture = new DynamicColorToDepthTexture(device);
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } if (args.KeyCode == Keys.Space) { swapMode = !swapMode; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, colorData);
                    uploadColor = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context, bodyIndexData);
                    uploadBodyIndex = false;
                }

                if (uploadColorToDepth)
                {
                    colorToDepthTexture.Copy(context, colorToDepthData);
                    uploadColorToDepth = false;
                }

                if (uploadDepthToColor)
                {
                    depthToColorTexture.Copy(context, depthToColorData);
                    uploadDepthToColor = false;
                }

                ShaderResourceView view = swapMode ? depthToColorTexture.ShaderView : colorToDepthTexture.ShaderView;
                PixelShader shader = swapMode ? depthPixelShader : rgbPixelShader;

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTriVS(context);

                context.Context.PixelShader.Set(shader);
                context.Context.PixelShader.SetShaderResource(0, colorTexture.ShaderView);
                //Note: make sure to use normalized view as we use untyped resource here
                context.Context.PixelShader.SetShaderResource(1, bodyIndexTexture.NormalizedView);
                context.Context.PixelShader.SetShaderResource(2, view);

                context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            depthProvider.Dispose();
            colorToDepthData.Dispose();
            depthToColorData.Dispose();
            colorToDepthTexture.Dispose();
            depthToColorTexture.Dispose();

            colorTexture.Dispose();
            colorProvider.Dispose();

            bodyIndexData.Dispose();
            bodyIndexProvider.Dispose();

            depthPixelShader.Dispose();
            rgbPixelShader.Dispose();

            sensor.Close();
        }
Example #45
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple filtered point cloud view sample");

            RenderDevice  device    = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context   = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            ComputeShader computeShader = ShaderCompiler.CompileFromFile <ComputeShader>(device, "PointCloudFilter.fx", "CS_Filter");

            VertexShader vertexShader = ShaderCompiler.CompileFromFile <VertexShader>(device, "PointCloudJointView.fx", "VS");
            PixelShader  pixelShader  = ShaderCompiler.CompileFromFile <PixelShader>(device, "PointCloudJointView.fx", "PS");

            BodyCameraPositionBuffer positionBuffer   = new BodyCameraPositionBuffer(device);
            DX11StructuredBuffer     colorTableBuffer = DX11StructuredBuffer.CreateImmutable <Color4>(device, ColorTable);


            DX11NullGeometry nullGeom = new DX11NullGeometry(device);

            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;
            InstancedIndirectBuffer indirectDrawBuffer = new InstancedIndirectBuffer(device);


            KinectSensor sensor = KinectSensor.GetDefault();

            sensor.Open();

            cbCamera camera = new cbCamera();

            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View       = Matrix.Translation(0.0f, 0.0f, 2.0f);

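            //Transpose before upload: HLSL constant buffers expect column-major packing by default, while these matrices are row-major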
            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer <cbCamera> cameraBuffer = new ConstantBuffer <cbCamera>(device);

            cameraBuffer.Update(context, ref camera);

            bool doQuit          = false;
            bool uploadCamera    = false;
            bool uploadBodyIndex = false;
            bool uploadBody      = false;

            CameraRGBFrameData      rgbFrame      = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);

            provider.FrameReceived += (sender, args) => { rgbFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            BodyIndexFrameData                 bodyIndexFrame    = null;
            DynamicBodyIndexTexture            bodyIndexTexture  = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);

            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexFrame = args.FrameData; uploadBodyIndex = true; };

            AppendPointCloudBuffer pointCloudBuffer = new AppendPointCloudBuffer(device);

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyFrameProvider = new KinectSensorBodyFrameProvider(sensor);

            bodyFrameProvider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; uploadBody = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape)
                                                {
                                                    doQuit = true;
                                                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, rgbFrame);
                    uploadCamera = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context.Context, bodyIndexFrame);
                    uploadBodyIndex = false;
                }

                if (uploadBody)
                {
                    positionBuffer.Copy(context, bodyFrame.TrackedOnly().ClosestBodies());
                    uploadBody = false;
                }

                //Prepare compute shader
                context.Context.ComputeShader.Set(computeShader);
                context.Context.ComputeShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.ComputeShader.SetShaderResource(1, bodyIndexTexture.RawView);               //Set raw view here, we do not sample

                context.Context.ComputeShader.SetUnorderedAccessView(0, pointCloudBuffer.UnorderedView, 0); //Don't forget to set count to 0

                context.Context.Dispatch(Consts.DepthWidth / 8, Consts.DepthHeight / 8, 1);                 //No iDivUp here, since it's not needed
                context.Context.ComputeShader.SetUnorderedAccessView(0, null);                              //Make runtime happy, and if we don't unbind we can't set as srv
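                //Copy the append buffer's hidden counter into the indirect-args buffer, so DrawInstancedIndirect later draws exactly the number of filtered points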
                context.Context.CopyStructureCount(indirectDrawBuffer.ArgumentBuffer, 0, pointCloudBuffer.UnorderedView);

                //Draw filter buffer
                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, pointCloudBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, positionBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                context.Context.DrawInstancedIndirect(indirectDrawBuffer.ArgumentBuffer, 0);

                context.Context.VertexShader.SetShaderResource(0, null); //Make runtime happy

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            cameraBuffer.Dispose();
            cameraTexture.Dispose();
            bodyIndexTexture.Dispose();

            provider.Dispose();
            bodyIndexProvider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            positionBuffer.Dispose();
            colorTableBuffer.Dispose();

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();
        }
Пример #46
0
    // ---------------------------------------------------------------------------------
    // ---------------------------------------------------------------------------------
    // --- Part 2: Initialization of application data, controls, and behavior
    // ---------------------------------------------------------------------------------


    // If any data needs to be initialized at application startup, it can be done in an override
    // of this event handler, which is invoked once before the application window is shown
    protected override void OnMainWindowLoad(object sender, EventArgs args)
    {
        // The generated application has two main controls:
        // base.RenderDevice - the left part of the window, used for drawing
        // base.ValueStorage - the right panel, used to display and edit properties

        // Example of customizing the appearance of the controls (optional code)
        base.RenderDevice.BufferBackCol = 0xB0;
        base.ValueStorage.Font          = new Font("Arial", 12f);
        base.ValueStorage.ForeColor     = Color.Firebrick;
        base.ValueStorage.RowHeight     = 30;
        base.ValueStorage.BackColor     = Color.BlanchedAlmond;
        base.MainWindow.BackColor       = Color.DarkGoldenrod;
        base.ValueStorage.RightColWidth = 50;
        base.VSPanelWidth    = 300;
        base.VSPanelLeft     = true;
        base.MainWindow.Size = new Size(960, 640);

        // Mouse handling while the left or right mouse button is held down
        base.RenderDevice.MouseMoveWithLeftBtnDown += (s, e) => {
            ShiftX += e.MovDeltaX;
            ShiftY -= e.MovDeltaY;
        };

        base.RenderDevice.MouseMoveWithRightBtnDown += (s, e) => {
            double centerX = Width / 2 + Offset.X + ShiftX;
            double centerY = Height / 2 - Offset.Y - ShiftY;

            double dx = e.MovDeltaX;
            double dy = e.MovDeltaY;

            double startX = e.X - dx;
            double startY = e.Y - dy;

            double curX = e.X;
            double curY = e.Y;

            double centerStartVecX = startX - centerX;
            double centerStartVecY = startY - centerY;

            double centerCurVecX = curX - centerX;
            double centerCurVecY = curY - centerY;

            // invert Oy axis
            centerCurVecY   = -centerCurVecY;
            centerStartVecY = -centerStartVecY;
            dy = -dy;

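            // The rotation delta is the angle between the center->start and center->current vectors: cos = dot(a, b) / (|a| * |b|)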
            double centerStartVecMod = Math.Sqrt(centerStartVecX * centerStartVecX + centerStartVecY * centerStartVecY);
            double centerCurVecMod   = Math.Sqrt(centerCurVecX * centerCurVecX + centerCurVecY * centerCurVecY);

            double denum = centerStartVecMod * centerCurVecMod;
            double cos;

            if (denum < Double.Epsilon)
            {
                cos = 1.0;
                Console.WriteLine("denum ~= 0");
            }
            else
            {
                cos = (centerStartVecX * centerCurVecX + centerStartVecY * centerCurVecY) / denum;
            }

            if (cos > 1.0)
            {
                cos = 1.0;
            }

            if (cos < -1.0)
            {
                cos = -1.0;
            }

            double deltaAngle = Math.Acos(cos) * 180.0 / Math.PI;

            double ResultAngle;

            if (centerStartVecY * dx + (-centerStartVecX) * dy < 0) // perp-dot (2D cross product) of the center->start vector and the movement delta gives the rotation direction
            {
                ResultAngle = RotationAngle - deltaAngle;
            }
            else
            {
                ResultAngle = RotationAngle + deltaAngle;
            }

            if (ResultAngle > 360.0)
            {
                ResultAngle -= 360.0;
            }

            if (ResultAngle < 0.0)
            {
                ResultAngle += 360.0;
            }

            RotationAngle = ResultAngle;
        };

        // Keyboard controls
        RenderDevice.HotkeyRegister(Keys.Up, (s, e) => ++ ShiftY);
        RenderDevice.HotkeyRegister(Keys.Down, (s, e) => -- ShiftY);
        RenderDevice.HotkeyRegister(Keys.Left, (s, e) => -- ShiftX);
        RenderDevice.HotkeyRegister(Keys.Right, (s, e) => ++ ShiftX);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Up, (s, e) => ShiftY    += 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Down, (s, e) => ShiftY  -= 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Left, (s, e) => ShiftX  -= 10);
        RenderDevice.HotkeyRegister(KeyMod.Shift, Keys.Right, (s, e) => ShiftX += 10);

        InitialWidth  = base.RenderDevice.Width;
        InitialHeight = base.RenderDevice.Height;

        PrevApproxLevel = ApproxLevel;
        PrevAmplitude   = Amplitude;

        // ... compute any parameters or perform additional initialization here if needed
    }
Пример #47
0
 public override IDxGeometry GetGeometry(RenderDevice device)
 {
     return(device.Primitives.IcoGrid(this));
 }
Пример #48
0
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect simple pilot sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            //Allow to draw using direct2d on top of swapchain
            var context2d = new SharpDX.Direct2D1.DeviceContext(swapChain.Texture.QueryInterface<SharpDX.DXGI.Surface>());
            //Call release on texture since queryinterface does an addref
            Marshal.Release(swapChain.Texture.NativePointer);

            var textFormat = new SharpDX.DirectWrite.TextFormat(device.DWriteFactory, "Arial", 16.0f);

            var blackBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.Black);
            var whiteBrush = new SharpDX.Direct2D1.SolidColorBrush(context2d, SharpDX.Color.White);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;

            KinectSensorColorRGBAFrameProvider provider = new KinectSensorColorRGBAFrameProvider(sensor);
            DynamicColorRGBATextureProcessor colorProcessor = new DynamicColorRGBATextureProcessor(provider, device);
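            //Presumably pairs the provider with a texture upload: Update(context) in the render loop below copies the latest received frame into colorProcessor.Texture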

            KinectPilotProcessor pilot = KinectPilotProcessor.Default;

            KinectSensorBodyFrameProvider bodyFrameProvider = new KinectSensorBodyFrameProvider(sensor);
            bodyFrameProvider.FrameReceived += (sender, args) =>
            {
                var body = args.FrameData.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    pilot.Process(body.GetJointTable());
                }
            };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                colorProcessor.Update(context);

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTri(context, colorProcessor.Texture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();

                context2d.BeginDraw();

                var rect = new SharpDX.RectangleF(0, 0, 200, 130);
                context2d.FillRectangle(rect, blackBrush);
                context2d.DrawText("Elevation: " + pilot.Elevation, textFormat, rect, whiteBrush);
                rect.Top += 30;
                context2d.DrawText("Steering Y: " + pilot.SteeringY, textFormat, rect, whiteBrush);
                rect.Top += 30;
                context2d.DrawText("Steering Z: " + pilot.SterringZ, textFormat, rect, whiteBrush);
                rect.Top += 30;
                context2d.DrawText("Push: " + pilot.Push, textFormat, rect, whiteBrush);
                context2d.EndDraw();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProcessor.Dispose();
            provider.Dispose();

            sensor.Close();
        }
Пример #49
0
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColoredPointCloudView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ColoredPointCloudView.fx", "PS");

            DX11NullInstancedDrawer nulldrawer = new DX11NullInstancedDrawer();
            nulldrawer.VertexCount = Consts.DepthWidth;
            nulldrawer.InstanceCount = Consts.DepthHeight;
            DX11NullGeometry nullGeom = new DX11NullGeometry(device, nulldrawer);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadRgb = false;

            CameraRGBFrameData cameraFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            DepthToColorFrameData depthToColorFrame = new DepthToColorFrameData();
            DynamicDepthToColorTexture depthToColorTexture = new DynamicDepthToColorTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { cameraFrame.Update(sensor.CoordinateMapper, args.DepthData); depthToColorFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };
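            //Each depth frame is mapped twice: into camera space (3D positions) and into color space (coordinates used to sample the RGB texture)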

            //Get coordinate map + rgb
            ColorRGBAFrameData colorFrame = new ColorRGBAFrameData();
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { colorFrame = args.FrameData; uploadRgb = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, cameraFrame);
                    depthToColorTexture.Copy(context.Context, depthToColorFrame);
                    uploadCamera = false;
                }

                if (uploadRgb)
                {
                    colorTexture.Copy(context.Context, colorFrame);
                    uploadRgb = false;
                }

                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, colorTexture.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, depthToColorTexture.ShaderView);

                context.Context.VertexShader.SetSampler(0, device.SamplerStates.LinearClamp);

                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                nullGeom.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            cameraTexture.Dispose();

            provider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            colorTexture.Dispose();
            colorProvider.Dispose();

            depthToColorFrame.Dispose();
            depthToColorTexture.Dispose();
        }
Пример #50
0
        /// <summary>
        ///     Renders the object.
        /// </summary>
        /// <param name="renderer">The Renderer.</param>
        public void Render(RenderDevice renderer)
        {
            lock (_lockObj)
            {
                foreach (Pipe pipe in _pipes)
                {
                    if (pipe.Position.X > -60) //only draw if it's in our view
                    {
                        renderer.DrawTexture(_pipeBody,
                            new Rectangle(pipe.Position.X, pipe.Position.Y, 44, pipe.TopPipeHeight), Opacity);
                        renderer.DrawTexture(_pipeBottom,
                            new Vector2(pipe.Position.X - 1, pipe.Position.Y + pipe.TopPipeHeight), Opacity);

                        renderer.DrawTexture(_pipeBody,
                            new Rectangle(pipe.Position.X, pipe.BottomPipeY, 44, pipe.BottomPipeHeight), Opacity);

                        renderer.DrawTexture(_pipeBody, new Vector2(pipe.Position.X, pipe.BottomPipeY), Opacity);

                        renderer.DrawTexture(_pipeTop, new Vector2(pipe.Position.X - 1, pipe.BottomPipeY - 22), Opacity);
                    }
                }
            }
        }
Пример #51
0
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple filtered point cloud view sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            ComputeShader computeShader = ShaderCompiler.CompileFromFile<ComputeShader>(device, "PointCloudFilter.fx", "CS_Filter");

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "PointCloudJointView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "PointCloudJointView.fx", "PS");

            BodyCameraPositionBuffer positionBuffer = new BodyCameraPositionBuffer(device);
            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable<Color4>(device, ColorTable);

            DX11NullGeometry nullGeom = new DX11NullGeometry(device);
            nullGeom.Topology = SharpDX.Direct3D.PrimitiveTopology.PointList;
            InstancedIndirectBuffer indirectDrawBuffer = new InstancedIndirectBuffer(device);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f * 0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool uploadCamera = false;
            bool uploadBodyIndex = false;
            bool uploadBody = false;

            CameraRGBFrameData rgbFrame = new CameraRGBFrameData();
            DynamicCameraRGBTexture cameraTexture = new DynamicCameraRGBTexture(device);

            KinectSensorDepthFrameProvider provider = new KinectSensorDepthFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { rgbFrame.Update(sensor.CoordinateMapper, args.DepthData); uploadCamera = true; };

            BodyIndexFrameData bodyIndexFrame = null;
            DynamicBodyIndexTexture bodyIndexTexture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider bodyIndexProvider = new KinectSensorBodyIndexFrameProvider(sensor);
            bodyIndexProvider.FrameReceived += (sender, args) => { bodyIndexFrame = args.FrameData; uploadBodyIndex = true; };

            AppendPointCloudBuffer pointCloudBuffer = new AppendPointCloudBuffer(device);

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider bodyFrameProvider = new KinectSensorBodyFrameProvider(sensor);
            bodyFrameProvider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; uploadBody = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (uploadCamera)
                {
                    cameraTexture.Copy(context.Context, rgbFrame);
                    uploadCamera = false;
                }

                if (uploadBodyIndex)
                {
                    bodyIndexTexture.Copy(context.Context, bodyIndexFrame);
                    uploadBodyIndex = false;
                }

                if (uploadBody)
                {
                    positionBuffer.Copy(context, bodyFrame.TrackedOnly().ClosestBodies());
                    uploadBody = false;
                }

                //Prepare compute shader
                context.Context.ComputeShader.Set(computeShader);
                context.Context.ComputeShader.SetShaderResource(0, cameraTexture.ShaderView);
                context.Context.ComputeShader.SetShaderResource(1, bodyIndexTexture.RawView); //Set raw view here, we do not sample

                context.Context.ComputeShader.SetUnorderedAccessView(0, pointCloudBuffer.UnorderedView, 0); //Don't forget to set count to 0

                context.Context.Dispatch(Consts.DepthWidth / 8, Consts.DepthHeight / 8, 1); //No iDivUp here, since it's not needed
                context.Context.ComputeShader.SetUnorderedAccessView(0, null); //Make runtime happy, and if we don't unbind we can't set as srv
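                //Copy the append buffer's hidden counter into the indirect-args buffer, so DrawInstancedIndirect later draws exactly the number of filtered points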
                context.Context.CopyStructureCount(indirectDrawBuffer.ArgumentBuffer, 0, pointCloudBuffer.UnorderedView);

                //Draw filter buffer
                context.RenderTargetStack.Push(swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(0, pointCloudBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(1, positionBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                nullGeom.Bind(context, null);
                context.Context.DrawInstancedIndirect(indirectDrawBuffer.ArgumentBuffer, 0);

                context.Context.VertexShader.SetShaderResource(0, null); //Make runtime happy

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            cameraBuffer.Dispose();
            cameraTexture.Dispose();
            bodyIndexTexture.Dispose();

            provider.Dispose();
            bodyIndexProvider.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();
            sensor.Close();

            positionBuffer.Dispose();
            colorTableBuffer.Dispose();

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();
        }
Пример #52
0
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple hd face sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "HdFaceView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "HdFaceView.fx", "PS");

            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);
            DynamicHdFaceStructuredBuffer faceVertexBuffer = new DynamicHdFaceStructuredBuffer(device, 1);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f*0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 0.5f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool doUpload = false;

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceModel currentFaceModel = new FaceModel();
            FaceAlignment currentFaceAlignment = new FaceAlignment();

            SingleHdFaceProcessor hdFaceProcessor = new SingleHdFaceProcessor(sensor);
            hdFaceProcessor.HdFrameReceived += (sender, args) => { currentFaceModel = args.FaceModel; currentFaceAlignment = args.FaceAlignment; doUpload = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    hdFaceProcessor.AssignBody(body);
                }
                else
                {
                    hdFaceProcessor.Suspend();
                }
            };

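            //Render the face mesh as a wireframe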
            context.Context.Rasterizer.State = device.RasterizerStates.WireFrame;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var vertices = currentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment).ToArray();
                    faceVertexBuffer.Copy(context, vertices);
                    doUpload = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                if (hdFaceProcessor.IsValid)
                {
                    context.RenderTargetStack.Push(swapChain);
                    context.Context.VertexShader.SetShaderResource(0, faceVertexBuffer.ShaderView);
                    context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                    //Draw lines
                    context.Context.PixelShader.Set(pixelShader);
                    context.Context.VertexShader.Set(vertexShader);

                    //Attach index buffer, null topology since we fetch
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.Draw(context, 1);
                    context.RenderTargetStack.Pop();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            faceIndexBuffer.Dispose();
            faceVertexBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();

            hdFaceProcessor.Dispose();
            sensor.Close();
        }
Пример #53
0
 public StyleCollection(IEnumerable <CssStyleSheet> sheets, RenderDevice device)
 {
     _sheets = sheets;
     _device = device;
 }
Пример #54
0
 public override Boolean Validate(RenderDevice device)
 {
     return true;
 }
Пример #55
0
 public override IDxGeometry GetGeometry(RenderDevice device)
 {
     return(device.Primitives.Isocahedron(this));
 }
Пример #56
0
 public override IDxGeometry GetGeometry(RenderDevice device)
 {
     return(device.Primitives.RoundRect(this));
 }
Пример #57
0
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect body index sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "BodyIndexView.fx", "PS_NormalizedView");

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool doUpload = false;
            BodyIndexFrameData currentData = null;
            DynamicBodyIndexTexture texture = new DynamicBodyIndexTexture(device);
            KinectSensorBodyIndexFrameProvider provider = new KinectSensorBodyIndexFrameProvider(sensor);
            provider.FrameReceived += (sender, args) => { currentData = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    texture.Copy(context, currentData);
                    doUpload = false;
                }

                context.RenderTargetStack.Push(swapChain);

                device.Primitives.ApplyFullTriVS(context);

                context.Context.PixelShader.Set(pixelShader);
                context.Context.PixelShader.SetSampler(0, device.SamplerStates.LinearClamp);
                context.Context.PixelShader.SetShaderResource(0, texture.NormalizedView);

                device.Primitives.FullScreenTriangle.Draw(context);
                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            texture.Dispose();
            provider.Dispose();

            pixelShader.Dispose();

            sensor.Close();
        }
Пример #58
0
 /// <summary>
 /// Renders the object.
 /// </summary>
 /// <param name="renderer">
 /// The render device.
 /// </param>
 /// <param name="gameTime">
 /// The current game time.
 /// </param>
 public void Render(RenderDevice renderer, GameTime gameTime)
 {
     renderer.DrawTexture(this.Texture2D, this.Location, Color.White);
 }
Пример #59
0
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Camera Joint sample");

            RenderDevice  device    = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context   = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            DX11DepthStencil depthStencil = new DX11DepthStencil(device, swapChain.Width, swapChain.Height, eDepthFormat.d24s8);


            //VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ColorJointView.fx", "VS");
            SharpDX.D3DCompiler.ShaderSignature signature;
            VertexShader vertexShader = ShaderCompiler.CompileFromFile(device, "CameraJointView.fx", "VS_Color", out signature);
            PixelShader  pixelShader  = ShaderCompiler.CompileFromFile <PixelShader>(device, "CameraJointView.fx", "PS_Color");

            VertexShader vertexShaderLine = ShaderCompiler.CompileFromFile <VertexShader>(device, "CameraJointView.fx", "VS");
            PixelShader  pixelShaderLine  = ShaderCompiler.CompileFromFile <PixelShader>(device, "CameraJointView.fx", "PS_White");

            JointTableIndexBuffer indexBuffer = new JointTableIndexBuffer(device, 6);

            DX11IndexedGeometry cube = device.Primitives.Box(new Box()
            {
                Size = new Vector3(0.05f)
            });
            DX11InstancedIndexedDrawer drawer = new DX11InstancedIndexedDrawer();

            cube.AssignDrawer(drawer);

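            //Validate the cube geometry against the vertex shader signature to build a matching input layout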
            InputLayout layout;
            var         bc = new ShaderBytecode(signature);

            cube.ValidateLayout(bc, out layout);

            KinectSensor sensor = KinectSensor.GetDefault();

            sensor.Open();

            Color4[] statusColor = new Color4[]
            {
                Color.Red,
                Color.Yellow,
                Color.Green
            };

            cbCamera camera = new cbCamera();

            camera.Projection = Matrix.PerspectiveFovLH(1.57f, 1.3f, 0.1f, 100.0f);
            camera.View       = Matrix.Translation(0.0f, 0.0f, 2.0f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer <cbCamera> cameraBuffer = new ConstantBuffer <cbCamera>(device);

            cameraBuffer.Update(context, ref camera);

            DX11StructuredBuffer colorTableBuffer = DX11StructuredBuffer.CreateImmutable <Color4>(device, statusColor);

            bool doQuit   = false;
            bool doUpload = false;


            int bodyCount = 0;

            KinectBody[]             bodyFrame      = null;
            BodyCameraPositionBuffer positionBuffer = new BodyCameraPositionBuffer(device);
            BodyJointStatusBuffer    statusBuffer   = new BodyJointStatusBuffer(device);

            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            provider.FrameReceived += (sender, args) => { bodyFrame = args.FrameData; doUpload = true; };

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape)
                                                {
                                                    doQuit = true;
                                                }
            };


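            //Enable depth testing and back-face culling for the 3D joint cubes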
            context.Context.OutputMerger.DepthStencilState = device.DepthStencilStates.LessReadWrite;
            context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var tracked = bodyFrame.TrackedOnly();
                    bodyCount   = tracked.Count();

                    positionBuffer.Copy(context, tracked);
                    statusBuffer.Copy(context, tracked);
                    drawer.InstanceCount = tracked.Count() * Microsoft.Kinect.Body.JointCount;
                    doUpload = false;
                }

                context.RenderTargetStack.Push(depthStencil, false, swapChain);
                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                depthStencil.Clear(context);

                /*Position buffer and cbuffers are the same data and in same slot,
                 * so we bind them only once*/
                context.Context.VertexShader.SetShaderResource(0, positionBuffer.ShaderView);
                context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                //Draw lines
                context.Context.PixelShader.Set(pixelShaderLine);
                context.Context.VertexShader.Set(vertexShaderLine);

                //Attach index buffer, null topology since we fetch
                indexBuffer.AttachWithLayout(context);
                indexBuffer.Draw(context, bodyCount);

                //Draw cubes
                cube.Bind(context, layout);
                context.Context.VertexShader.Set(vertexShader);
                context.Context.PixelShader.Set(pixelShader);

                context.Context.VertexShader.SetShaderResource(1, statusBuffer.ShaderView);
                context.Context.VertexShader.SetShaderResource(2, colorTableBuffer.ShaderView);


                cube.Draw(context);

                context.RenderTargetStack.Pop();
                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            depthStencil.Dispose();
            context.Dispose();
            device.Dispose();

            positionBuffer.Dispose();
            statusBuffer.Dispose();
            colorTableBuffer.Dispose();

            cameraBuffer.Dispose();

            provider.Dispose();
            cube.Dispose();
            layout.Dispose();

            pixelShader.Dispose();
            vertexShader.Dispose();

            pixelShaderLine.Dispose();
            vertexShaderLine.Dispose();
            indexBuffer.Dispose();

            sensor.Close();
        }
Пример #60
0
 /// <summary>
 ///     Renders the object.
 /// </summary>
 /// <param name="renderer">The Renderer.</param>
 public void Render(RenderDevice renderer)
 {
     Vector2 dim = renderer.MeasureString(Score.ToString(CultureInfo.InvariantCulture), _font);
     renderer.DrawString(Score.ToString(CultureInfo.InvariantCulture), _font, new Vector2(320 - (dim.X/2), 50),
         Color.White);
 }