public void Render(Matrix pTransforme, float pScale)
        {
            mTimer = (float)(KryptonEngine.EngineSettings.Time.TotalGameTime.TotalMilliseconds / 10000);

            // First pass: render the animated fog into its own offscreen target.
            mGrapicsDevice.SetRenderTarget(mFogTarget);
            mGrapicsDevice.Clear(Color.Transparent);

            mFogShader.Parameters["View"].SetValue(pTransforme.Translation);
            mFogShader.Parameters["Scale"].SetValue(pScale);
            mFogShader.Parameters["Timer"].SetValue(mTimer);
            mFogShader.Parameters["Speed"].SetValue(mSpeed);
            mFogShader.Parameters["FogFactorMin"].SetValue(mFogFactorMin);
            mFogShader.Parameters["FogFactorMax"].SetValue(mFogFactorMax);
            mFogShader.Parameters["FogStrength"].SetValue(0.3f);

            // Bind the textures the fog shader samples from.
            mGrapicsDevice.Textures[1] = mSceneDepthMap;
            mGrapicsDevice.Textures[2] = mFogTexture;
            mGrapicsDevice.Textures[3] = mSceneDiffuseMap;

            mFogShader.CurrentTechnique.Passes[0].Apply();
            QuadRenderer.Render(mGrapicsDevice);

            // Second pass: composite the fog target onto the back buffer
            // with a simple pass-through shader.
            mGrapicsDevice.SetRenderTarget(null);
            mGrapicsDevice.Clear(Color.Black);

            mGrapicsDevice.Textures[1] = mFogTarget;
            mSimpleDrawShader.CurrentTechnique.Passes[0].Apply();
            QuadRenderer.Render(mGrapicsDevice);
        }
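
A hedged usage sketch (not part of the original source): the fog pass above assumes the scene's diffuse and depth maps were rendered first, so a caller's draw loop might look like this, with RenderSceneToTargets, mFogRenderer, and mCamera as hypothetical names:

        protected override void Draw(GameTime gameTime)
        {
            RenderSceneToTargets();                                 // hypothetical helper: fills mSceneDiffuseMap / mSceneDepthMap
            mFogRenderer.Render(mCamera.ViewMatrix, mCamera.Zoom);  // overlay the animated fog
            base.Draw(gameTime);
        }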
Example #2
public void Render()
        {
            GraphicsDevice device = KryptonEngine.EngineSettings.Graphics.GraphicsDevice;

            // Render the hatching pattern into its own target, using the light map as input.
            device.SetRenderTarget(mHatchingTarget);
            device.Clear(Color.Transparent);

            HatchingShader.Parameters["repeat"].SetValue(mResolution);

            device.Textures[1] = mLightMap;

            HatchingShader.CurrentTechnique.Passes[0].Apply();
            QuadRenderer.Render(device);
        }
Example #3
protected override void OnRenderFrame(FrameEventArgs args)
        {
            GL.Clear(ClearBufferMask.ColorBufferBit);

            // Start a fresh quad batch for this frame.
            quadRenderer.Reset();

            //vex.Render(quadRenderer);

            helloText.Render(quadRenderer);

            // Submit the batched quads in one draw.
            quadRenderer.Render();

            Context.SwapBuffers();
            base.OnRenderFrame(args);
        }
Example #4
public void ProcessLight(List<Light> pLightList, Matrix pTranslation)
        {
            // Accumulate every visible light into the light map target.
            EngineSettings.Graphics.GraphicsDevice.SetRenderTarget(mLightTarget);
            EngineSettings.Graphics.GraphicsDevice.Clear(Color.Transparent);

            EngineSettings.Graphics.GraphicsDevice.BlendState = mLightMapBlendState;

            // Bind the required G-buffer targets as shader inputs.
            KryptonEngine.EngineSettings.Graphics.GraphicsDevice.Textures[0] = mGBuffer.RenderTargets[1];
            KryptonEngine.EngineSettings.Graphics.GraphicsDevice.Textures[1] = mGBuffer.RenderTargets[3];

            this.mTranslatetViewMatrix = Matrix.Multiply(mView, pTranslation);

            //this.mLightShader.Parameters["World"].SetValue(this.mWorld);
            //this.mLightShader.Parameters["Projection"].SetValue(this.mProjection);

            // Per-frame parameters only need to be set once, outside the loop.
            this.mLightShader.Parameters["View"].SetValue(pTranslation);
            this.mLightShader.Parameters["screen"].SetValue(new Vector2(EngineSettings.VirtualResWidth, EngineSettings.VirtualResHeight));

            foreach (Light l in pLightList)
            {
                if (!l.IsVisible)
                {
                    continue;
                }
                Vector4 lightPos = new Vector4(l.Position, l.Depth * 720, 1f);

                this.mLightShader.Parameters["LightIntensity"].SetValue(l.Intensity);
                this.mLightShader.Parameters["LightColor"].SetValue(l.LightColor);
                this.mLightShader.Parameters["LightPosition"].SetValue(lightPos);

                if (l is PointLight tempPl)
                {
                    mLightShader.Parameters["LightRadius"].SetValue(tempPl.Radius);
                    mLightShader.CurrentTechnique.Passes[0].Apply();
                }
                // TODO: directional lights are not handled yet; for them the quad
                // below is drawn with whatever pass was applied last.

                QuadRenderer.Render(this.mGraphicsDevice);
            }

            EngineSettings.Graphics.GraphicsDevice.SetRenderTarget(null);
        }
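
mLightMapBlendState is configured elsewhere in the engine; for a pass that accumulates many lights into one target it would typically be an additive blend, along these lines (an assumption, not the engine's verified setup):

            // Assumed additive blend: overlapping lights sum up in mLightTarget.
            mLightMapBlendState = new BlendState
            {
                ColorSourceBlend      = Blend.One,
                ColorDestinationBlend = Blend.One,
                AlphaSourceBlend      = Blend.One,
                AlphaDestinationBlend = Blend.One
            };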
Example #5
        /// <summary>
        /// Draws a directional light as a full-screen pass over the G-buffer.
        /// </summary>
        /// <param name="colorRT">Render target holding the scene color.</param>
        /// <param name="normalRT">Render target holding the encoded normals.</param>
        /// <param name="depthRT">Render target holding the scene depth.</param>
        /// <param name="camera">Camera whose inverted view-projection is used to reconstruct world positions.</param>
        void DrawDirectionalLight(RenderTarget2D colorRT, RenderTarget2D normalRT, RenderTarget2D depthRT, Camera camera)
        {
            // Set all parameters
            directionalLightEffect.Parameters["colorMap"].SetValue(colorRT);
            directionalLightEffect.Parameters["normalMap"].SetValue(normalRT);
            directionalLightEffect.Parameters["depthMap"].SetValue(depthRT);
            directionalLightEffect.Parameters["lightDirection"].SetValue(light.Direction);
            directionalLightEffect.Parameters["Color"].SetValue(light.Color);
            directionalLightEffect.Parameters["cameraPosition"].SetValue(camera.Position);
            directionalLightEffect.Parameters["InvertViewProjection"].SetValue(Matrix.Invert(camera.ViewMatrix * camera.ProjectionMatrix));
            directionalLightEffect.Parameters["halfPixel"].SetValue(halfPixel);

            // Apply the Effect
            directionalLightEffect.Techniques[0].Passes[0].Apply();

            // Draw a FullscreenQuad
            fullscreenQuad.Render(Vector2.One * -1, Vector2.One);
        }
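
The halfPixel parameter compensates for the Direct3D 9 half-texel offset that XNA 4 inherits when mapping a full-screen quad onto the back buffer; it is conventionally computed once from the back-buffer size, a sketch under that assumption:

            // Half-pixel offset so texels align with screen pixels on the quad.
            halfPixel = new Vector2(
                0.5f / GraphicsDevice.PresentationParameters.BackBufferWidth,
                0.5f / GraphicsDevice.PresentationParameters.BackBufferHeight);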
Example #6
public void ProcessFinalScene()
        {
            // Combine the G-buffer targets with the accumulated light map and the
            // ambient term into the final image.
            EngineSettings.Graphics.GraphicsDevice.SetRenderTarget(mFinalTarget);
            EngineSettings.Graphics.GraphicsDevice.Clear(Color.Transparent);

            EngineSettings.Graphics.GraphicsDevice.Textures[0] = this.mGBuffer.RenderTargets[0];
            EngineSettings.Graphics.GraphicsDevice.Textures[1] = mLightTarget;
            EngineSettings.Graphics.GraphicsDevice.Textures[2] = this.mGBuffer.RenderTargets[2];

            this.mCombineShader.Parameters["ambientColor"].SetValue(AmbientLight.LightColor);
            this.mCombineShader.Parameters["ambientIntensity"].SetValue(AmbientLight.Intensity);

            mCombineShader.CurrentTechnique.Passes[0].Apply();

            QuadRenderer.Render(this.mGraphicsDevice);
            EngineSettings.Graphics.GraphicsDevice.SetRenderTarget(null);
        }
Example #7
        public RenderTarget2D RenderOcean(RenderTarget2D waterScene, Vector2 camMove)
        {
            noisePow  = new Vector2(0.031f, 0.03125f) * 3; // 3 tile sample radius looks good
            noiseFreq = 1.0f;

            // TODO set offset by location in map (ocean currents)
            noiseOffset += 0.0002f;

            //oceanRippleEffect.Parameters["WorldViewProjection"].SetValue(wvm);
            oceanRippleEffect.Parameters["noiseOffset"].SetValue(noiseOffset);
            oceanRippleEffect.Parameters["noiseFrequency"].SetValue(noiseFreq);
            oceanRippleEffect.Parameters["camMove"].SetValue(camMove);
            oceanRippleEffect.Parameters["noisePower"].SetValue(noisePow);
            oceanRippleEffect.Parameters["noiseTexture"].SetValue(noiseMap);
            oceanRippleEffect.Parameters["water"].SetValue(waterScene);

            _graphics.SetRenderTarget(oceanEffectRT);
            _graphics.Clear(Color.PeachPuff);
            oceanRippleEffect.CurrentTechnique.Passes[0].Apply();
            _quadRenderer.Render(Vector2.One * -1, Vector2.One);

            return oceanEffectRT;
        }
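
A hypothetical caller for the ripple pass: the returned target is just a texture, so it would typically be composited back into the scene with a SpriteBatch (_spriteBatch, waterSceneRT, and cameraDelta are assumed names, not from the snippet):

            RenderTarget2D rippled = RenderOcean(waterSceneRT, cameraDelta); // assumed inputs
            _graphics.SetRenderTarget(null);
            _spriteBatch.Begin();
            _spriteBatch.Draw(rippled, Vector2.Zero, Color.White);
            _spriteBatch.End();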
Example #8
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state. Returns true if the
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return false;
            }

            //
            // TODO: Add code for pre-pass rendering here.
            //
            // Take care of any tasks that are not specific to an individual holographic
            // camera. This includes anything that doesn't need the final view or projection
            // matrix, such as lighting maps.
            //

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return deviceResources.UseHolographicCameraResources(
                       (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    if (canGetHolographicDisplayForCamera &&
                        cameraPose.HolographicCamera.Display.IsOpaque)
                    {
                        SharpDX.Mathematics.Interop.RawColor4 cornflowerBlue = new SharpDX.Mathematics.Interop.RawColor4(0.392156899f, 0.58431375f, 0.929411829f, 1.0f);
                        context.ClearRenderTargetView(renderTargetView, cornflowerBlue);
                    }
                    else
                    {
                        SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                        context.ClearRenderTargetView(renderTargetView, transparent);
                    }
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    //
                    // TODO: Replace the sample content with your own content.
                    //
                    // Notes regarding holographic content:
                    //    * For drawing, remember that you have the potential to fill twice as many pixels
                    //      in a stereoscopic render target as compared to a non-stereoscopic render target
                    //      of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
                    //      and only draw holograms that the user can see.
                    //    * To help occlude hologram geometry, you can create a depth map using geometry
                    //      data obtained via the surface mapping APIs. You can use this depth map to avoid
                    //      rendering holograms that are intended to be hidden behind tables, walls,
                    //      monitors, and so on.
                    //    * On HolographicDisplays that are transparent, black pixels will appear transparent
                    //      to the user. On such devices, you should clear the screen to Transparent as shown
                    //      above. You should still use alpha blending to draw semitransparent holograms.
                    //


                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.
                    if (stationaryReferenceFrame != null)
                    {
                        cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, stationaryReferenceFrame.CoordinateSystem);
                    }

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
                    // Only render world-locked content when positional tracking is active.
                    if (cameraActive)
                    {
                        // Draw the sample hologram.
                        quadRendererR.Render();
                        quadRendererL.Render();

                        if (canCommitDirect3D11DepthBuffer)
                        {
                            // On versions of the platform that support the CommitDirect3D11DepthBuffer API, we can
                            // provide the depth buffer to the system, and it will use depth information to stabilize
                            // the image at a per-pixel level.
                            HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
                            SharpDX.Direct3D11.Texture2D depthBuffer = cameraResources.DepthBufferTexture2D;

                            // Direct3D interop APIs are used to provide the buffer to the WinRT API.
                            SharpDX.DXGI.Resource1 depthStencilResource = depthBuffer.QueryInterface<SharpDX.DXGI.Resource1>();
                            SharpDX.DXGI.Surface2 depthDxgiSurface = new SharpDX.DXGI.Surface2(depthStencilResource, 0);
                            IDirect3DSurface depthD3DSurface = InteropStatics.CreateDirect3DSurface(depthDxgiSurface.NativePointer);
                            if (depthD3DSurface != null)
                            {
                                // Calling CommitDirect3D11DepthBuffer causes the system to queue Direct3D commands to
                                // read the depth buffer. It will then use that information to stabilize the image as
                                // the HolographicFrame is presented.
                                renderingParameters.CommitDirect3D11DepthBuffer(depthD3DSurface);
                            }
                        }
                    }
#endif
                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            });
        }
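
In the stock Windows Holographic (SharpDX) template this method is driven once per frame from the app's main loop, roughly as below; main and the loop structure come from that template pattern, not from this snippet:

            // Assumed per-frame loop from the holographic app template:
            HolographicFrame holographicFrame = main.Update();
            if (main.Render(holographicFrame))
            {
                // Present only when at least one camera was rendered.
                deviceResources.Present(ref holographicFrame);
            }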
Example #9
        protected override void LoadContent()
        {
            base.LoadContent();

            ContentWrapper.Initialize(this);

            _spriteBatch  = new SpriteBatch(GraphicsDevice);
            _lineBatch    = new LineBatch(GraphicsDevice);
            _quadRenderer = new QuadRenderer(GraphicsDevice);

            _input.LoadContent(GraphicsDevice.Viewport);
#if WINDOWS
            _counter.LoadContent();
#endif

            // Create rendertarget for transitions
            PresentationParameters pp = GraphicsDevice.PresentationParameters;
            _transitions.Add(new RenderTarget2D(GraphicsDevice, pp.BackBufferWidth, pp.BackBufferHeight, false, SurfaceFormat.Color, pp.DepthStencilFormat, pp.MultiSampleCount, RenderTargetUsage.DiscardContents));

            _menuScreen = new MenuScreen();

            List<Type> DemosToLoad = new List<Type>();
            Assembly samplesFramework = Assembly.GetExecutingAssembly();
            foreach (Type sampleType in samplesFramework.GetTypes())
            {
                if (sampleType.IsSubclassOf(typeof(PhysicsDemoScreen)))
                {
                    DemosToLoad.Add(sampleType);
                }
            }
            DemosToLoad.Add(DemosToLoad[0]); // HACK: Load the first sample two times, since some delayed creation stuff with the rendertargets always breaks the first preview picture.
            bool firstPreview = true;
            foreach (Type sampleType in DemosToLoad)
            {
                PhysicsDemoScreen demoScreen = samplesFramework.CreateInstance(sampleType.ToString()) as PhysicsDemoScreen;
#if WINDOWS
                if (!firstPreview)
                {
                    Console.WriteLine("Loading demo: " + demoScreen.GetTitle());
                }
#endif
                RenderTarget2D preview = new RenderTarget2D(GraphicsDevice, pp.BackBufferWidth / 2, pp.BackBufferHeight / 2, false, SurfaceFormat.Color, pp.DepthStencilFormat, pp.MultiSampleCount, RenderTargetUsage.DiscardContents);

                demoScreen.Framework = this;
                demoScreen.IsExiting = false;

                demoScreen.Sprites = _spriteBatch;
                demoScreen.Lines   = _lineBatch;
                demoScreen.Quads   = _quadRenderer;

                demoScreen.LoadContent();

                // "Abuse" transition rendertarget to render screen preview
                GraphicsDevice.SetRenderTarget(_transitions[0]);
                GraphicsDevice.Clear(Color.Transparent);

                _quadRenderer.Begin();
                _quadRenderer.Render(Vector2.Zero, new Vector2(_transitions[0].Width, _transitions[0].Height), null, true, ContentWrapper.Grey, Color.White * 0.3f);
                _quadRenderer.End();

                // Update ensures that the screen is fully visible; we "cover" it so that no physics are run.
                demoScreen.Update(new GameTime(demoScreen.TransitionOnTime, demoScreen.TransitionOnTime), true, false);
                demoScreen.Draw(new GameTime());
                demoScreen.Draw(new GameTime());

                GraphicsDevice.SetRenderTarget(preview);
                GraphicsDevice.Clear(Color.Transparent);

                _spriteBatch.Begin();
                _spriteBatch.Draw(_transitions[0], preview.Bounds, Color.White);
                _spriteBatch.End();

                GraphicsDevice.SetRenderTarget(null);

                demoScreen.ExitScreen();
                demoScreen.Update(new GameTime(demoScreen.TransitionOffTime, demoScreen.TransitionOffTime), true, false);
                if (!firstPreview)
                {
                    _menuScreen.AddMenuItem(demoScreen, preview);
                }
                else
                {
                    firstPreview = false;
                }
            }

            AddScreen(new BackgroundScreen());
            AddScreen(_menuScreen);

            //TODO: This can't be called at this point in time in MonoGame
            //ResetElapsedTime();
        }
Example #10
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state. Returns true if the
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(ref HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return false;
            }

            //
            // TODO: Add code for pre-pass rendering here.
            //
            // Take care of any tasks that are not specific to an individual holographic
            // camera. This includes anything that doesn't need the final view or projection
            // matrix, such as lighting maps.
            //

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return deviceResources.UseHolographicCameraResources(
                       (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                    context.ClearRenderTargetView(renderTargetView, transparent);
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    //
                    // TODO: Replace the sample content with your own content.
                    //
                    // Notes regarding holographic content:
                    //    * For drawing, remember that you have the potential to fill twice as many pixels
                    //      in a stereoscopic render target as compared to a non-stereoscopic render target
                    //      of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
                    //      and only draw holograms that the user can see.
                    //    * To help occlude hologram geometry, you can create a depth map using geometry
                    //      data obtained via the surface mapping APIs. You can use this depth map to avoid
                    //      rendering holograms that are intended to be hidden behind tables, walls,
                    //      monitors, and so on.
                    //    * Black pixels will appear transparent to the user wearing the device, but you
                    //      should still use alpha blending to draw semitransparent holograms. You should
                    //      also clear the screen to Transparent as shown above.
                    //


                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.
                    cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrame.CoordinateSystem);

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
                    // Only render world-locked content when positional tracking is active.
                    if (cameraActive)
                    {
                        // Draw the sample hologram.
                        quadRenderer.Render();
                    }
#endif
                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            });
        }