/// <summary>
/// Per-frame render callback: clears the color and depth buffers, delegates
/// scene drawing to the renderer, then presents the back buffer.
/// </summary>
/// <param name="args">Frame timing information supplied by the game loop.</param>
protected override void OnRenderFrame(FrameEventArgs args)
{
    // Wipe both color and depth attachments before drawing the new frame.
    GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);

    // The renderer receives the elapsed frame time for animation purposes.
    _renderer.Render(args.Time);

    // Present the finished frame to the window.
    Context.SwapBuffers();

    base.OnRenderFrame(args);
}
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public bool Render(ref HolographicFrame holographicFrame)
{
    // Nothing to draw until the first Update has run.
    if (timer.FrameCount == 0)
    {
        return false;
    }

    //
    // TODO: Add code for pre-pass rendering here — camera-independent work
    // that needs no view/projection matrix, such as lighting maps.
    //

    // Up-to-date frame predictions enhance the effectiveness of image
    // stabilization and allow more accurate positioning of holograms.
    holographicFrame.UpdateCurrentPrediction();
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Lock the set of holographic camera resources, then draw to each camera
    // pose reported for this frame.
    return deviceResources.UseHolographicCameraResources(
        (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
        {
            bool renderedToAnyCamera = false;

            foreach (var cameraPose in prediction.CameraPoses)
            {
                // Device-based resources for this HolographicCamera.
                CameraResources cameraResources =
                    cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                var context = deviceResources.D3DDeviceContext;
                var renderTargetView = cameraResources.BackBufferRenderTargetView;
                var depthStencilView = cameraResources.DepthStencilView;

                // Bind this camera's back buffer and depth stencil.
                context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                // Clear to transparent black: black pixels appear transparent
                // to the wearer, and semitransparent holograms still rely on
                // alpha blending over this background.
                var clearColor = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                context.ClearRenderTargetView(renderTargetView, clearColor);
                context.ClearDepthStencilView(
                    depthStencilView,
                    SharpDX.Direct3D11.DepthStencilClearFlags.Depth |
                    SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                    1.0f,
                    0);

                //
                // TODO: Replace the sample content with your own content.
                // Stereoscopic targets fill twice the pixels of a mono target
                // of the same resolution — avoid unnecessary or repeated
                // writes, and only draw holograms the user can see.
                //

                // The view and projection matrices change every frame; refresh
                // the constant buffer for the camera indicated by cameraPose.
                cameraResources.UpdateViewProjectionBuffer(
                    deviceResources, cameraPose, referenceFrame.CoordinateSystem);

                // Attach the view/projection constant buffer for this camera;
                // the result indicates whether positional tracking is active.
                bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
                // Only render world-locked content when positional tracking is active.
                if (cameraActive)
                {
                    meshCollectionTexturer.Render();
                }
#endif
                // NOTE(review): set even when cameraActive is false — this
                // matches the original template behavior and is preserved.
                renderedToAnyCamera = true;
            }

            return renderedToAnyCamera;
        });
}