/// <summary>
/// This method is called after the window becomes active. It oversees the
/// update, draw, and present loop, and also oversees window message processing.
/// </summary>
public void Run()
{
    HolographicFrame previousFrame = null;

    while (!windowClosed)
    {
        if (windowVisible && (null != holographicSpace))
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessAllIfPresent);

            HolographicFrame currentFrame = main.Update(previousFrame);

            if (main.Render(currentFrame))
            {
                deviceResources.Present(currentFrame);
            }

            previousFrame = currentFrame;
        }
        else
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessOneAndAllPending);
        }
    }
}
public HolographicGraphicsPresenter(GraphicsDevice device, PresentationParameters presentationParameters, HolographicSpace holographicSpace)
    : base(device, presentationParameters)
{
    if (GraphicsDevice.RenderTargetViewAllocator.DescriptorHeap.Description.DescriptorCount != BufferCount)
    {
        GraphicsDevice.RenderTargetViewAllocator.Dispose();
        GraphicsDevice.RenderTargetViewAllocator = new DescriptorAllocator(GraphicsDevice, DescriptorHeapType.RenderTargetView, descriptorCount: BufferCount);
    }

    using (IDXGIDevice dxgiDevice = GraphicsDevice.NativeDirect3D11Device.QueryInterface<IDXGIDevice>())
    {
        IDirect3DDevice direct3DInteropDevice = Direct3DInterop.CreateDirect3DDevice(dxgiDevice);

        HolographicSpace = holographicSpace;
        HolographicSpace.SetDirect3D11Device(direct3DInteropDevice);
    }

    HolographicDisplay = HolographicDisplay.GetDefault();
    SpatialStationaryFrameOfReference = HolographicDisplay.SpatialLocator.CreateStationaryFrameOfReferenceAtCurrentLocation();
    HolographicFrame = HolographicSpace.CreateNextFrame();
    HolographicSurface = HolographicFrame.GetRenderingParameters(HolographicFrame.CurrentPrediction.CameraPoses[0]).Direct3D11BackBuffer;
    HolographicBackBuffer = GetHolographicBackBuffer();

    renderTarget = CreateRenderTarget();
    direct3D11RenderTarget = CreateDirect3D11RenderTarget();
}
private void sourceUpdate(SpatialInteractionManager manager, SpatialInteractionSourceEventArgs args)
{
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;
    SpatialInteractionSourceLocation pos = args.State.Properties.TryGetLocation(currentCoordinateSystem);

    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // TryGetLocation and TryGetAtTimestamp can return null (for example, when
    // tracking is lost), so guard before dereferencing.
    if (pos == null || !pos.Position.HasValue)
    {
        return;
    }

    // Get the gaze direction relative to the given coordinate system.
    Vector3 headPosition = pos.Position.Value;
    SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
    if (pose == null)
    {
        return;
    }

    SpatialInteractionSource source = args.State.Source;
    Vector3 headDirection = pose.Head.ForwardDirection;

    // The hologram is positioned a fixed distance along the user's gaze direction.
    float distanceFromUser = 0.1f; // meters
    Vector3 gazeTarget = headPosition + (distanceFromUser * headDirection);

    // This will be used as the translation component of the hologram's
    // model transform.
    this.position = gazeTarget;
}
private void UpdateBackBuffer()
{
    IDirect3DSurface surface = HolographicFrame.GetRenderingParameters(HolographicFrame.CurrentPrediction.CameraPoses[0]).Direct3D11BackBuffer;
    IDirect3DDxgiInterfaceAccess surfaceDxgiInterfaceAccess = surface as IDirect3DDxgiInterfaceAccess;
    IntPtr resource = surfaceDxgiInterfaceAccess.GetInterface(ID3D11Resource);

    if (backBuffer == null || backBuffer.NativeResource.NativePointer != resource)
    {
        // Clean up references to previous resources.
        backBuffer?.Dispose();
        LeftEyeBuffer?.Dispose();
        RightEyeBuffer?.Dispose();

        // This can change every frame as the system moves to the next buffer in the
        // swap chain. This mode of operation will occur when certain rendering modes
        // are activated.
        Texture2D d3DBackBuffer = new Texture2D(resource);

        backBuffer = new Texture(GraphicsDevice).InitializeFromImpl(d3DBackBuffer, false);

        LeftEyeBuffer = backBuffer.ToTextureView(new TextureViewDescription { ArraySlice = 0, Type = ViewType.Single });
        RightEyeBuffer = backBuffer.ToTextureView(new TextureViewDescription { ArraySlice = 1, Type = ViewType.Single });
    }

    Description.BackBufferFormat = backBuffer.Format;
    Description.BackBufferWidth = backBuffer.Width;
    Description.BackBufferHeight = backBuffer.Height;
}
/// <summary>
/// Validates the back buffer for each HolographicCamera and recreates
/// resources for back buffers that have changed.
/// Locks the set of holographic camera resources until the function exits.
/// </summary>
public void EnsureCameraResources(HolographicFrame frame, HolographicFramePrediction prediction)
{
    UseHolographicCameraResources(cameraResourcesDictionary =>
    {
        foreach (var pose in prediction.CameraPoses)
        {
            var renderingParameters = frame.GetRenderingParameters(pose);
            var cameraResources = cameraResourcesDictionary[pose.HolographicCamera.Id];

            cameraResources.CreateResourcesForBackBuffer(this, renderingParameters);
        }
    });
}
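Several snippets in this listing funnel all access to the per-camera resource map through a UseHolographicCameraResources helper, as EnsureCameraResources does above. The holographic app templates implement it as a small locking wrapper; the following is a minimal sketch under that assumption, with the lock and dictionary fields as illustrative names rather than code taken from these examples.

// Minimal sketch of the resource-locking helper, assuming the per-camera
// resources live in a Dictionary guarded by a plain lock. Field names are
// illustrative.
private readonly object cameraResourcesLock = new object();
private readonly Dictionary<uint, CameraResources> cameraResourcesDictionary = new Dictionary<uint, CameraResources>();

public TResult UseHolographicCameraResources<TResult>(Func<Dictionary<uint, CameraResources>, TResult> callback)
{
    lock (cameraResourcesLock)
    {
        return callback(cameraResourcesDictionary);
    }
}

public void UseHolographicCameraResources(Action<Dictionary<uint, CameraResources>> callback)
{
    lock (cameraResourcesLock)
    {
        callback(cameraResourcesDictionary);
    }
}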
private ID3D11Texture2D GetHolographicBackBuffer()
{
    HolographicSurface = HolographicFrame.GetRenderingParameters(HolographicFrame.CurrentPrediction.CameraPoses[0]).Direct3D11BackBuffer;

    using IDXGISurface surface = Direct3DInterop.CreateDXGISurface(HolographicSurface);

    ID3D11Texture2D d3DBackBuffer = new ID3D11Texture2D(surface.NativePointer);

    PresentationParameters.BackBufferFormat = (PixelFormat)d3DBackBuffer.Description.Format;
    PresentationParameters.BackBufferWidth = d3DBackBuffer.Description.Width;
    PresentationParameters.BackBufferHeight = d3DBackBuffer.Description.Height;

    return d3DBackBuffer;
}
/// <summary>
/// Present the contents of the swap chain to the screen.
/// Locks the set of holographic camera resources until the function exits.
/// </summary>
public void Present(HolographicFrame frame)
{
    var presentResult = frame.PresentUsingCurrentPrediction(
        HolographicFramePresentWaitBehavior.DoNotWaitForFrameToFinish);

    // The PresentUsingCurrentPrediction API will detect when the graphics device
    // changes or becomes invalid. When this happens, it is considered a Direct3D
    // device lost scenario.
    if (presentResult == HolographicFramePresentResult.DeviceRemoved)
    {
        // The Direct3D device, context, and resources should be recreated.
        HandleDeviceLost();
    }
}
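HandleDeviceLost is called in several of these examples but never shown. In the DirectX holographic templates it tears down and rebuilds the device-dependent state; the sketch below follows that pattern, with the notifier interface and resource-creation helper as assumed names rather than APIs confirmed by the snippets here.

// Hypothetical device-lost recovery, modeled on the holographic DirectX
// templates. deviceNotify, CreateDeviceResources, and the notifier methods
// are assumptions.
private void HandleDeviceLost()
{
    // Let renderers drop their references to the old device...
    deviceNotify?.OnDeviceLost();

    // ...recreate the Direct3D device and device-dependent resources...
    CreateDeviceResources();

    // ...then let renderers rebuild on the new device.
    deviceNotify?.OnDeviceRestored();
}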
/// <inheritdoc />
private void OnPreCull()
{
    if (!ReadingModeEnabled)
    {
        return;
    }

    const float ResolutionScale = 45.0f / 33.0f;

    StartCoroutine(ResetViewMatricesOnFrameEnd());

    Matrix4x4 leftProj = CameraCache.Main.GetStereoProjectionMatrix(Camera.StereoscopicEye.Left);
    Matrix4x4 rightProj = CameraCache.Main.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right);

    leftProj.m00 *= ResolutionScale;
    leftProj.m11 *= ResolutionScale;
    rightProj.m00 *= ResolutionScale;
    rightProj.m11 *= ResolutionScale;

    CameraCache.Main.SetStereoProjectionMatrix(Camera.StereoscopicEye.Left, leftProj);
    CameraCache.Main.SetStereoProjectionMatrix(Camera.StereoscopicEye.Right, rightProj);

    HolographicFrame holographicFrame = WindowsMixedRealityUtilities.CurrentWindowsHolographicFrame;
    if (holographicFrame != null)
    {
        HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

        for (int i = 0; i < prediction.CameraPoses.Count; ++i)
        {
            HolographicCameraPose cameraPose = prediction.CameraPoses[i];
            if (cameraPose.HolographicCamera.CanOverrideViewport)
            {
                HolographicStereoTransform stereoProjection = cameraPose.ProjectionTransform;

                stereoProjection.Left.M11 *= ResolutionScale;
                stereoProjection.Left.M22 *= ResolutionScale;
                stereoProjection.Right.M11 *= ResolutionScale;
                stereoProjection.Right.M22 *= ResolutionScale;

                cameraPose.OverrideProjectionTransform(stereoProjection);
            }
        }
    }
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // TODO: This loop is intentionally empty in this sample. Once there is
        // content to stabilize, call
        // holographicFrame.GetRenderingParameters(cameraPose).SetFocusPoint(...) here.
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// This method is called after the window becomes active. It oversees the
/// update, draw, and present loop, and also oversees window message processing.
/// </summary>
public void Run()
{
    while (!this.windowClosed)
    {
        if (this.windowVisible && (null != this.holographicSpace))
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessAllIfPresent);

            // UpdateAndDraw creates, renders, and presents its own
            // HolographicFrame, so no frame is created here.
            this.main.UpdateAndDraw();
        }
        else
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessOneAndAllPending);
        }
    }
}
/// <summary>
/// Present the contents of the swap chain to the screen.
/// Locks the set of holographic camera resources until the function exits.
/// </summary>
public void Present(ref HolographicFrame frame)
{
    // By default, this API waits for the frame to finish before it returns.
    // Holographic apps should wait for the previous frame to finish before
    // starting work on a new frame. This allows for better results from
    // holographic frame predictions.
    var presentResult = frame.PresentUsingCurrentPrediction(
        HolographicFramePresentWaitBehavior.WaitForFrameToFinish);

    // The PresentUsingCurrentPrediction API will detect when the graphics device
    // changes or becomes invalid. When this happens, it is considered a Direct3D
    // device lost scenario.
    if (presentResult == HolographicFramePresentResult.DeviceRemoved)
    {
        // The Direct3D device, context, and resources should be recreated.
        HandleDeviceLost();
    }
}
void IRenderLoopHost.OnRenderLoop_PrepareRendering(EngineDevice device)
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = m_holoSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = m_referenceFrame.CoordinateSystem;
}
/// <summary>
/// This method is called after the window becomes active. It oversees the
/// update, draw, and present loop, and also oversees window message processing.
/// </summary>
public void Run()
{
    CoreWindow window = CoreWindow.GetForCurrentThread();
    window.Activate();

    while (!windowClosed)
    {
        if (createShaders)
        {
            if (!createdShaders)
            {
                main.CreateVideoShaders();
                createdShaders = true;
            }
        }
        else
        {
            window.Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessOneAndAllPending);
            continue;
        }

        if (appViewConsolidated || mainWindowReactivated)
        {
            TryCloseWindow();
            windowClosed = true;
        }
        else
        {
            window.Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessAllIfPresent);

            if (windowVisible && (null != holographicSpace))
            {
                HolographicFrame frame = main.Update();

                if (main.Render(ref frame))
                {
                    deviceResources.Present(ref frame);
                }
            }
        }
    }
}
/// <summary>
/// Present the contents of the swap chain to the screen.
/// Locks the set of holographic camera resources until the function exits.
/// </summary>
public void Present(ref HolographicFrame frame)
{
    // By default, this API waits for the frame to finish before it returns.
    // Holographic apps should wait for the previous frame to finish before
    // starting work on a new frame. This allows for better results from
    // holographic frame predictions.
    var presentResult = frame.PresentUsingCurrentPrediction(
        HolographicFramePresentWaitBehavior.WaitForFrameToFinish);

    HolographicFramePrediction prediction = frame.CurrentPrediction;

    UseHolographicCameraResources((Dictionary<uint, CameraResources> cameraResourcesDictionary) =>
    {
        foreach (var cameraPose in prediction.CameraPoses)
        {
            // This represents the device-based resources for a HolographicCamera.
            CameraResources cameraResources = cameraResourcesDictionary[cameraPose.HolographicCamera.Id];

            // Discard the contents of the render target.
            // This is a valid operation only when the existing contents will be
            // entirely overwritten. If dirty or scroll rects are used, this call
            // should be removed.
            d3dContext.DiscardView(cameraResources.BackBufferRenderTargetView);

            // Discard the contents of the depth stencil.
            d3dContext.DiscardView(cameraResources.DepthStencilView);
        }
    });

    // The PresentUsingCurrentPrediction API will detect when the graphics device
    // changes or becomes invalid. When this happens, it is considered a Direct3D
    // device lost scenario.
    if (presentResult == HolographicFramePresentResult.DeviceRemoved)
    {
        // The Direct3D device, context, and resources should be recreated.
        HandleDeviceLost();
    }
}
/// <summary>
/// This method is called after the window becomes active. It oversees the
/// update, draw, and present loop, and also oversees window message processing.
/// </summary>
public void Run()
{
    Debug.WriteLine("VRPlayer.AppView.Run");

    while (!windowClosed)
    {
        if (windowVisible && (null != holographicSpace))
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessAllIfPresent);

            HolographicFrame frame = main.Update();

            if (main.Render(ref frame))
            {
                deviceResources.Present(ref frame);
            }
        }
        else
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessOneAndAllPending);
        }
    }
}
public override void Present()
{
    GraphicsDevice.NativeDirect3D11Device.ImmediateContext.CopyResource(direct3D11RenderTarget, HolographicBackBuffer);

    HolographicFrame.PresentUsingCurrentPrediction();
}
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public bool Render(ref HolographicFrame holographicFrame)
{
    // Don't try to render anything before the first Update.
    if (timer.FrameCount == 0)
    {
        return false;
    }

    //
    // TODO: Add code for pre-pass rendering here.
    //
    // Take care of any tasks that are not specific to an individual holographic
    // camera. This includes anything that doesn't need the final view or projection
    // matrix, such as lighting maps.
    //

    // Up-to-date frame predictions enhance the effectiveness of image stabilization and
    // allow more accurate positioning of holograms.
    holographicFrame.UpdateCurrentPrediction();
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Lock the set of holographic camera resources, then draw to each camera
    // in this frame.
    return deviceResources.UseHolographicCameraResources(
        (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
    {
        bool atLeastOneCameraRendered = false;

        foreach (var cameraPose in prediction.CameraPoses)
        {
            // This represents the device-based resources for a HolographicCamera.
            CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

            // Get the device context.
            var context = deviceResources.D3DDeviceContext;
            var renderTargetView = cameraResources.BackBufferRenderTargetView;
            var depthStencilView = cameraResources.DepthStencilView;

            // Set render targets to the current holographic camera.
            context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

            // Clear the back buffer and depth stencil view.
            SharpDX.Mathematics.Interop.RawColor4 transparent =
                new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
            context.ClearRenderTargetView(renderTargetView, transparent);
            context.ClearDepthStencilView(
                depthStencilView,
                SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                1.0f,
                0);

            // The view and projection matrices for each holographic camera will change
            // every frame. This function refreshes the data in the constant buffer for
            // the holographic camera indicated by cameraPose.
            SpatialCoordinateSystem referenceFrameCoordinateSystem =
                referenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);
            if (referenceFrameCoordinateSystem == null)
            {
                continue;
            }

            cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrameCoordinateSystem);

            // Attach the view/projection constant buffer for this camera to the graphics pipeline.
            bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

            // Only render world-locked content when positional tracking is active.
            if (cameraActive)
            {
                foreach (var entity in entities)
                {
                    entity.Render();
                }
            }

            atLeastOneCameraRendered = true;
        }

        return atLeastOneCameraRendered;
    });
}
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public bool Render(HolographicFrame holographicFrame)
{
    // Don't try to render anything before the first Update.
    if (timer.FrameCount == 0)
    {
        return false;
    }

    //
    // TODO: Add code for pre-pass rendering here.
    //
    // Take care of any tasks that are not specific to an individual holographic
    // camera. This includes anything that doesn't need the final view or projection
    // matrix, such as lighting maps.
    //

    // Up-to-date frame predictions enhance the effectiveness of image stabilization and
    // allow more accurate positioning of holograms.
    holographicFrame.UpdateCurrentPrediction();
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Lock the set of holographic camera resources, then draw to each camera
    // in this frame.
    return deviceResources.UseHolographicCameraResources(
        (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
    {
        bool atLeastOneCameraRendered = false;

        foreach (var cameraPose in prediction.CameraPoses)
        {
            // This represents the device-based resources for a HolographicCamera.
            CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

            // Get the device context.
            var context = deviceResources.D3DDeviceContext;
            var renderTargetView = cameraResources.BackBufferRenderTargetView;
            var depthStencilView = cameraResources.DepthStencilView;

            // Set render targets to the current holographic camera.
            context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

            // Clear the back buffer and depth stencil view.
            if (canGetHolographicDisplayForCamera && cameraPose.HolographicCamera.Display.IsOpaque)
            {
                SharpDX.Mathematics.Interop.RawColor4 cornflowerBlue =
                    new SharpDX.Mathematics.Interop.RawColor4(0.392156899f, 0.58431375f, 0.929411829f, 1.0f);
                context.ClearRenderTargetView(renderTargetView, cornflowerBlue);
            }
            else
            {
                SharpDX.Mathematics.Interop.RawColor4 transparent =
                    new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                context.ClearRenderTargetView(renderTargetView, transparent);
            }

            context.ClearDepthStencilView(
                depthStencilView,
                SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                1.0f,
                0);

            //
            // TODO: Replace the sample content with your own content.
            //
            // Notes regarding holographic content:
            // * For drawing, remember that you have the potential to fill twice as many pixels
            //   in a stereoscopic render target as compared to a non-stereoscopic render target
            //   of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
            //   and only draw holograms that the user can see.
            // * To help occlude hologram geometry, you can create a depth map using geometry
            //   data obtained via the surface mapping APIs. You can use this depth map to avoid
            //   rendering holograms that are intended to be hidden behind tables, walls,
            //   monitors, and so on.
            // * On HolographicDisplays that are transparent, black pixels will appear transparent
            //   to the user. On such devices, you should clear the screen to Transparent as shown
            //   above. You should still use alpha blending to draw semitransparent holograms.
            //

            // The view and projection matrices for each holographic camera will change
            // every frame. This function refreshes the data in the constant buffer for
            // the holographic camera indicated by cameraPose.
            if (stationaryReferenceFrame != null)
            {
                cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, stationaryReferenceFrame.CoordinateSystem);
            }

            // Attach the view/projection constant buffer for this camera to the graphics pipeline.
            bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
            // Only render world-locked content when positional tracking is active.
            if (cameraActive)
            {
                // Draw the sample hologram.
                spinningCubeRenderer.Render();

                if (canCommitDirect3D11DepthBuffer)
                {
                    // On versions of the platform that support the CommitDirect3D11DepthBuffer API, we can
                    // provide the depth buffer to the system, and it will use depth information to stabilize
                    // the image at a per-pixel level.
                    HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
                    SharpDX.Direct3D11.Texture2D depthBuffer = cameraResources.DepthBufferTexture2D;

                    // Direct3D interop APIs are used to provide the buffer to the WinRT API.
                    SharpDX.DXGI.Resource1 depthStencilResource = depthBuffer.QueryInterface<SharpDX.DXGI.Resource1>();
                    SharpDX.DXGI.Surface2 depthDxgiSurface = new SharpDX.DXGI.Surface2(depthStencilResource, 0);
                    IDirect3DSurface depthD3DSurface = InteropStatics.CreateDirect3DSurface(depthDxgiSurface.NativePointer);

                    if (depthD3DSurface != null)
                    {
                        // Calling CommitDirect3D11DepthBuffer causes the system to queue Direct3D commands to
                        // read the depth buffer. It will then use that information to stabilize the image as
                        // the HolographicFrame is presented.
                        renderingParameters.CommitDirect3D11DepthBuffer(depthD3DSurface);
                    }
                }
            }
#endif

            atLeastOneCameraRendered = true;
        }

        return atLeastOneCameraRendered;
    });
}
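The InteropStatics.CreateDirect3DSurface call above wraps the documented d3d11.dll export CreateDirect3D11SurfaceFromDXGISurface, which converts a raw DXGI surface pointer into a WinRT IDirect3DSurface. A plausible shape for that wrapper is sketched below; the P/Invoke target is a real export, but the surrounding class is an assumption, not code from these examples.

// Sketch of the DXGI-to-WinRT interop used by the depth-buffer commit above.
// Requires: using System;
//           using System.Runtime.InteropServices;
//           using Windows.Graphics.DirectX.Direct3D11;
internal static class InteropStatics
{
    // Documented export from d3d11.dll; returns the surface as an IInspectable pointer.
    [DllImport("d3d11.dll", EntryPoint = "CreateDirect3D11SurfaceFromDXGISurface", CallingConvention = CallingConvention.StdCall)]
    private static extern int CreateDirect3D11SurfaceFromDXGISurface(IntPtr dxgiSurface, out IntPtr graphicsSurface);

    public static IDirect3DSurface CreateDirect3DSurface(IntPtr dxgiSurfacePointer)
    {
        int hr = CreateDirect3D11SurfaceFromDXGISurface(dxgiSurfacePointer, out IntPtr inspectable);
        if (hr != 0)
        {
            return null;
        }

        // Hand the IInspectable pointer to the WinRT projection, then release
        // the raw reference we own.
        IDirect3DSurface surface = (IDirect3DSurface)Marshal.GetObjectForIUnknown(inspectable);
        Marshal.Release(inspectable);
        return surface;
    }
}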
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
    if (stationaryReferenceFrame != null)
    {
        // Check for new input state since the last frame.
        for (int i = 0; i < gamepads.Count; ++i)
        {
            bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
            if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
            {
                pointerPressed = true;
            }
            gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
        }

        SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
        SpatialPointerPose pose = null;
        if (null != pointerState)
        {
            pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
        }
        else if (pointerPressed)
        {
            pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
        }
        pointerPressed = false;

        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        spinningCubeRenderer.PositionHologram(pose);
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
#endif
    });

    if (!canCommitDirect3D11DepthBuffer)
    {
        // On versions of the platform that do not support the CommitDirect3D11DepthBuffer API, we can control
        // image stabilization by setting a focus point with optional plane normal and velocity.
        foreach (var cameraPose in prediction.CameraPoses)
        {
#if DRAW_SAMPLE_CONTENT
            // The HolographicCameraRenderingParameters class provides access to set
            // the image stabilization parameters.
            HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

            // SetFocusPoint informs the system about a specific point in your scene to
            // prioritize for image stabilization. The focus point is set independently
            // for each holographic camera. When setting the focus point, put it on or
            // near content that the user is looking at.
            // In this example, we put the focus point at the center of the sample hologram.
            // You can also set the relative velocity and facing of the stabilization
            // plane using overloads of this method.
            if (stationaryReferenceFrame != null)
            {
                renderingParameters.SetFocusPoint(
                    stationaryReferenceFrame.CoordinateSystem,
                    spinningCubeRenderer.Position);
            }
#endif
        }
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update(HolographicFrame previousFrame)
{
    // TODO: Put CPU work that does not depend on the HolographicCameraPose here.

    // Apps should wait for the optimal time to begin pose-dependent work.
    // The platform will automatically adjust the wakeup time to get
    // the lowest possible latency at high frame rates. For manual
    // control over latency, use the WaitForNextFrameReadyWithHeadStart API.
    //
    // WaitForNextFrameReady and WaitForNextFrameReadyWithHeadStart are the
    // preferred frame synchronization APIs for Windows Mixed Reality. When
    // running on older versions of the OS that do not include support for
    // these APIs, your app can use the WaitForFrameToFinish API for similar
    // (but not as optimal) behavior.
    if (canUseWaitForNextFrameReadyAPI)
    {
        try
        {
            holographicSpace.WaitForNextFrameReady();
        }
        catch (NotImplementedException)
        {
            // Catch a specific case where WaitForNextFrameReady() is present but not implemented
            // and default back to WaitForFrameToFinish() in that case.
            canUseWaitForNextFrameReadyAPI = false;
        }
    }
    else if (previousFrame != null)
    {
        previousFrame.WaitForFrameToFinish();
    }

    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
    if (stationaryReferenceFrame != null)
    {
        // Check for new input state since the last frame.
        for (int i = 0; i < gamepads.Count; ++i)
        {
            bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
            if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
            {
                pointerPressed = true;
            }
            gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
        }

        SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
        SpatialPointerPose pose = null;
        if (null != pointerState)
        {
            pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
        }
        else if (pointerPressed)
        {
            pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
        }
        pointerPressed = false;

        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        quadRendererR.PositionHologram(pose);
        quadRendererL.PositionHologram(pose);
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        quadRendererR.Update(timer);
        quadRendererL.Update(timer);
#endif
    });

    // On HoloLens 2, the platform can achieve better image stabilization results if it has
    // a stabilization plane and a depth buffer.
    // Note that the SetFocusPoint API includes an override which takes velocity as a
    // parameter. This is recommended for stabilizing holograms in motion.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera. When setting the focus point, put it on or
        // near content that the user is looking at.
        // In this example, we put the focus point at the center of the sample hologram.
        // You can also set the relative velocity and facing of the stabilization
        // plane using overloads of this method.
        if (stationaryReferenceFrame != null)
        {
            renderingParameters.SetFocusPoint(
                stationaryReferenceFrame.CoordinateSystem,
                new System.Numerics.Vector3(0, 0, 0));
        }
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
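The canUseWaitForNextFrameReadyAPI flag used above can be initialized once at startup with a WinRT metadata probe, so the frame loop only needs the NotImplementedException fallback for the edge case where the method is present but not implemented. For example:

// One-time capability check for the newer frame-synchronization API.
// ApiInformation lives in Windows.Foundation.Metadata.
canUseWaitForNextFrameReadyAPI = Windows.Foundation.Metadata.ApiInformation.IsMethodPresent(
    "Windows.Graphics.Holographic.HolographicSpace", "WaitForNextFrameReady");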
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public void UpdateAndDraw()
{
    HolographicFrame holographicFrame = this.holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    this.deviceResources.EnsureCameraResources(holographicFrame, prediction);

    this.UpdateEyeProperties();

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = this.ReferenceFrame.CoordinateSystem;

    var eyeTexture = this.eyesProperties[0].Texture;
    this.deviceResources.UpdateCameraClipDistance(eyeTexture.NearPlane, eyeTexture.FarPlane);

    // Up-to-date frame predictions enhance the effectiveness of image stabilization and
    // allow more accurate positioning of holograms. Refresh the prediction once,
    // right before it is consumed below.
    holographicFrame.UpdateCurrentPrediction();
    prediction = holographicFrame.CurrentPrediction;

    foreach (var cameraPose in prediction.CameraPoses)
    {
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at,
        // and you can also supply the content's facing normal and relative velocity
        // through the overloads used below.
        if (this.mixedRealityService.FocusPosition.HasValue)
        {
            var position = this.mixedRealityService.FocusPosition.Value;

            if (!this.mixedRealityService.FocusNormal.HasValue)
            {
                renderingParameters.SetFocusPoint(currentCoordinateSystem, new System.Numerics.Vector3(position.X, position.Y, position.Z));
            }
            else
            {
                var normal = this.mixedRealityService.FocusNormal.Value;

                if (!this.mixedRealityService.FocusVelocity.HasValue)
                {
                    renderingParameters.SetFocusPoint(
                        currentCoordinateSystem,
                        new System.Numerics.Vector3(position.X, position.Y, position.Z),
                        new System.Numerics.Vector3(normal.X, normal.Y, normal.Z));
                }
                else
                {
                    var velocity = this.mixedRealityService.FocusVelocity.Value;

                    renderingParameters.SetFocusPoint(
                        currentCoordinateSystem,
                        new System.Numerics.Vector3(position.X, position.Y, position.Z),
                        new System.Numerics.Vector3(normal.X, normal.Y, normal.Z),
                        new System.Numerics.Vector3(velocity.X, velocity.Y, velocity.Z));
                }
            }
        }

        var pointerPose = SpatialPointerPose.TryGetAtTimestamp(this.ReferenceFrame.CoordinateSystem, prediction.Timestamp);
        if (pointerPose != null)
        {
            pointerPose.Head.Position.ToWave(out this.headRay.Position);
            pointerPose.Head.ForwardDirection.ToWave(out this.headRay.Direction);
        }

        var viewTransform = cameraPose.TryGetViewTransform(this.ReferenceFrame.CoordinateSystem);
        var projectionTransform = cameraPose.ProjectionTransform;

        if (viewTransform.HasValue)
        {
            for (int i = 0; i < 2; i++)
            {
                Matrix viewMatrix;
                Matrix projectionMatrix;

                if (i == (int)VREyeType.LeftEye)
                {
                    viewTransform.Value.Left.ToWave(out viewMatrix);
                    projectionTransform.Left.ToWave(out projectionMatrix);
                }
                else
                {
                    viewTransform.Value.Right.ToWave(out viewMatrix);
                    projectionTransform.Right.ToWave(out projectionMatrix);
                }

                Matrix view;
                Matrix.Invert(ref viewMatrix, out view);

                var eyeProperties = this.eyesProperties[i];
                var eyePose = eyeProperties.Pose;
                eyePose.Position = view.Translation;
                Quaternion.CreateFromRotationMatrix(ref view, out eyePose.Orientation);
                eyeProperties.Pose = eyePose;
                eyeProperties.Projection = projectionMatrix;
            }

            var leftEyePose = this.eyesProperties[(int)VREyeType.LeftEye].Pose;
            var rightEyePose = this.eyesProperties[(int)VREyeType.RightEye].Pose;
            var centerEyeProperties = this.eyesProperties[(int)VREyeType.CenterEye];

            var centerEyePose = centerEyeProperties.Pose;
            centerEyePose.Position = Vector3.Lerp(leftEyePose.Position, rightEyePose.Position, 0.5f);
            centerEyePose.Orientation = Quaternion.Lerp(leftEyePose.Orientation, rightEyePose.Orientation, 0.5f);
            centerEyeProperties.Pose = centerEyePose;
        }
    }

    this.Render();

    this.deviceResources.Present(ref holographicFrame);
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem =
        attachreferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp); // referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
    // Check for new input state since the last frame.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (null != pointerState)
    {
        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        spinningCubeRenderer.PositionHologram(
            // pointerState.TryGetPointerPose(currentCoordinateSystem)
            SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp));
    }

    //var downstate = Windows.UI.Core.CoreVirtualKeyStates.Down;
    //bool rightclick = (Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape) & downstate) == downstate;
    //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.Escape " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape).ToString() + " downstate" + downstate);
    //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.A " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.A).ToString() + " downstate" + downstate);
    //if (rightclick)
    //{
    //    Windows.UI.ViewManagement.ApplicationViewSwitcher.SwitchAsync(VideoGallery.mainId, VideoGallery.appId, Windows.UI.ViewManagement.ApplicationViewSwitchingOptions.ConsolidateViews);
    //}
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
#endif
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at.
        // In this example, we put the focus point at the center of the sample hologram,
        // since that is the only hologram available for the user to focus on.
        // You can also set the relative velocity and facing of that content; the sample
        // hologram is at a fixed point so we only need to indicate its position.
        Vector3 position = new Vector3(0.0f, 0.0f, -3.0f);
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            position /*spinningCubeRenderer.Position*/);
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
public unsafe void Run()
{
    ReferenceFrame = SpatialLocator.GetDefault().CreateStationaryFrameOfReferenceAtCurrentLocation();
    CoreWindow.GetForCurrentThread().CustomProperties.Add("HolographicSpace", HolographicSpace);
    InitializeSpace();

    InteractionManager = SpatialInteractionManager.GetForCurrentView();
    InteractionManager.InteractionDetected += (s, e) => GesturesManager?.HandleInteraction(e.Interaction);

    while (!windowClosed)
    {
        if (!appInited)
        {
            SpatialMappingManager = new SpatialMappingManager();
            VoiceManager = new VoiceManager();
            appInited = true;
            Game = (HoloApplication)Activator.CreateInstance(holoAppType, assetsDirectory);
            Game.Run();
            Game.Engine.PostUpdate += e => currentFrame?.UpdateCurrentPrediction();
            GesturesManager = new GesturesManager(Game, ReferenceFrame);
        }

        if (windowVisible && (null != HolographicSpace))
        {
            if (Game != null)
            {
                currentFrame = HolographicSpace.CreateNextFrame();

                var prediction = currentFrame.CurrentPrediction;
                if (prediction.CameraPoses.Count < 1)
                {
                    continue;
                }

                var cameraPose = prediction.CameraPoses[0];
                var viewBox = cameraPose.TryGetViewTransform(ReferenceFrame.CoordinateSystem);
                if (viewBox != null)
                {
                    Matrix4x4 leftViewMatrixDx = viewBox.Value.Left;
                    Matrix4x4 rightViewMatrixDx = viewBox.Value.Right;
                    Matrix4x4 leftProjMatrixDx = cameraPose.ProjectionTransform.Left;
                    Matrix4x4 rightProjMatrixDx = cameraPose.ProjectionTransform.Right;

                    // Reinterpret the System.Numerics matrices as Urho matrices;
                    // both have the same memory layout, so a pointer cast suffices.
                    Matrix4 leftViewMatrixUrho = *(Matrix4*)(void*)&leftViewMatrixDx;
                    Matrix4 rightViewMatrixUrho = *(Matrix4*)(void*)&rightViewMatrixDx;
                    Matrix4 leftProjMatrixUrho = *(Matrix4*)(void*)&leftProjMatrixDx;
                    Matrix4 rightProjMatrixUrho = *(Matrix4*)(void*)&rightProjMatrixDx;
                    Game.UpdateStereoView(leftViewMatrixUrho, rightViewMatrixUrho, leftProjMatrixUrho, rightProjMatrixUrho);
                }

                var parameters = currentFrame.GetRenderingParameters(cameraPose);
                if (Game.FocusWorldPoint != Vector3.Zero)
                {
                    parameters.SetFocusPoint(ReferenceFrame.CoordinateSystem,
                        new System.Numerics.Vector3(
                            Game.FocusWorldPoint.X,
                            Game.FocusWorldPoint.Y,
                            -Game.FocusWorldPoint.Z)); // LH -> RH
                }

                Game.Engine.RunFrame();
                currentFrame.PresentUsingCurrentPrediction(HolographicFramePresentWaitBehavior.WaitForFrameToFinish);
            }

            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessAllIfPresent);
        }
        else
        {
            CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessOneAndAllPending);
        }
    }
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
    // Check for new input state since the last frame.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (null != pointerState)
    {
        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        spinningCubeRenderer.PositionHologram(
            pointerState.TryGetPointerPose(currentCoordinateSystem));
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
        _spatialSurfaceRenderer.Update();
#endif
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at.
        // In this example, we put the focus point at the center of the sample hologram,
        // since that is the only hologram available for the user to focus on.
        // You can also set the relative velocity and facing of that content; the sample
        // hologram is at a fixed point so we only need to indicate its position.
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            spinningCubeRenderer.Position);
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
public override void BeginDraw(CommandList commandList)
{
    HolographicFrame = holographicSpace.CreateNextFrame();

    UpdateBackBuffer();
}
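Together with the Present overrides shown elsewhere in this listing (one of which copies the render target into the holographic back buffer before presenting), BeginDraw brackets each frame for this presenter: it acquires the next HolographicFrame and revalidates the back buffer before the app records its draw calls. A hypothetical per-frame sequence is sketched below; the loop, presenter variable, and DrawScene helper are illustrative names, not APIs from these examples.

// Hypothetical per-frame usage of the presenter's overrides.
while (running)
{
    presenter.BeginDraw(commandList); // CreateNextFrame + UpdateBackBuffer
    DrawScene(commandList);           // app rendering into the render target
    presenter.Present();              // copy to the holographic back buffer and present
}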
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update()
{
    if (MediaPlayer.IsEndOfStream)
    {
        ViewManagement.SwitchTo2DViewAsync();
    }

    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

    var gestureStatus = spatialInputHandler.CheckGestureStatus();
    switch (gestureStatus)
    {
        case SpatialGestureSettings.Tap:
            videoRenderer.Pause();
            break;

        case SpatialGestureSettings.DoubleTap:
            videoRenderer.Stop();
            ViewManagement.SwitchTo2DViewAsync();
            break;

        case SpatialGestureSettings.Hold:
            videoRenderer.FastForward(spatialInputHandler.HoldTotalTime.TotalSeconds);
            break;

        case SpatialGestureSettings.None:
        case SpatialGestureSettings.ManipulationTranslate:
        case SpatialGestureSettings.NavigationX:
        case SpatialGestureSettings.NavigationY:
        case SpatialGestureSettings.NavigationZ:
        case SpatialGestureSettings.NavigationRailsX:
        case SpatialGestureSettings.NavigationRailsY:
        case SpatialGestureSettings.NavigationRailsZ:
        default:
            break;
    }

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
        videoRenderer.Update(timer);
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at.
        // In this example, we put the focus point at the center of the sample hologram,
        // since that is the only hologram available for the user to focus on.
        // You can also set the relative velocity and facing of that content; the sample
        // hologram is at a fixed point so we only need to indicate its position.
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            videoRenderer.Position);
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used
    // for creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

    SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);

    ProcessFrame(currentCoordinateSystem);

    if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp > faceTimeThreshold)
    {
        if (pose != null)
        {
            var headPosition = pose.Head.Position;
            var headForward = pose.Head.ForwardDirection;
            quadRenderer.TargetPosition = headPosition + (2.0f * headForward);
        }

        textRenderer.RenderTextOffscreen("No faces detected");
    }

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
        quadRenderer.Update(pose, timer);
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at.
        // In this example, we put the focus point at the center of the face-tracking
        // quad, and also supply its facing and velocity through the overload below.
        if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp <= faceTimeThreshold)
        {
            renderingParameters.SetFocusPoint(
                currentCoordinateSystem,
                quadRenderer.Position,
                quadRenderer.Forward,
                quadRenderer.Velocity);
        }
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public bool Render(ref HolographicFrame holographicFrame)
{
    // Don't try to render anything before the first Update.
    if (timer.FrameCount == 0)
    {
        return false;
    }

    //
    // TODO: Add code for pre-pass rendering here.
    //
    // Take care of any tasks that are not specific to an individual holographic
    // camera. This includes anything that doesn't need the final view or projection
    // matrix, such as lighting maps.
    //

    // Up-to-date frame predictions enhance the effectiveness of image stabilization and
    // allow more accurate positioning of holograms.
    holographicFrame.UpdateCurrentPrediction();
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Lock the set of holographic camera resources, then draw to each camera
    // in this frame.
    return deviceResources.UseHolographicCameraResources(
        (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
    {
        bool atLeastOneCameraRendered = false;

        foreach (var cameraPose in prediction.CameraPoses)
        {
            // This represents the device-based resources for a HolographicCamera.
            CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

            // Get the device context.
            var context = deviceResources.D3DDeviceContext;
            var renderTargetView = cameraResources.BackBufferRenderTargetView;
            var depthStencilView = cameraResources.DepthStencilView;

            // Set render targets to the current holographic camera.
            context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

            // Clear the back buffer and depth stencil view.
            SharpDX.Mathematics.Interop.RawColor4 transparent =
                new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
            context.ClearRenderTargetView(renderTargetView, transparent);
            context.ClearDepthStencilView(
                depthStencilView,
                SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                1.0f,
                0);

            //
            // TODO: Replace the sample content with your own content.
            //
            // Notes regarding holographic content:
            // * For drawing, remember that you have the potential to fill twice as many pixels
            //   in a stereoscopic render target as compared to a non-stereoscopic render target
            //   of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
            //   and only draw holograms that the user can see.
            // * To help occlude hologram geometry, you can create a depth map using geometry
            //   data obtained via the surface mapping APIs. You can use this depth map to avoid
            //   rendering holograms that are intended to be hidden behind tables, walls,
            //   monitors, and so on.
            // * Black pixels will appear transparent to the user wearing the device, but you
            //   should still use alpha blending to draw semitransparent holograms. You should
            //   also clear the screen to Transparent as shown above.
            //

            // The view and projection matrices for each holographic camera will change
            // every frame. This function refreshes the data in the constant buffer for
            // the holographic camera indicated by cameraPose.
            cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrame.CoordinateSystem);

            // Attach the view/projection constant buffer for this camera to the graphics pipeline.
            bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

            atLeastOneCameraRendered = true;
        }

        return atLeastOneCameraRendered;
    });
}
/// <summary>
/// Renders the current frame to each holographic display, according to the
/// current application and spatial positioning state. Returns true if the
/// frame was rendered to at least one display.
/// </summary>
public bool Render(ref HolographicFrame holographicFrame)
{
    // Don't try to render anything before the first Update.
    if (timer.FrameCount == 0)
    {
        return false;
    }

    // Up-to-date frame predictions enhance the effectiveness of image stabilization and
    // allow more accurate positioning of holograms.
    holographicFrame.UpdateCurrentPrediction();
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Lock the set of holographic camera resources, then draw to each camera
    // in this frame.
    return deviceResources.UseHolographicCameraResources(
        (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
    {
        bool atLeastOneCameraRendered = false;

        foreach (var cameraPose in prediction.CameraPoses)
        {
            // This represents the device-based resources for a HolographicCamera.
            CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

            // Get the device context.
            var context = deviceResources.D3DDeviceContext;
            var renderTargetView = cameraResources.BackBufferRenderTargetView;
            var depthStencilView = cameraResources.DepthStencilView;

            // Set render targets to the current holographic camera.
            context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

            // Clear the back buffer and depth stencil view.
            SharpDX.Mathematics.Interop.RawColor4 transparent =
                new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
            context.ClearRenderTargetView(renderTargetView, transparent);
            context.ClearDepthStencilView(
                depthStencilView,
                SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                1.0f,
                0);

            // The view and projection matrices for each holographic camera will change
            // every frame. This function refreshes the data in the constant buffer for
            // the holographic camera indicated by cameraPose.
            cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrame.CoordinateSystem);

            // Attach the view/projection constant buffer for this camera to the graphics pipeline.
            bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

            // Only render world-locked content when positional tracking is active.
            if (cameraActive)
            {
                quadRenderer.RenderRGB(textRenderer.Texture);
            }

            atLeastOneCameraRendered = true;
        }

        return atLeastOneCameraRendered;
    });
}
public override void Present()
{
    HolographicFrame.PresentUsingCurrentPrediction();
}