/// <summary>
/// Samples the HMD pose for each eye, refreshes head tracking, and publishes
/// the per-eye poses, projection matrices and head pose to <see cref="OculusRiftSensors"/>.
/// </summary>
public override void Prepare()
{
    hmd.BeginFrame(0);

    // Query each eye's pose in the order recommended by the SDK.
    for (int i = 0; i < 2; i++)
    {
        OVR.EyeType renderEye = hmd.EyeRenderOrder[i];
        eyeRenderPose[(int)renderEye] = hmd.GetHmdPosePerEye(renderEye);
    }

    var state = hmd.GetTrackingState(oculus.GetTimeInSeconds());
    var headPosition = state.HeadPose.ThePose.Position;
    var headOrientation = state.HeadPose.ThePose.Orientation;

    var leftEye = new OculusRiftSensors.Eye
    {
        Position = new Vector3(eyeRenderPose[0].Position.X, eyeRenderPose[0].Position.Y, eyeRenderPose[0].Position.Z),
        Rotation = new Quaternion(eyeRenderPose[0].Orientation.X, eyeRenderPose[0].Orientation.Y, eyeRenderPose[0].Orientation.Z, eyeRenderPose[0].Orientation.W),
    };

    var rightEye = new OculusRiftSensors.Eye
    {
        Position = new Vector3(eyeRenderPose[1].Position.X, eyeRenderPose[1].Position.Y, eyeRenderPose[1].Position.Z),
        Rotation = new Quaternion(eyeRenderPose[1].Orientation.X, eyeRenderPose[1].Orientation.Y, eyeRenderPose[1].Orientation.Z, eyeRenderPose[1].Orientation.W),
    };

    // Build per-eye projection matrices from the SDK (near 0.1, far 1000) and
    // transpose them before handing them to the engine.
    var projectionLeft = oculus.Matrix4f_Projection(eyeRenderDesc[0].Fov, 0.1f, 1000.0f, true).ToMatrix();
    projectionLeft.Transpose();
    var projectionRight = oculus.Matrix4f_Projection(eyeRenderDesc[1].Fov, 0.1f, 1000.0f, true).ToMatrix();
    projectionRight.Transpose();
    leftEye.Projection = projectionLeft;
    rightEye.Projection = projectionRight;

    OculusRiftSensors.LeftEye = leftEye;
    OculusRiftSensors.RightEye = rightEye;
    OculusRiftSensors.HeadPosition = new Vector3(headPosition.X, headPosition.Y, headPosition.Z);
    OculusRiftSensors.HeadRotation = new Quaternion(headOrientation.X, headOrientation.Y, headOrientation.Z, headOrientation.W);
}
/// <summary>
/// Enables head tracking on the HMD and caches the per-eye viewport data,
/// eye-view offsets and half-FOV angles used by later pose queries, along
/// with an identity start pose.
/// </summary>
public static void SetupTracking()
{
    OculusTracking._hmd.SetEnabledCaps(OVR.HmdCaps.ovrHmdCap_Writable_Mask);
    OculusTracking._hmd.ConfigureTracking(
        OVR.TrackingCaps.ovrTrackingCap_Orientation |
        OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection |
        OVR.TrackingCaps.ovrTrackingCap_Position,
        OVR.TrackingCaps.None);

    var viewports = new OculusTracking.EyeViewport[2];
    var fovPorts = new OVR.FovPort[2];

    // Gather the default FOV, texture size, render description and eye offset
    // for each eye from the SDK.
    for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
    {
        var eyeType = (OVR.EyeType)eyeIndex;
        var defaultFov = OculusTracking._hmd.DefaultEyeFov[eyeIndex];

        var viewport = new OculusTracking.EyeViewport();
        viewport.FieldOFView = defaultFov;
        viewport.ViewportSize = OculusTracking._hmd.GetFovTextureSize(eyeType, defaultFov, 1f);
        viewport.RenderDescription = OculusTracking._hmd.GetRenderDesc(eyeType, defaultFov);
        viewport.HmdToEyeViewOffset = viewport.RenderDescription.HmdToEyeViewOffset;

        fovPorts[eyeIndex] = viewport.FieldOFView;
        viewports[eyeIndex] = viewport;
    }

    OculusTracking._startPos = new Point3d(0.0, 0.0, 0.0);
    // Rotation from +Y onto +Y about the origin: an identity start direction.
    OculusTracking._startDir = Transform.Rotation(new Vector3d(0.0, 1.0, 0.0), new Vector3d(0.0, 1.0, 0.0), new Point3d(0.0, 0.0, 0.0));
    OculusTracking._hmdToEyeViewOffsets = new OVR.Vector3f[2]
    {
        viewports[0].HmdToEyeViewOffset,
        viewports[1].HmdToEyeViewOffset
    };
    // Convert the half-FOV tangents back into angles (radians) per eye.
    OculusTracking._fovL = new double[2]
    {
        Math.Atan((double)fovPorts[0].LeftTan),
        Math.Atan((double)fovPorts[1].LeftTan)
    };
    OculusTracking._fovR = new double[2]
    {
        Math.Atan((double)fovPorts[0].RightTan),
        Math.Atan((double)fovPorts[1].RightTan)
    };
}
/// <summary>
/// OVR initialization: switches the graphics device to sRGB, enables head
/// tracking on the HMD, allocates the per-eye textures and the compositor
/// layer, creates the desktop mirror texture, and registers the
/// <c>OculusVRService</c>. On success sets <c>IsConnected</c> to true; any
/// exception is logged to the console and initialization is abandoned.
/// </summary>
private void OVRInitialization()
{
    try
    {
        this.adapter.GraphicsDevice.IsSrgbModeEnabled = true;
        var renderTargetManager = this.adapter.Graphics.RenderTargetManager as RenderTargetManager;

        // Specify which head tracking capabilities to enable.
        this.Hmd.SetEnabledCaps(OVR.HmdCaps.DebugDevice);

        // Start the sensor which informs of the Rift's pose and motion.
        this.Hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None);

        OVR.ovrResult result;

        // Retrieve the DXGI device, in order to set the maximum frame latency.
        using (SharpDX.DXGI.Device1 dxgiDevice = device.QueryInterface<SharpDX.DXGI.Device1>())
        {
            dxgiDevice.MaximumFrameLatency = 1;
        }

        this.ovrLayers = new Layers();
        this.layerEyeFov = this.ovrLayers.AddLayerEyeFov();

        // Create a set of layers to submit.
        this.eyeTextures = new OculusVREyeTexture[2];
        // NOTE(review): 3 pose slots for 2 eyes — presumably left/right plus a
        // combined/center pose; confirm against the code that fills eyePoses.
        this.eyePoses = new VREyePose[3];
        this.oculusEyePoses = new OVR.Posef[2];
        this.hmdToEyeViewOffsets = new OVR.Vector3f[2];

        result = this.CreateVRSwapTextureSet();
        OculusVRHelpers.WriteErrorDetails(this.Oculus, result, "Failed to create swap texture set.");

        for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
        {
            OVR.EyeType eye = (OVR.EyeType)eyeIndex;
            OculusVREyeTexture eyeTexture = new OculusVREyeTexture();
            this.eyeTextures[eyeIndex] = eyeTexture;

            // Retrieve size and position of the texture for the current eye.
            eyeTexture.FieldOfView = this.Hmd.DefaultEyeFov[eyeIndex];
            eyeTexture.NearPlane = DefaultNearClip;
            eyeTexture.FarPlane = DefaultFarClip;
            eyeTexture.TextureSize = new OVR.Sizei(this.swapRenderTargets[0].Width, this.swapRenderTargets[0].Height);
            eyeTexture.RenderDescription = this.Hmd.GetRenderDesc(eye, this.Hmd.DefaultEyeFov[eyeIndex]);
            eyeTexture.HmdToEyeViewOffset = eyeTexture.RenderDescription.HmdToEyeViewOffset;

            // Both eyes share one swap texture: each eye's viewport is offset
            // horizontally by one recommended-texture width per eye index.
            eyeTexture.ViewportSize.Position = new OVR.Vector2i(this.recommendedTextureSize[0].Width * eyeIndex, 0);
            eyeTexture.ViewportSize.Size = this.recommendedTextureSize[eyeIndex];

            // Normalized (0..1) viewport relative to the shared swap render target.
            eyeTexture.Viewport = new Viewport(
                eyeTexture.ViewportSize.Position.x / (float)this.swapRenderTargets[0].Width,
                eyeTexture.ViewportSize.Position.y / (float)this.swapRenderTargets[0].Height,
                eyeTexture.ViewportSize.Size.Width / (float)this.swapRenderTargets[0].Width,
                eyeTexture.ViewportSize.Size.Height / (float)this.swapRenderTargets[0].Height,
                0.0f, 1.0f);

            this.hmdToEyeViewOffsets[eyeIndex] = eyeTexture.HmdToEyeViewOffset;

            // Specify the texture to show on the HMD.
            this.layerEyeFov.ColorTexture[eyeIndex] = this.eyeSwapTextureSet.SwapTextureSetPtr;
            this.layerEyeFov.Viewport[eyeIndex] = eyeTexture.ViewportSize;
            this.layerEyeFov.Fov[eyeIndex] = eyeTexture.FieldOfView;
            this.layerEyeFov.Header.Flags = OVR.LayerFlags.HighQuality;
        }

        // Define the texture used to display the rendered result on the computer monitor.
        Texture2DDescription mirrorTextureDescription = new Texture2DDescription();
        mirrorTextureDescription.Width = this.Width;
        mirrorTextureDescription.Height = this.Height;
        mirrorTextureDescription.ArraySize = 1;
        mirrorTextureDescription.MipLevels = 1;
        mirrorTextureDescription.Format = Format.R8G8B8A8_UNorm_SRgb;
        mirrorTextureDescription.SampleDescription = new SampleDescription(1, 0);
        mirrorTextureDescription.Usage = ResourceUsage.Default;
        mirrorTextureDescription.CpuAccessFlags = CpuAccessFlags.None;
        mirrorTextureDescription.BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget;

        // Convert the SharpDX texture description to the native Direct3D texture description.
        OVR.D3D11.D3D11_TEXTURE2D_DESC mirrorTextureDescriptionD3D11 = OculusVRHelpers.CreateTexture2DDescription(mirrorTextureDescription);
        OculusWrap.D3D11.MirrorTexture mirrorTexture;

        // Create the texture used to display the rendered result on the computer monitor.
        result = this.Hmd.CreateMirrorTextureD3D11(device.NativePointer, ref mirrorTextureDescriptionD3D11, OVR.D3D11.SwapTextureSetD3D11Flags.None, out mirrorTexture);
        OculusVRHelpers.WriteErrorDetails(this.Oculus, result, "Failed to create mirror texture.");

        this.mirrorTexture = new Texture2D(mirrorTexture.Texture.Texture);
        this.HMDMirrorRenderTarget = renderTargetManager.CreateRenderTarget(this.mirrorTexture.NativePointer);

        WaveServices.RegisterService(new OculusVRService(this));
        this.IsConnected = true;
    }
    catch (Exception e)
    {
        // Best-effort initialization: a failure is logged and IsConnected
        // stays false, so the app can continue without the headset.
        Console.WriteLine(e);
    }
}