/// <summary>
/// Gets the head pose at the current time or predicted at the given time.
/// </summary>
/// <param name="predictionTime">Seconds ahead of "now" to predict the pose for; 0 means the current time.</param>
/// <returns>The head pose, or an identity pose when no VR device is present (desktop path only).</returns>
public OVRPose GetHeadPose(double predictionTime)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Without a VR device there is nothing to query; fall back to an identity pose.
    if (!OVRManager.instance.isVRPresent)
    {
        return(new OVRPose {
            position = Vector3.zero,
            orientation = Quaternion.identity,
        });
    }

    // Prediction is expressed as an absolute time on the Oculus runtime clock.
    double abs_time_plus_pred = Hmd.GetTimeInSeconds() + predictionTime;
    TrackingState state = OVRManager.capiHmd.GetTrackingState(abs_time_plus_pred);
    return(state.HeadPose.ThePose.ToPose(true));
#else
    // Android path: the pose comes from the native plugin, using Unity's clock.
    float px = 0.0f, py = 0.0f, pz = 0.0f, ow = 0.0f, ox = 0.0f, oy = 0.0f, oz = 0.0f;

    double atTime = Time.time + predictionTime;
    OVR_GetCameraPositionOrientation(ref px, ref py, ref pz, ref ox, ref oy, ref oz, ref ow, atTime);

    // Sign flips convert the plugin's pose into Unity's coordinate convention.
    return(new OVRPose {
        position = new Vector3(px, py, -pz),
        orientation = new Quaternion(-ox, -oy, oz, ow),
    });
#endif
}
/// <summary>
/// Reports whether a Rift headset is currently connected, based on the
/// HmdConnected bit of the tracking status flags.
/// </summary>
private bool isRiftConnected()
{
    Hmd headset = OVR.Hmd.GetHmd();
    ovrTrackingState trackingState = headset.GetTrackingState();
    uint connectedBit = (uint)ovrStatusBits.ovrStatus_HmdConnected;
    return (trackingState.StatusFlags & connectedBit) != 0;
}
/// <summary>
/// Creates the Oculus Rift display: initializes the Oculus runtime, opens the HMD
/// (or a DK2 debug HMD when none is attached), attaches it to the window, and builds
/// the D3D device and swap chain sized to the HMD's native resolution.
/// </summary>
/// <param name="game">Owning game instance.</param>
/// <param name="device">Graphics device wrapper.</param>
/// <param name="parameters">Startup parameters (debug device, MSAA level, graphics profile).</param>
public OculusRiftDisplay( Game game, GraphicsDevice device, GameParameters parameters ) : base(game, device, parameters)
{
    window = CreateForm(parameters, null);

    oculus = new Wrap();

    // Initialize the Oculus runtime.
    oculus.Initialize();

    // Use the head mounted display, if it's available, otherwise use the debug HMD.
    int numberOfHeadMountedDisplays = oculus.Hmd_Detect();
    if (numberOfHeadMountedDisplays > 0)
        hmd = oculus.Hmd_Create(0);
    else
        hmd = oculus.Hmd_CreateDebug(OculusWrap.OVR.HmdType.DK2);

    if (hmd == null)
    {
        // NOTE(review): returning here leaves the display half-constructed (no swap chain) —
        // confirm callers tolerate that.
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (hmd.ProductName == string.Empty)
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);

    OVR.Recti destMirrorRect;
    OVR.Recti sourceRenderTargetRect;
    hmd.AttachToWindow(window.Handle, out destMirrorRect, out sourceRenderTargetRect);

    // Create a backbuffer that's the same size as the HMD's resolution.
    OVR.Sizei backBufferSize;
    backBufferSize.Width = hmd.Resolution.Width;
    backBufferSize.Height = hmd.Resolution.Height;

    var deviceFlags = DeviceCreationFlags.None;
    deviceFlags |= parameters.UseDebugDevice ? DeviceCreationFlags.Debug : DeviceCreationFlags.None;

    var driverType = DriverType.Hardware;
    var featureLevel = HardwareProfileChecker.GetFeatureLevel(parameters.GraphicsProfile);

    swapChainDesc = new SwapChainDescription {
        BufferCount = 1,
        ModeDescription = new ModeDescription(backBufferSize.Width, backBufferSize.Height, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        IsWindowed = true,
        OutputHandle = window.Handle,
        SampleDescription = new SampleDescription(parameters.MsaaLevel, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput| Usage.ShaderInput,
        Flags = SwapChainFlags.None,
    };

    D3D.Device.CreateWithSwapChain(driverType, deviceFlags, new[] { featureLevel }, swapChainDesc, out d3dDevice, out swapChain);

    // Stop DXGI from installing its own window handling (e.g. Alt+Enter) on our window.
    var factory = swapChain.GetParent<Factory>();
    factory.MakeWindowAssociation(window.Handle, WindowAssociationFlags.IgnoreAll);

    clientWidth = window.ClientSize.Width;
    clientHeight = window.ClientSize.Height;
}
/// <summary>
/// Configures the specified windows handler.
/// </summary>
/// <param name="windowsHandler">The windows handler.</param>
public override void Configure(IntPtr windowsHandler)
{
    base.Configure(windowsHandler);

    // Bring up the Oculus runtime before touching any HMD state.
    this.Oculus = new Wrap();
    if (!this.Oculus.Initialize())
    {
        Console.WriteLine("OVR Error: Failed to initialize the Oculus runtime library.");
        return;
    }

    // Open the head mounted display.
    OVR.GraphicsLuid graphicsLuid;
    this.Hmd = this.Oculus.Hmd_Create(out graphicsLuid);
    if (this.Hmd == null)
    {
        Console.WriteLine("OVR Error: Oculus Rift not detected.");
        return;
    }

    if (this.Hmd.ProductName == string.Empty)
    {
        Console.WriteLine("OVR Error: The HMD is not enabled.");
        return;
    }
}
// Retrieves the tracked pose (position + orientation), optionally predicted
// `predictionTime` seconds ahead on the Oculus runtime clock. Returns false when
// no HMD is available or the platform is unsupported.
// NOTE(review): the declaration's modifiers/return type are not visible in this
// extract; the body returns bool.
// NOTE(review): despite "Camera" in the name, this reads ss.HeadPose, not the
// tracker's CameraPose — confirm that is intentional.
GetCameraPositionOrientation(ref Vector3 p, ref Quaternion o, double predictionTime = 0f)
{
    if (HMD == null || !SupportedPlatform)
    {
        return(false);
    }

    float px = 0, py = 0, pz = 0, ow = 0, ox = 0, oy = 0, oz = 0;

    // Prediction uses an absolute timestamp on the runtime clock.
    double abs_time_plus_pred = Hmd.GetTimeInSeconds() + predictionTime;
    ovrTrackingState ss = HMD.GetTrackingState(abs_time_plus_pred);

    px = ss.HeadPose.ThePose.Position.x;
    py = ss.HeadPose.ThePose.Position.y;
    pz = ss.HeadPose.ThePose.Position.z;

    ox = ss.HeadPose.ThePose.Orientation.x;
    oy = ss.HeadPose.ThePose.Orientation.y;
    oz = ss.HeadPose.ThePose.Orientation.z;
    ow = ss.HeadPose.ThePose.Orientation.w;

    // Z is negated on the way out (see comment below on handedness conversion).
    p.x = px;
    p.y = py;
    p.z = -pz;

    o.w = ow;
    o.x = ox;
    o.y = oy;
    o.z = oz;

    // Convert to Left hand CS
    OrientSensor(ref o);

    return(true);
}
// Unity lifecycle hook: verifies the platform is supported, then acquires the
// HMD handle once and enables low-persistence mode.
void Awake()
{
    // Detect whether this platform is one we support.
    RuntimePlatform platform = Application.platform;
    bool knownPlatform =
        platform == RuntimePlatform.Android ||
        platform == RuntimePlatform.LinuxPlayer ||
        platform == RuntimePlatform.OSXEditor ||
        platform == RuntimePlatform.OSXPlayer ||
        platform == RuntimePlatform.WindowsEditor ||
        platform == RuntimePlatform.WindowsPlayer;
    SupportedPlatform |= knownPlatform;

    if (!SupportedPlatform)
    {
        Debug.LogWarning("This platform is unsupported");
        return;
    }

    // Already initialized by an earlier Awake — nothing to do.
    if (HMD != null)
    {
        return;
    }

    HMD = Hmd.GetHmd();
    SetLowPersistenceMode(true);
}
// Tears down the native OVR runtime when the component is disabled,
// provided it was initialized in the first place.
private void OnDisable()
{
    if (!ovrIsInitialized)
    {
        return;
    }

    OVR_Destroy();
    _capiHmd = null;
    ovrIsInitialized = false;
}
/// <summary>
/// Gets the given eye's projection matrix.
/// </summary>
/// <param name="eyeId">Specifies the eye.</param>
/// <param name="nearClip">The distance to the near clipping plane.</param>
/// <param name="farClip">The distance to the far clipping plane.</param>
public Matrix4x4 GetProjection(int eyeId, float nearClip, float farClip)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Build the projection from the HMD's default per-eye field of view.
    FovPort fov = OVRManager.capiHmd.GetDesc().DefaultEyeFov[eyeId];
    // NOTE(review): the trailing bool selects a projection variant — confirm its
    // meaning against the Hmd.GetProjection signature.
    return(Hmd.GetProjection(fov, nearClip, farClip, true).ToMatrix4x4());
#else
    // Android: projection is handled natively; return a default matrix.
    return(new Matrix4x4());
#endif
}
/// <summary>
/// Creates the Oculus Rift display: initializes the runtime, opens the HMD, and
/// builds the D3D device and swap chain at the HMD's native resolution.
/// </summary>
/// <param name="game">Owning game instance.</param>
/// <param name="device">Graphics device wrapper.</param>
/// <param name="parameters">Startup parameters; Width/Height are overwritten with the HMD resolution.</param>
public OculusRiftDisplay(Game game, GraphicsDevice device, GraphicsParameters parameters) : base(game, device, parameters)
{
    oculus = new Wrap();

    // Initialize the Oculus runtime.
    oculus.Initialize();

    OVR.GraphicsLuid graphicsLuid;
    hmd = oculus.Hmd_Create(out graphicsLuid);

    if (hmd == null)
    {
        // NOTE(review): returning here leaves the display without a window or
        // swap chain — confirm callers tolerate that.
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (hmd.ProductName == string.Empty)
    {
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    // Size the window / backbuffer to the HMD's native resolution.
    parameters.Width = hmd.Resolution.Width;
    parameters.Height = hmd.Resolution.Height;

    window = CreateForm(parameters, null, false);

    var deviceFlags = DeviceCreationFlags.None;
    deviceFlags |= parameters.UseDebugDevice ? DeviceCreationFlags.Debug : DeviceCreationFlags.None;

    var driverType = DriverType.Hardware;
    var featureLevel = HardwareProfileChecker.GetFeatureLevel(parameters.GraphicsProfile);

    var swapChainDesc = new SwapChainDescription {
        BufferCount = 1,
        ModeDescription = new ModeDescription(hmd.Resolution.Width, hmd.Resolution.Height, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        IsWindowed = true,
        OutputHandle = window.Handle,
        SampleDescription = new SampleDescription(parameters.MsaaLevel, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput | Usage.ShaderInput,
        Flags = SwapChainFlags.None,
    };

    D3D.Device.CreateWithSwapChain(driverType, deviceFlags, new[] { featureLevel }, swapChainDesc, out d3dDevice, out swapChain);

    // Stop DXGI from installing its own window handling (e.g. Alt+Enter).
    var myFactory = swapChain.GetParent<Factory>();
    myFactory.MakeWindowAssociation(window.Handle, WindowAssociationFlags.IgnoreAll);
}
// Unity lifecycle hook: initializes the native OVR plugin on first use and
// wraps the native HMD pointer, if any, in a managed Hmd handle.
void Awake()
{
    if (OVRManager.capiHmd == null)
    {
        OVR_Initialize();
    }

    IntPtr nativeHandle = IntPtr.Zero;
    OVR_GetHMD(ref nativeHandle);

    if (nativeHandle != IntPtr.Zero)
    {
        ovrHmd = new Hmd(nativeHandle);
    }
    else
    {
        ovrHmd = null;
    }
}
/// <summary>
/// Gets the given eye's projection matrix.
/// </summary>
/// <param name="eyeId">Specifies the eye.</param>
/// <param name="nearClip">The distance to the near clipping plane.</param>
/// <param name="farClip">The distance to the far clipping plane.</param>
public Matrix4x4 GetProjection(int eyeId, float nearClip, float farClip)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Build the projection from the HMD's default per-eye field of view.
    FovPort fov = OVRManager.capiHmd.GetDesc().DefaultEyeFov[eyeId];
    // Request a right-handed projection from the CAPI helper.
    uint projectionModFlags = (uint)Hmd.ProjectionModifier.RightHanded;
    return(Hmd.GetProjection(fov, nearClip, farClip, projectionModFlags).ToMatrix4x4());
#else
    // Android: projection is handled natively; return a default matrix.
    return(new Matrix4x4());
#endif
}
/// <summary>
/// Destroy this instance.
/// </summary>
void OnDestroy()
{
    // Skipping teardown keeps tracker values alive across level / scene loads.
    if (!ResetTrackerOnLoad)
    {
        return;
    }

    if (HMD == null)
    {
        return;
    }

    HMD.Destroy();
    Hmd.Shutdown();
    HMD = null;
}
/// <summary>
/// Gets the camera projection matrix.
/// </summary>
/// <returns><c>true</c>, if camera projection matrix was gotten, <c>false</c> otherwise.</returns>
/// <param name="eyeId">Eye Id - Left = 0, Right = 1.</param>
/// <param name="nearClip">Near Clip Plane of the camera.</param>
/// <param name="farClip">Far Clip Plane of the camera.</param>
/// <param name="mat">The generated camera projection matrix.</param>
public static bool GetCameraProjection(int eyeId, float nearClip, float farClip, ref Matrix4x4 mat)
{
    bool headsetAvailable = HMD != null && SupportedPlatform;
    if (!headsetAvailable)
    {
        return false;
    }

    ovrFovPort eyeFov = HMD.GetDesc().DefaultEyeFov[eyeId];
    mat = Hmd.GetProjection(eyeFov, nearClip, farClip, true).ToMatrix4x4();
    return true;
}
// Drives per-frame OnCoroutine() callbacks for the lifetime of the component,
// after grabbing the HMD handle once.
IEnumerator CallbackCoroutine()
{
    OVRDevice.HMD = Hmd.GetHmd();

    while (true)
    {
        // On these platforms, resume after rendering has finished for the frame;
        // elsewhere, resume on the next frame update.
#if UNITY_EDITOR_WIN || (!UNITY_EDITOR_OSX && UNITY_STANDALONE_OSX)
        yield return(new WaitForEndOfFrame());
#else
        yield return(null);
#endif
        OnCoroutine();
    }
}
/// <summary>
/// Updates the state of the buttons in the Remote
/// </summary>
/// <param name="hmd">The HMD handler</param>
internal void Update(Hmd hmd)
{
    OVRTypes.InputState inputState = hmd.GetInputState(OVRTypes.ControllerType.Remote);
    this.IsConnected = inputState.ControllerType == OVRTypes.ControllerType.Remote;

    // Decode each button from the packed button bitmask (bit set => pressed).
    uint buttons = inputState.Buttons;
    this.Up = (buttons & (uint)OVRTypes.Button.Up) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.Down = (buttons & (uint)OVRTypes.Button.Down) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.Left = (buttons & (uint)OVRTypes.Button.Left) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.Right = (buttons & (uint)OVRTypes.Button.Right) == 0 ? ButtonState.Released : ButtonState.Pressed;
    // The runtime reports the Start button as Button.Enter.
    this.Start = (buttons & (uint)OVRTypes.Button.Enter) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.Back = (buttons & (uint)OVRTypes.Button.Back) == 0 ? ButtonState.Released : ButtonState.Pressed;
}
/// <summary>
/// Creates the Oculus Rift display: initializes the runtime, opens the HMD, and
/// builds the D3D device and swap chain at the HMD's native resolution.
/// </summary>
/// <param name="game">Owning game instance.</param>
/// <param name="device">Graphics device wrapper.</param>
/// <param name="parameters">Startup parameters; Width/Height are overwritten with the HMD resolution.</param>
public OculusRiftDisplay( Game game, GraphicsDevice device, GraphicsParameters parameters ) : base( game, device, parameters )
{
    oculus = new Wrap();

    // Initialize the Oculus runtime.
    oculus.Initialize();

    OVR.GraphicsLuid graphicsLuid;
    hmd = oculus.Hmd_Create(out graphicsLuid);

    if (hmd == null)
    {
        // NOTE(review): returning here leaves the display without a window or
        // swap chain — confirm callers tolerate that.
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (hmd.ProductName == string.Empty)
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);

    // Size the window / backbuffer to the HMD's native resolution.
    parameters.Width = hmd.Resolution.Width;
    parameters.Height = hmd.Resolution.Height;

    window = CreateForm(parameters, null);

    var deviceFlags = DeviceCreationFlags.None;
    deviceFlags |= parameters.UseDebugDevice ? DeviceCreationFlags.Debug : DeviceCreationFlags.None;

    var driverType = DriverType.Hardware;
    var featureLevel = HardwareProfileChecker.GetFeatureLevel(parameters.GraphicsProfile);

    var swapChainDesc = new SwapChainDescription {
        BufferCount = 1,
        ModeDescription = new ModeDescription(hmd.Resolution.Width, hmd.Resolution.Height, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        IsWindowed = true,
        OutputHandle = window.Handle,
        SampleDescription = new SampleDescription(parameters.MsaaLevel, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput| Usage.ShaderInput,
        Flags = SwapChainFlags.None,
    };

    D3D.Device.CreateWithSwapChain(driverType, deviceFlags, new[] { featureLevel }, swapChainDesc, out d3dDevice, out swapChain);

    // Stop DXGI from installing its own window handling (e.g. Alt+Enter).
    var myFactory = swapChain.GetParent<Factory>();
    myFactory.MakeWindowAssociation(window.Handle, WindowAssociationFlags.IgnoreAll);
}
/// <summary>
/// Gets the tracker's pose, relative to the head's pose at the time of the last pose recentering.
/// </summary>
/// <param name="predictionTime">Seconds ahead of "now" to predict for; defaults to 0 (current time).</param>
public OVRPose GetPose(double predictionTime = 0d)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Prediction is expressed as an absolute timestamp on the Oculus runtime clock.
    double abs_time_plus_pred = Hmd.GetTimeInSeconds() + predictionTime;
    return(OVRManager.capiHmd.GetTrackingState(abs_time_plus_pred).CameraPose.ToPose());
#else
    // No positional tracker on this path: report an identity pose.
    return(new OVRPose {
        position = Vector3.zero,
        orientation = Quaternion.identity
    });
#endif
}
// Tears down the native OVR runtime, but only during an actual application quit
// (a plain disable keeps the runtime alive).
private void OnDisable()
{
    if (!isQuitting)
    {
        return;
    }

    if (!ovrIsInitialized)
    {
        return;
    }

    OVR_Destroy();
    OVRPluginEvent.Issue(RenderEventType.Destroy);
    _capiHmd = null;
    ovrIsInitialized = false;
}
/// <summary>
/// Gets the tracker's pose, relative to the head's pose at the time of the last pose recentering.
/// </summary>
/// <param name="predictionTime">Seconds ahead of "now" to predict for.</param>
public OVRPose GetPose(double predictionTime)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Only query the runtime when a VR device is actually present.
    if (OVRManager.instance.isVRPresent)
    {
        double abs_time_plus_pred = Hmd.GetTimeInSeconds() + predictionTime;
        return(OVRManager.capiHmd.GetTrackingState(abs_time_plus_pred).CameraPose.ToPose(true));
    }
#endif
    // Fallback (no device, or Android): identity pose.
    return(new OVRPose { position = Vector3.zero, orientation = Quaternion.identity });
}
/// <summary>
/// Creates a handle to an HMD.
///
/// Upon success the returned Hmd must be eventually freed with Dispose() when it is no longer needed.
/// </summary>
/// <param name="graphicsLuid">
/// Provides a system specific graphics adapter identifier that locates which
/// graphics adapter has the HMD attached. This must match the adapter used by the application
/// or no rendering output will be possible. This is important for stability on multi-adapter systems. An
/// application that simply chooses the default adapter will not run reliably on multi-adapter systems.
/// </param>
public Hmd Hmd_Create(out OVRTypes.GraphicsLuid graphicsLuid)
{
    graphicsLuid = new OVRTypes.GraphicsLuid();

    IntPtr sessionPtr = IntPtr.Zero;
    OVRTypes.Result createResult = OVR.Create(ref sessionPtr, ref graphicsLuid);
    if (createResult < OVRTypes.Result.Success)
    {
        return null;
    }

    Hmd createdHmd = new Hmd(OVR, sessionPtr);

    // Track the handle so disposing this Wrap also disposes the HMD.
    CreatedHmds.Add(createdHmd);

    return createdHmd;
}
/// <summary>
/// Starts the native Steam services layer (if it is not already loaded) and
/// registers every managed service object as a paired create/release job.
/// </summary>
private void Startup()
{
    if (NativeHelpers.Services_GetSteamLoadStatus() == LoadStatus.NotLoaded)
    {
        // Only startup the native parts if they are not loaded yet
        if (!NativeMethods.Services_Startup(Constants.VersionInfo.InterfaceID))
        {
            // Setup failed!
            Instance = null;

            ErrorCodes error = NativeHelpers.Services_GetErrorCode();
            if (error == ErrorCodes.InvalidInterfaceVersion)
            {
                // Report both the native and the expected interface version.
                Error.ThrowError(ErrorCodes.InvalidInterfaceVersion, NativeMethods.Services_GetInterfaceVersion(), Constants.VersionInfo.InterfaceID);
            }
            else
            {
                Error.ThrowError(error);
            }
        }
    }

    AppID = new SteamTypes.AppID(NativeMethods.Services_GetAppID());

    // Each service is constructed by RunCreateJobs() below; the second delegate
    // of each pair releases that service on shutdown.
    serviceJobs = new JobManager();
    serviceJobs.AddJob(new DelegateJob(() => RegisterManagedCallback(), () => RemoveManagedCallback()));
    serviceJobs.AddJob(new DelegateJob(() => cloud = new Cloud(), () => cloud.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => stats = new Stats(), () => stats.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => user = new User(), () => user.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => friends = new Friends(), () => friends.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => matchmaking = new MatchMaking(), () => matchmaking.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => matchmakingServers = new MatchmakingServers(), () => matchmakingServers.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => networking = new Networking(), () => networking.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => utils = new Utils(), () => utils.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => apps = new Apps(), () => apps.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => http = new HTTP(), () => http.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => screenshots = new Screenshots(), () => screenshots.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => ugc = new UGC(), () => ugc.ReleaseManagedResources()));
    serviceJobs.AddJob(new DelegateJob(() => steamcontroller = new SteamController(), () => steamcontroller.ReleaseManagedResources()));

    hmd = new Hmd();

    serviceJobs.RunCreateJobs();
}
/// <summary>
/// Allocates an OpenGL swap texture set of the given size on the HMD, configures
/// linear filtering and edge clamping on each texture, and creates a framebuffer id.
/// </summary>
public OvrSharedRendertarget(int w, int h, Hmd hmd)
{
    width = w;
    height = h;

    // Request an sRGB (Srgb8Alpha8) texture set from the runtime.
    hmd.CreateSwapTextureSetGL((uint)All.Srgb8Alpha8, width, height, out textureSet);

    for (int textureIndex = 0; textureIndex < textureSet.TextureCount; textureIndex++)
    {
        GL.BindTexture(TextureTarget.Texture2D, textureSet.Textures[textureIndex].TexId);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
        GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);
    }

    GL.GenFramebuffers(1, out fboId);
}
/// <summary>
/// Updates the state of the buttons and poses in the Oculus Touch controllers
/// </summary>
/// <param name="hmd">The HMD handler</param>
internal void Update(Hmd hmd)
{
    OVRTypes.InputState inputState = hmd.GetInputState(OVRTypes.ControllerType.Touch);
    this.controllerType = inputState.ControllerType;
    this.IsConnected = this.controllerType == OVRTypes.ControllerType.Touch;

    // Decode the packed button bitmask (bit set => pressed).
    uint buttons = inputState.Buttons;
    this.A = (buttons & (uint)OVRTypes.Button.A) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.B = (buttons & (uint)OVRTypes.Button.B) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.RThumb = (buttons & (uint)OVRTypes.Button.RThumb) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.X = (buttons & (uint)OVRTypes.Button.X) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.Y = (buttons & (uint)OVRTypes.Button.Y) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.LThumb = (buttons & (uint)OVRTypes.Button.LThumb) == 0 ? ButtonState.Released : ButtonState.Pressed;
    // The runtime reports the Start button as Button.Enter.
    this.Start = (buttons & (uint)OVRTypes.Button.Enter) == 0 ? ButtonState.Released : ButtonState.Pressed;

    // Decode the packed touch bitmask.
    uint touches = inputState.Touches;
    this.TouchState.A = (touches & (uint)OVRTypes.Touch.A) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.B = (touches & (uint)OVRTypes.Touch.B) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.RThumb = (touches & (uint)OVRTypes.Touch.RThumb) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.RIndexTrigger = (touches & (uint)OVRTypes.Touch.RIndexTrigger) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.X = (touches & (uint)OVRTypes.Touch.X) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.Y = (touches & (uint)OVRTypes.Touch.Y) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.LThumb = (touches & (uint)OVRTypes.Touch.LThumb) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.LIndexTrigger = (touches & (uint)OVRTypes.Touch.LIndexTrigger) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.RIndexPointing = (touches & (uint)OVRTypes.Touch.RIndexPointing) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.RThumbUp = (touches & (uint)OVRTypes.Touch.RThumbUp) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.LIndexPointing = (touches & (uint)OVRTypes.Touch.LIndexPointing) == 0 ? ButtonState.Released : ButtonState.Pressed;
    this.TouchState.LThumbUp = (touches & (uint)OVRTypes.Touch.LThumbUp) == 0 ? ButtonState.Released : ButtonState.Pressed;

    // Copy through the analog axes, per hand.
    this.LeftIndexTrigger = inputState.IndexTrigger[(int)OVRTypes.HandType.Left];
    this.RightIndexTrigger = inputState.IndexTrigger[(int)OVRTypes.HandType.Right];
    this.LeftHandTrigger = inputState.HandTrigger[(int)OVRTypes.HandType.Left];
    this.RightHandTrigger = inputState.HandTrigger[(int)OVRTypes.HandType.Right];
    this.LeftThumbstick.X = inputState.Thumbstick[(int)OVRTypes.HandType.Left].X;
    this.LeftThumbstick.Y = inputState.Thumbstick[(int)OVRTypes.HandType.Left].Y;
    this.RightThumbstick.X = inputState.Thumbstick[(int)OVRTypes.HandType.Right].X;
    this.RightThumbstick.Y = inputState.Thumbstick[(int)OVRTypes.HandType.Right].Y;
}
// Unity lifecycle hook: applies command-line screen-mode overrides, verifies the
// platform is supported, then acquires the HMD handle once.
void Awake()
{
    // Allow the command line to force the screen mode.
    foreach (string arg in System.Environment.GetCommandLineArgs())
    {
        if (arg == "-fullscreen")
        {
            Debug.Log("Going to Full-Screen");
            Screen.fullScreen = true;
        }
        else if (arg == "-window")
        {
            Debug.Log("Going to Window");
            Screen.fullScreen = false;
        }
    }

    // Detect whether this platform is a supported platform
    RuntimePlatform platform = Application.platform;
    bool knownPlatform =
        platform == RuntimePlatform.Android ||
        platform == RuntimePlatform.LinuxPlayer ||
        platform == RuntimePlatform.OSXEditor ||
        platform == RuntimePlatform.OSXPlayer ||
        platform == RuntimePlatform.WindowsEditor ||
        platform == RuntimePlatform.WindowsPlayer;
    SupportedPlatform |= knownPlatform;

    if (!SupportedPlatform)
    {
        Debug.LogWarning("This platform is unsupported");
        return;
    }

    // Already initialized by an earlier Awake — nothing to do.
    if (HMD != null)
    {
        return;
    }

    HMD = Hmd.GetHmd();

    //HACK: Forcing LP off until service initializes it properly.
    SetLowPersistenceMode(true);
}
// Tears down the native OVR runtime on disable, but only during an actual
// application quit.
private void OnDisable()
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // A plain disable (not quitting) keeps the runtime alive. Note that this
    // early return also skips the StopAllCoroutines() call below in that case.
    if (!isQuitting)
    {
        return;
    }

    if (ovrIsInitialized)
    {
        OVR_Destroy();
        OVRPluginEvent.Issue(RenderEventType.Destroy);
        _capiHmd = null;
        ovrIsInitialized = false;
    }
#endif
    // NOTE: The coroutines will also be stopped when the object is destroyed.
    StopAllCoroutines();
}
// Tears down the native OVR runtime on desktop builds during an application
// quit; on Android, only stops the running coroutines.
private void OnDisable()
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Only destroy the runtime when the application is really quitting and the
    // runtime was actually initialized.
    if (isQuitting && ovrIsInitialized)
    {
        OVR_Destroy();
        OVRPluginEvent.Issue(RenderEventType.Destroy);
        _capiHmd = null;
        ovrIsInitialized = false;
    }
#else
    StopAllCoroutines();
#endif
}
/// <summary>
/// Creates the Oculus runtime wrapper and an HMD handle, storing both in
/// <c>OculusTracking</c>. Shows a message box and cleans up on each failure path.
/// </summary>
/// <returns><c>true</c> when the runtime initialized and a usable HMD was created; otherwise <c>false</c>.</returns>
public static bool CreatOculusHmd()
{
    // Reset any previous state before attempting a fresh initialization.
    OculusTracking._oculus = null;
    OculusTracking._hmd = null;

    OculusTracking._oculus = new Wrap();
    if (!OculusTracking._oculus.Initialize((OVR.ovrInitParams)null))
    {
        MessageBox.Show("Failed to initialize the Oculus runtime library.", "Runtime Warning", MessageBoxButtons.OK, MessageBoxIcon.Hand);
        // NOTE(review): the original code does not Dispose() the Wrap on a failed
        // Initialize — preserved here; confirm whether Dispose is safe/needed.
        OculusTracking._oculus = null;
        return false;
    }

    if (OculusTracking._oculus.Hmd_Detect() <= 0)
    {
        MessageBox.Show("Oculus Rift not connected.", "Connection Warning", MessageBoxButtons.OK, MessageBoxIcon.Hand);
        OculusTracking._oculus.Dispose();
        OculusTracking._oculus = null;
        return false;
    }

    OculusTracking._hmd = OculusTracking._oculus.Hmd_Create(0);
    if (OculusTracking._hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Connection Warning", MessageBoxButtons.OK, MessageBoxIcon.Hand);
        OculusTracking._oculus.Dispose();
        OculusTracking._oculus = null;
        return false;
    }

    if (OculusTracking._hmd.ProductName == string.Empty)
    {
        // HMD exists but is not enabled: release both handles (runtime first,
        // matching the original teardown order).
        MessageBox.Show("The HMD is not enabled.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Hand);
        OculusTracking._oculus.Dispose();
        OculusTracking._oculus = null;
        OculusTracking._hmd.Dispose();
        OculusTracking._hmd = null;
        return false;
    }

    return true;
}
/// <summary>
/// Configures the specified windows handler.
/// </summary>
/// <param name="windowsHandler">The windows handler.</param>
public override void Configure(IntPtr windowsHandler)
{
    base.Configure(windowsHandler);

    this.Oculus = new Wrap();

    // Build the runtime initialization parameters; debug builds also request
    // the runtime's debug mode.
    OVRTypes.InitParams initParams = new OVRTypes.InitParams
    {
        Flags = OVRTypes.InitFlags.RequestVersion,
        RequestedMinorVersion = 0
    };
#if DEBUG
    initParams.Flags |= OVRTypes.InitFlags.Debug;
#endif

    if (!this.Oculus.Initialize(initParams))
    {
        Console.WriteLine("OVR Error: Failed to initialize the Oculus runtime library.");
        return;
    }

    // Open the head mounted display.
    OVRTypes.GraphicsLuid graphicsLuid;
    this.Hmd = this.Oculus.Hmd_Create(out graphicsLuid);
    if (this.Hmd == null)
    {
        Console.WriteLine("OVR Error: Oculus Rift not detected.");
        return;
    }

    if (this.Hmd.ProductName == string.Empty)
    {
        Console.WriteLine("OVR Error: The HMD is not enabled.");
        return;
    }
}
/// <summary>
/// Main VR render thread body: initializes the Oculus runtime, creates the D3D
/// device and per-eye swap-chain textures, then runs the render loop — tracking
/// the head pose, drawing the scene and UI for each eye, committing the eye
/// textures and submitting the frame — until <c>abort</c> is set, after which
/// all resources are released.
/// </summary>
override protected void Render()
{
    Lock = true;

    using (Wrap oculus = new Wrap())
    {
        // Initialize the Oculus runtime.
        if (!oculus.Initialize(initializationParameters))
        {
            throw new HeadsetError("Failed to initialize the Oculus runtime library.");
        }

        OVRTypes.GraphicsLuid graphicsLuid;

        // Create a set of layers to submit.
        EyeTexture[] eyeTextures = new EyeTexture[2];

        // Create a depth buffer, using the same width and height as the back buffer.
        Texture2DDescription depthBufferDescription = new Texture2DDescription()
        {
            Format = Format.D32_Float,
            ArraySize = 1,
            MipLevels = 1,
            Width = 1920, // TODO: FIXME?
            Height = 1080,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Default,
            BindFlags = BindFlags.DepthStencil,
            CpuAccessFlags = CpuAccessFlags.None,
            OptionFlags = ResourceOptionFlags.None
        };

        // Define how the depth buffer will be used to filter out objects, based on their distance from the viewer.
        DepthStencilStateDescription depthStencilStateDescription = new DepthStencilStateDescription()
        {
            IsDepthEnabled = true,
            DepthComparison = Comparison.Less,
            DepthWriteMask = DepthWriteMask.Zero
        };

        //#if DEBUG
        // SharpDX.Configuration.EnableObjectTracking = true;
        //#endif

        using (Hmd hmd = oculus.Hmd_Create(out graphicsLuid))
        // Create DirectX drawing device.
        using (_device = new Device(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.BgraSupport, new SharpDX.Direct3D.FeatureLevel[] { SharpDX.Direct3D.FeatureLevel.Level_10_0 }))
        // Create DirectX Graphics Interface factory, used to create the swap chain.
        using (Factory factory = new Factory())
        using (DeviceContext immediateContext = _device.ImmediateContext)
        // Create the depth buffer.
        using (Texture2D depthBuffer = new Texture2D(_device, depthBufferDescription))
        using (DepthStencilView depthStencilView = new DepthStencilView(_device, depthBuffer))
        using (DepthStencilState depthStencilState = new DepthStencilState(_device, depthStencilStateDescription))
        using (Layers layers = new Layers())
        using (_gd = SharpDX.Toolkit.Graphics.GraphicsDevice.New(_device))
        using (vrui = new VRUI(_device, _gd))
        using (customEffectL = GetCustomEffect(_gd))
        using (customEffectR = GetCustomEffect(_gd))
        //using (SharpDX.Toolkit.Graphics.GeometricPrimitive primitive = GraphicTools.CreateGeometry(_projection, _gd, false))
        {
            if (hmd == null)
            {
                throw new HeadsetError("Oculus Rift not detected.");
            }

            if (hmd.ProductName == string.Empty)
            {
                throw new HeadsetError("The HMD is not enabled.");
            }

            Viewport viewport = new Viewport(0, 0, hmd.Resolution.Width, hmd.Resolution.Height, 0.0f, 1.0f);

            LayerEyeFov layerEyeFov = layers.AddLayerEyeFov();

            // Retrieve the DXGI device, in order to set the maximum frame latency.
            using (SharpDX.DXGI.Device1 dxgiDevice = _device.QueryInterface<SharpDX.DXGI.Device1>())
            {
                dxgiDevice.MaximumFrameLatency = 1;
            }

            // Per-eye setup: swap texture set, render target views and depth buffer.
            for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
            {
                OVRTypes.EyeType eye = (OVRTypes.EyeType)eyeIndex;
                var textureSize = hmd.GetFovTextureSize(eye, hmd.DefaultEyeFov[eyeIndex], 1.0f);
                var renderDescription = hmd.GetRenderDesc(eye, hmd.DefaultEyeFov[eyeIndex]);

                EyeTexture eyeTexture = eyeTextures[eyeIndex] = new EyeTexture()
                {
                    // Retrieve size and position of the texture for the current eye.
                    FieldOfView = hmd.DefaultEyeFov[eyeIndex],
                    TextureSize = textureSize,
                    RenderDescription = renderDescription,
                    // Define a texture at the size recommended for the eye texture.
                    Viewport = new Viewport(0, 0, textureSize.Width, textureSize.Height, 0.0f, 1.0f),
                    HmdToEyeViewOffset = renderDescription.HmdToEyeOffset,
                    Texture2DDescription = new Texture2DDescription()
                    {
                        Width = textureSize.Width,
                        Height = textureSize.Height,
                        ArraySize = 1,
                        MipLevels = 1,
                        Format = Format.R8G8B8A8_UNorm_SRgb,
                        SampleDescription = new SampleDescription(1, 0),
                        Usage = ResourceUsage.Default,
                        CpuAccessFlags = CpuAccessFlags.None,
                        BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget
                    }
                };

                eyeTexture.ViewportSize.Position = new OVRTypes.Vector2i(0, 0);
                eyeTexture.ViewportSize.Size = textureSize;

                // Convert the SharpDX texture description to the native Direct3D texture description.
                OVRTypes.TextureSwapChainDesc textureSwapChainDesc = SharpDXHelpers.CreateTextureSwapChainDescription(eyeTexture.Texture2DDescription);

                AssertSuccess(hmd.CreateTextureSwapChainDX(_device.NativePointer, textureSwapChainDesc, out eyeTexture.SwapTextureSet), oculus, "Failed to create swap chain.");

                // Retrieve the number of buffers of the created swap chain.
                int textureSwapChainBufferCount;
                AssertSuccess(eyeTexture.SwapTextureSet.GetLength(out textureSwapChainBufferCount), oculus, "Failed to retrieve the number of buffers of the created swap chain.");

                // Create room for each DirectX texture in the SwapTextureSet.
                eyeTexture.Textures = new Texture2D[textureSwapChainBufferCount];
                eyeTexture.RenderTargetViews = new RenderTargetView[textureSwapChainBufferCount];

                // Create a texture 2D and a render target view, for each unmanaged texture contained in the SwapTextureSet.
                for (int textureIndex = 0; textureIndex < textureSwapChainBufferCount; textureIndex++)
                {
                    // Interface ID of the Direct3D Texture2D interface.
                    Guid textureInterfaceId = new Guid("6f15aaf2-d208-4e89-9ab4-489535d34f9c");

                    // Retrieve the Direct3D texture contained in the Oculus TextureSwapChainBuffer.
                    IntPtr swapChainTextureComPtr = IntPtr.Zero;
                    AssertSuccess(eyeTexture.SwapTextureSet.GetBufferDX(textureIndex, textureInterfaceId, out swapChainTextureComPtr), oculus, "Failed to retrieve a texture from the created swap chain.");

                    // Create a managed Texture2D, based on the unmanaged texture pointer.
                    eyeTexture.Textures[textureIndex] = new Texture2D(swapChainTextureComPtr);

                    // Create a render target view for the current Texture2D.
                    eyeTexture.RenderTargetViews[textureIndex] = new RenderTargetView(_device, eyeTexture.Textures[textureIndex]);
                }

                // Define the depth buffer, at the size recommended for the eye texture.
                eyeTexture.DepthBufferDescription = new Texture2DDescription()
                {
                    Format = Format.D32_Float,
                    Width = eyeTexture.TextureSize.Width,
                    Height = eyeTexture.TextureSize.Height,
                    ArraySize = 1,
                    MipLevels = 1,
                    SampleDescription = new SampleDescription(1, 0),
                    Usage = ResourceUsage.Default,
                    BindFlags = BindFlags.DepthStencil,
                    CpuAccessFlags = CpuAccessFlags.None,
                    OptionFlags = ResourceOptionFlags.None
                };

                // Create the depth buffer.
                eyeTexture.DepthBuffer = new Texture2D(_device, eyeTexture.DepthBufferDescription);
                eyeTexture.DepthStencilView = new DepthStencilView(_device, eyeTexture.DepthBuffer);

                // Specify the texture to show on the HMD.
                layerEyeFov.ColorTexture[eyeIndex] = eyeTexture.SwapTextureSet.TextureSwapChainPtr;
                layerEyeFov.Viewport[eyeIndex].Position = new OVRTypes.Vector2i(0, 0);
                layerEyeFov.Viewport[eyeIndex].Size = eyeTexture.TextureSize;
                layerEyeFov.Fov[eyeIndex] = eyeTexture.FieldOfView;
                layerEyeFov.Header.Flags = OVRTypes.LayerFlags.HighQuality;
            }

            #region Render loop

            DateTime startTime = DateTime.Now;
            DateTime lastTime = DateTime.Now;
            float deltaTime = 0;

            // Start with default background
            SetDefaultScene();

            while (!abort)
            {
                UpdateContentIfRequested();

                OVRTypes.Vector3f[] hmdToEyeViewOffsets = { eyeTextures[0].HmdToEyeViewOffset, eyeTextures[1].HmdToEyeViewOffset };
                //OVR.FrameTiming frameTiming = hmd.GetFrameTiming(0);
                //OVR.TrackingState trackingState = hmd.GetTrackingState(frameTiming.DisplayMidpointSeconds);
                double displayMidpoint = hmd.GetPredictedDisplayTime(0);
                OVRTypes.TrackingState trackingState = hmd.GetTrackingState(displayMidpoint, true);
                OVRTypes.Posef[] eyePoses = new OVRTypes.Posef[2];

                // Calculate the position and orientation of each eye.
                oculus.CalcEyePoses(trackingState.HeadPose.ThePose, hmdToEyeViewOffsets, ref eyePoses);

                float timeSinceStart = (float)(DateTime.Now - startTime).TotalSeconds;
                deltaTime = (float)(DateTime.Now - lastTime).TotalSeconds;
                lastTime = DateTime.Now;

                Vector3 centerEye = (eyePoses[0].Position.ToVector3() + eyePoses[1].Position.ToVector3()) * 0.5f;

                for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    OVRTypes.EyeType eye = (OVRTypes.EyeType)eyeIndex;
                    EyeTexture eyeTexture = eyeTextures[eyeIndex];

                    layerEyeFov.RenderPose[eyeIndex] = eyePoses[eyeIndex];

                    // Update the render description at each frame, as the HmdToEyeOffset can change at runtime.
                    eyeTexture.RenderDescription = hmd.GetRenderDesc(eye, hmd.DefaultEyeFov[eyeIndex]);

                    // Retrieve the index of the active texture
                    int textureIndex;
                    AssertSuccess(eyeTexture.SwapTextureSet.GetCurrentIndex(out textureIndex), oculus, "Failed to retrieve texture swap chain current index.");

                    immediateContext.OutputMerger.SetRenderTargets(eyeTexture.DepthStencilView, eyeTexture.RenderTargetViews[textureIndex]);
                    immediateContext.ClearRenderTargetView(eyeTexture.RenderTargetViews[textureIndex], Color.Black);
                    immediateContext.ClearDepthStencilView(eyeTexture.DepthStencilView, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);
                    immediateContext.Rasterizer.SetViewport(eyeTexture.Viewport);

                    // Retrieve the eye rotation quaternion and use it to calculate the LookAt direction and the LookUp direction.
                    Quaternion lookRotation = SharpDXHelpers.ToQuaternion(eyePoses[eyeIndex].Orientation);
                    lookRotation = new Quaternion(1, 0, 0, 0) * lookRotation;

                    Matrix rotationMatrix = Matrix.RotationQuaternion(lookRotation);
                    Vector3 lookUp = Vector3.Transform(new Vector3(0, -1, 0), rotationMatrix).ToVector3();
                    Vector3 lookAt = Vector3.Transform(new Vector3(0, 0, 1), rotationMatrix).ToVector3();
                    //Vector3 eyeDiff = eyePoses[eyeIndex].Position.ToVector3() - eyePoses[1 - eyeIndex].Position.ToVector3();
                    Vector3 lookPosition = new Vector3( -eyePoses[eyeIndex].Position.X, eyePoses[eyeIndex].Position.Y, eyePoses[eyeIndex].Position.Z );

                    Matrix worldMatrix = Matrix.Translation(lookPosition);
                    Matrix viewMatrix = Matrix.LookAtLH(lookPosition, lookPosition + lookAt, lookUp);
                    Matrix projectionMatrix = oculus.Matrix4f_Projection(eyeTexture.FieldOfView, 0.1f, 100.0f, OVRTypes.ProjectionModifier.LeftHanded).ToMatrix();
                    projectionMatrix.Transpose();
                    Matrix MVP = worldMatrix * viewMatrix * projectionMatrix;
                    customEffectL.Parameters["WorldViewProj"].SetValue(MVP);
                    customEffectR.Parameters["WorldViewProj"].SetValue(MVP);

                    // The shared primitive may be swapped out concurrently; guard the draw.
                    lock (localCritical)
                    {
                        try
                        {
                            if (eyeIndex == 0)
                            {
                                primitive?.Draw(customEffectL);
                            }
                            if (eyeIndex == 1)
                            {
                                primitive?.Draw(customEffectR);
                            }
                        }
                        catch (NullReferenceException) { ; }
                    }

                    // Publish look direction once per frame (left eye only).
                    if (ProvideLook != null && eyeIndex == 0)
                    {
                        lookRotation.Invert();
                        lookRotation = lookRotation * new Quaternion(1, 0, 0, 0); // rotate 180 in x
                        Vector3 forward = Vector3.Transform(Vector3.ForwardRH, lookRotation);
                        Vector3 up = Vector3.Transform(Vector3.Up, lookRotation);
                        log.Publish("oculus.forward", forward.ToString("0.00"));
                        log.Publish("oculus.up", up.ToString("0.00"));
                        log.Publish("oculus.lookAt", lookAt.ToString("0.00"));
                        log.Publish("oculus.lookUp", lookUp.ToString("0.00"));
                        log.Publish("oculus.vr_quat", lookRotation);
                        log.Publish("q.sent", lookRotation);
                        ProvideLook(lookPosition, lookRotation, OculusFOV);
                    }

                    // reset UI position every frame if it is not visible
                    if (vrui.isUIHidden)
                    {
                        vrui.SetWorldPosition(viewMatrix.Forward, lookPosition, false);
                    }

                    vrui.Draw(Media, currentTime, Duration);
                    vrui.Render(deltaTime, viewMatrix, projectionMatrix, lookPosition, ShouldShowVRUI);

                    // Commits any pending changes to the TextureSwapChain, and advances its current index
                    AssertSuccess(eyeTexture.SwapTextureSet.Commit(), oculus, "Failed to commit the swap chain texture.");

                    //Console.WriteLine("xbox: " + ((hmd.ovr_GetConnectedControllerTypes() & OVRTypes.ControllerType.XBox) != 0));
                    //Console.WriteLine("remote: " + ((hmd.ovr_GetConnectedControllerTypes() & OVRTypes.ControllerType.Remote) != 0));
                    //Console.WriteLine("active: " + hmd.GetInputState(OVRTypes.ControllerType.Active));
                    //Console.WriteLine("buttons: " + hmd.GetInputState(OVRTypes.ControllerType.Remote).Buttons);
                }

                hmd.SubmitFrame(0, layers);
            }

            #endregion
            //debugWindow.Stop();
            waitForRendererStop.Set();

            // Release all resources
            primitive?.Dispose();
            eyeTextures[0].Dispose();
            eyeTextures[1].Dispose();
            immediateContext.ClearState();
            immediateContext.Flush();
        }
    }

    Lock = false;
}
/// <summary>
/// Creates a texture buffer, either as an Oculus texture swap chain (when
/// <paramref name="displayableOnHmd"/> is true) or as a plain OpenGL texture.
/// Also creates the framebuffer object used to render into the texture.
/// </summary>
/// <param name="oculus">Oculus SDK wrapper, kept for error reporting.</param>
/// <param name="hmd">The head mounted display; required when <paramref name="displayableOnHmd"/> is true.</param>
/// <param name="rendertarget">True to configure linear filtering + clamp-to-edge (render target usage); false for mipmapped, repeating sampling.</param>
/// <param name="displayableOnHmd">True to allocate a swap chain the compositor can display; false for an ordinary GL texture.</param>
/// <param name="size">Texture dimensions in pixels.</param>
/// <param name="mipLevels">Number of mip levels; values &gt; 1 enable mipmap generation.</param>
/// <param name="data">Initial pixel data pointer. NOTE(review): appears unused — the non-HMD path uploads IntPtr.Zero; confirm before relying on it.</param>
/// <param name="sampleCount">MSAA sample count; must be &lt;= 1 (MSAA not handled).</param>
public TextureBuffer(Wrap oculus, Hmd hmd, bool rendertarget, bool displayableOnHmd, OVRTypes.Sizei size, int mipLevels, IntPtr data, int sampleCount)
{
    wrap = oculus;
    texSize = size;
    OVRTypes.Result result;

    Debug.Assert(sampleCount <= 1); // The code doesn't currently handle MSAA textures.

    if (displayableOnHmd)
    {
        // This texture isn't necessarily going to be a rendertarget, but it usually is.
        Debug.Assert(hmd != null); // No HMD? A little odd.

        // It looks like that in order to disable sRGB we have to
        // set the texture format to sRGB but omit the call the
        // enabling sRGB in the framebuffer. Info here:
        // https://forums.oculus.com/community/discussion/24347/srgb-and-sdk-0-6-0-0
        OVRTypes.TextureSwapChainDesc desc = new OVRTypes.TextureSwapChainDesc();
        desc.Type = OVRTypes.TextureType.Texture2D;
        desc.ArraySize = 1;
        desc.Width = size.Width;
        desc.Height = size.Height;
        desc.MipLevels = mipLevels;
        desc.Format = OVRTypes.TextureFormat.R8G8B8A8_UNORM_SRGB; //Remember to call GL.Disable(EnableCap.FramebufferSrgb) later
        desc.SampleCount = sampleCount;
        desc.StaticImage = 0;
        if (mipLevels > 1)
            desc.MiscFlags = OVRTypes.TextureMiscFlags.AllowGenerateMips;

        // Ask the Oculus runtime to allocate the swap chain for this GL context.
        result = hmd.CreateTextureSwapChainGL(desc, out textureChain);
        WriteErrorDetails(wrap, result, "Failed to create swap chain.");

        int length = 0;
        result = TextureChain.GetLength(out length);
        WriteErrorDetails(wrap, result, "Failed to retrieve the number of buffers of the created swap chain.");
        if (result >= OVRTypes.Result.Success)
        {
            // Configure sampling state on every texture in the chain.
            for (int i = 0; i < length; ++i)
            {
                uint chainTexId;

                // Retrieve the OpenGL texture contained in the Oculus TextureSwapChainBuffer.
                result = TextureChain.GetBufferGL(i, out chainTexId);
                WriteErrorDetails(wrap, result, "Failed to retrieve a texture from the created swap chain.");

                GL.BindTexture(TextureTarget.Texture2D, chainTexId);

                if (rendertarget)
                {
                    // Render target: linear filtering, clamp so edge pixels don't bleed.
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);
                }
                else
                {
                    // Sampled texture: trilinear filtering with repeating wrap.
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.LinearMipmapLinear);
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.Repeat);
                    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.Repeat);
                }
            }
        }
    }
    else
    {
        // Plain OpenGL texture, not managed by the Oculus compositor.
        GL.GenTextures(1, out texId);
        GL.BindTexture(TextureTarget.Texture2D, texId);

        if (rendertarget)
        {
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.ClampToEdge);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.ClampToEdge);
        }
        else
        {
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.LinearMipmapLinear);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.Repeat);
            GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.Repeat);
        }

        // Allocate storage; pixel data pointer is IntPtr.Zero, so the texture starts uninitialized.
        GL.TexImage2D(TextureTarget.Texture2D, 0,
            PixelInternalFormat.Srgb8Alpha8, texSize.Width, texSize.Height, 0,
            PixelFormat.Rgba, PixelType.UnsignedByte, IntPtr.Zero);
    }

    if (mipLevels > 1)
    {
        // Generates mipmaps for whichever texture is currently bound (last chain texture in the HMD path).
        GL.GenerateMipmap(GenerateMipmapTarget.Texture2D);
    }

    GL.GenFramebuffers(1, out fboId);
}
/// <summary>
/// Creates the display window, initializes the Oculus runtime, creates (or
/// debug-creates) the HMD, and builds a D3D device + swap chain sized to the
/// HMD's native resolution.
/// </summary>
/// <param name="game">Owning game instance, forwarded to the base display.</param>
/// <param name="device">Graphics device, forwarded to the base display.</param>
/// <param name="parameters">Startup parameters (debug device flag, MSAA level, graphics profile).</param>
public OculusRiftDisplay(Game game, GraphicsDevice device, GameParameters parameters)
    : base(game, device, parameters)
{
    window = CreateForm(parameters, null);

    oculus = new Wrap();

    // Initialize the Oculus runtime.
    oculus.Initialize();

    // Use the head mounted display, if it's available, otherwise use the debug HMD.
    int numberOfHeadMountedDisplays = oculus.Hmd_Detect();
    if (numberOfHeadMountedDisplays > 0)
    {
        hmd = oculus.Hmd_Create(0);
    }
    else
    {
        hmd = oculus.Hmd_CreateDebug(OculusWrap.OVR.HmdType.DK2);
    }

    if (hmd == null)
    {
        // Bail out early; the object is left partially constructed on purpose,
        // matching the original error-handling design.
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    // Fix: ProductName may be null as well as empty; string.Empty comparison
    // would throw NullReferenceException in that case.
    if (string.IsNullOrEmpty(hmd.ProductName))
    {
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    OVR.Recti destMirrorRect;
    OVR.Recti sourceRenderTargetRect;
    hmd.AttachToWindow(window.Handle, out destMirrorRect, out sourceRenderTargetRect);

    // Create a backbuffer that's the same size as the HMD's resolution.
    OVR.Sizei backBufferSize;
    backBufferSize.Width = hmd.Resolution.Width;
    backBufferSize.Height = hmd.Resolution.Height;

    // Hook up pose recentering so input code can recenter the tracker.
    Input.OculusRiftSensors.RecenterPosition = hmd.RecenterPose;

    var deviceFlags = DeviceCreationFlags.None;
    deviceFlags |= parameters.UseDebugDevice ? DeviceCreationFlags.Debug : DeviceCreationFlags.None;

    var driverType = DriverType.Hardware;
    var featureLevel = HardwareProfileChecker.GetFeatureLevel(parameters.GraphicsProfile);

    swapChainDesc = new SwapChainDescription {
        BufferCount = 1,
        ModeDescription = new ModeDescription(backBufferSize.Width, backBufferSize.Height,
            new Rational(60, 1), Format.R8G8B8A8_UNorm),
        IsWindowed = true,
        OutputHandle = window.Handle,
        SampleDescription = new SampleDescription(parameters.MsaaLevel, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput | Usage.ShaderInput,
        Flags = SwapChainFlags.None,
    };

    D3D.Device.CreateWithSwapChain(driverType, deviceFlags, new[] { featureLevel }, swapChainDesc, out d3dDevice, out swapChain);

    // Prevent DXGI from monitoring our window for Alt+Enter etc.
    var factory = swapChain.GetParent<Factory>();
    factory.MakeWindowAssociation(window.Handle, WindowAssociationFlags.IgnoreAll);

    clientWidth = window.ClientSize.Width;
    clientHeight = window.ClientSize.Height;
}
/// <summary>
/// Reads a float-valued tracked-device property from the HMD (device index
/// <see cref="OpenVR.k_unTrackedDeviceIndex_Hmd"/>). The per-call property
/// error is discarded.
/// </summary>
/// <param name="prop">The property to query.</param>
/// <returns>The property value reported by OpenVR.</returns>
private float GetFloatProperty(ETrackedDeviceProperty prop)
{
    var propertyError = ETrackedPropertyError.TrackedProp_Success;
    float value = Hmd.GetFloatTrackedDeviceProperty(OpenVR.k_unTrackedDeviceIndex_Hmd, prop, ref propertyError);
    return value;
}
/// <summary>
/// Initializes VR rendering: allocates one render target per eye at the HMD's
/// recommended size, wraps them in OpenVR <c>Texture_t</c> structs, derives
/// submission texture bounds and a symmetric field of view from the raw
/// per-eye projection extents, and wires up the per-eye render tasks.
/// Sets <c>IsConnected</c> to true on completion.
/// </summary>
private void InitRendering()
{
    Debug.Log("[VR] rendering init begin");

    // Get HMD display size
    uint w = 0;
    uint h = 0;
    Hmd.GetRecommendedRenderTargetSize(ref w, ref h);
    int width = (int)w;
    int height = (int)h;

    // Create RT for each eye
    //TODO: Combine two RTs to one with [w*2, h]
    leftEyeRenderTarget = RenderTarget.New();
    leftEyeRenderTarget.Init(PixelFormat.R8G8B8A8_UNorm, width, height);
    rightEyeRenderTarget = RenderTarget.New();
    rightEyeRenderTarget.Init(PixelFormat.R8G8B8A8_UNorm, width, height);

    // Create texture structs for OpenVR
    leftEyeTexture = new Texture_t
    {
        handle = leftEyeRenderTarget.NativePtr,
        eColorSpace = EColorSpace.Auto,
        eType = ETextureType.DirectX
    };
    rightEyeTexture = new Texture_t
    {
        handle = rightEyeRenderTarget.NativePtr,
        eColorSpace = EColorSpace.Auto,
        eType = ETextureType.DirectX
    };

    // Calculate bounds and FOV
    // bounds and FOV calculation could be replaced with uv [0,0] - [1,1] and custom projection matrix from eye.Projection
    // GetProjectionRaw returns the tangents of the half-angles of the eye
    // frustum edges (left/right/top/bottom).
    float l_left = 0.0f, l_right = 0.0f, l_top = 0.0f, l_bottom = 0.0f;
    Hmd.GetProjectionRaw(EVREye.Eye_Left, ref l_left, ref l_right, ref l_top, ref l_bottom);

    float r_left = 0.0f, r_right = 0.0f, r_top = 0.0f, r_bottom = 0.0f;
    Hmd.GetProjectionRaw(EVREye.Eye_Right, ref r_left, ref r_right, ref r_top, ref r_bottom);

    // Symmetric tangent-space half-FOV covering both (asymmetric) eye frusta.
    var tanHalfFov = new Vector2(
        Mathf.Max(-l_left, l_right, -r_left, r_right),
        Mathf.Max(-l_top, l_bottom, -r_top, r_bottom));

    // Map each eye's asymmetric frustum into UV bounds within the symmetric
    // render target so the compositor samples the correct sub-rectangle.
    leftEyeTextureBounds = new VRTextureBounds_t
    {
        uMin = 0.5f + 0.5f * l_left / tanHalfFov.X,
        uMax = 0.5f + 0.5f * l_right / tanHalfFov.X,
        vMin = 0.5f - 0.5f * l_bottom / tanHalfFov.Y,
        vMax = 0.5f - 0.5f * l_top / tanHalfFov.Y
    };

    rightEyeTextureBounds = new VRTextureBounds_t
    {
        uMin = 0.5f + 0.5f * r_left / tanHalfFov.X,
        uMax = 0.5f + 0.5f * r_right / tanHalfFov.X,
        vMin = 0.5f - 0.5f * r_bottom / tanHalfFov.Y,
        vMax = 0.5f - 0.5f * r_top / tanHalfFov.Y
    };

    // Vertical field of view in degrees, from the combined half-FOV tangent.
    FieldOfView = 2.0f * Mathf.Atan(tanHalfFov.Y) * Mathf.Rad2Deg;

    ///////
    // Create Eye textures
    EyeTextures = new VREyeTexture[2];
    EyeTextures[0] = new VREyeTexture
    {
        Viewport = new Viewport(0, 0, width, height),
        RenderTarget = leftEyeRenderTarget
    };
    EyeTextures[1] = new VREyeTexture
    {
        Viewport = new Viewport(0, 0, width, height),
        RenderTarget = rightEyeRenderTarget
    };

    // HMDMirrorRenderTarget = leftEyeRenderTarget;

    // Create render tasks; each task submits its eye texture to the
    // compositor as soon as its render pass finishes.
    leftEyeRenderTask = RenderTask.Create<SceneRenderTask>();
    // Camera
    leftEyeRenderTask.Output = leftEyeRenderTarget;
    leftEyeRenderTask.End += (task, ctx) =>
    {
        Submit(EVREye.Eye_Left, ref leftEyeTexture, ref leftEyeTextureBounds);
    };

    rightEyeRenderTask = RenderTask.Create<SceneRenderTask>();
    // Camera
    rightEyeRenderTask.Output = rightEyeRenderTarget;
    rightEyeRenderTask.End += (task, ctx) =>
    {
        Submit(EVREye.Eye_Right, ref rightEyeTexture, ref rightEyeTextureBounds);
    };

    // Create eyes. Three entries; the third has no texture — presumably the
    // combined/center "eye". NOTE(review): confirm the intended meaning of index 2.
    EyesProperties = new VREye[3];
    for (int i = 0; i < EyesProperties.Length; i++)
    {
        VREye eye = new VREye();
        if (i < EyeTextures.Length)
        {
            eye.Texture = EyeTextures[i];
        }
        EyesProperties[i] = eye;
    }

    Debug.Log("[VR] rendering init end");

    IsConnected = true;
}
/// <summary>
/// Initializes shaders and buffers, brings up the Oculus runtime and HMD,
/// allocates per-eye swap-chain textures and depth buffers, creates the
/// desktop mirror texture/FBO, and configures tracking. On any failure a
/// message is shown and the application exits.
/// </summary>
/// <param name="e">Standard load event arguments.</param>
protected override void OnLoad(EventArgs e)
{
    base.OnLoad(e);

    InitShader();
    InitBuffer();

    // Define initialization parameters with debug flag.
    OVRTypes.InitParams initializationParameters = new OVRTypes.InitParams();
    initializationParameters.Flags = OVRTypes.InitFlags.Debug;

    // Initialize the Oculus runtime.
    bool success = wrap.Initialize(initializationParameters);
    if (!success)
    {
        MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    // Use the head mounted display.
    OVRTypes.GraphicsLuid graphicsLuid;
    hmd = wrap.Hmd_Create(out graphicsLuid);
    if (hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    // Fix: ProductName may be null; comparing against string.Empty would NRE.
    if (string.IsNullOrEmpty(hmd.ProductName))
    {
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    Console.WriteLine("SDK Version: " + wrap.GetVersionString());

    try
    {
        for (int i = 0; i < 2; i++)
        {
            OVRTypes.Sizei idealTextureSize = hmd.GetFovTextureSize((OVRTypes.EyeType)i, hmd.DefaultEyeFov[i], 1);
            eyeRenderTexture[i] = new TextureBuffer(wrap, hmd, true, true, idealTextureSize, 1, IntPtr.Zero, 1);
            eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i].GetSize(), 0);
        }

        // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution
        windowSize = new OVRTypes.Sizei(hmd.Resolution.Width / 2, hmd.Resolution.Height / 2);

        //For image displayed at ordinary monitor - copy of Oculus rendered one.
        OVRTypes.MirrorTextureDesc mirrorTextureDescription = new OVRTypes.MirrorTextureDesc();
        mirrorTextureDescription.Format = OVRTypes.TextureFormat.R8G8B8A8_UNORM_SRGB;
        mirrorTextureDescription.Width = windowSize.Width;
        mirrorTextureDescription.Height = windowSize.Height;
        mirrorTextureDescription.MiscFlags = OVRTypes.TextureMiscFlags.None;

        // Create the texture used to display the rendered result on the computer monitor.
        OVRTypes.Result result;
        result = hmd.CreateMirrorTextureGL(mirrorTextureDescription, out mirrorTexture);
        WriteErrorDetails(wrap, result, "Failed to create mirror texture.");

        layerFov = layers.AddLayerEyeFov();
        layerFov.Header.Flags = OVRTypes.LayerFlags.TextureOriginAtBottomLeft; // OpenGL Texture coordinates start from bottom left
        layerFov.Header.Type = OVRTypes.LayerType.EyeFov;

        // Configure the mirror read buffer
        uint texId;
        result = mirrorTexture.GetBufferGL(out texId);
        WriteErrorDetails(wrap, result, "Failed to retrieve the texture from the created mirror texture buffer.");

        //Rendertarget for mirror desktop window
        GL.GenFramebuffers(1, out mirrorFbo);
        GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
        GL.FramebufferTexture2D(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, texId, 0);
        GL.FramebufferRenderbuffer(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, 0);
        GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);

        // Turn off vsync to let the compositor do its magic
        this.VSync = VSyncMode.Off; //wglSwapIntervalEXT(0);

        // FloorLevel will give tracking poses where the floor height is 0
        result = hmd.SetTrackingOriginType(OVRTypes.TrackingOrigin.FloorLevel);
        WriteErrorDetails(wrap, result, "Failed to set tracking origin type.");

        GL.Enable(EnableCap.DepthTest); //DO NOT DELETE IT IN FUTURE UPDATES!
    }
    catch (Exception ex)
    {
        // Fix: the original swallowed the exception and let the app continue
        // half-initialized. Release all resources, report, and exit —
        // consistent with the other failure paths above.
        Dispose(layers);

        if (mirrorFbo != 0)
            GL.DeleteFramebuffers(1, ref mirrorFbo);
        Dispose(mirrorTexture);

        for (int eyeIndex = 0; eyeIndex < 2; ++eyeIndex)
        {
            Dispose(eyeRenderTexture[eyeIndex]);
            Dispose(eyeDepthBuffer[eyeIndex]);
        }

        // Disposing the device, before the hmd, will cause the hmd to fail when disposing.
        // Disposing the device, after the hmd, will cause the dispose of the device to fail.
        // It looks as if the hmd steals ownership of the device and destroys it, when it's shutting down.
        // device.Dispose();
        Dispose(hmd);
        Dispose(wrap);

        MessageBox.Show("Failed to initialize VR rendering: " + ex.Message, "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
    }
}
/// <summary>
/// Initializes shaders and buffers, brings up the Oculus runtime (SDK 0.8 era
/// API) and HMD, allocates per-eye shared render targets and depth buffers,
/// creates the desktop mirror texture/FBO, and configures head tracking.
/// On failure a message is shown and the application exits.
/// </summary>
/// <param name="e">Standard load event arguments.</param>
protected override void OnLoad(EventArgs e)
{
    base.OnLoad(e);

    InitShader();
    InitBuffer();

    // Initialize the Oculus runtime.
    bool success = wrap.Initialize();
    if (!success)
    {
        MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    // Use the head mounted display.
    OVR.GraphicsLuid graphicsLuid;
    hmd = wrap.Hmd_Create(out graphicsLuid);
    if (hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    // Fix: ProductName may be null; comparing against string.Empty would NRE.
    if (string.IsNullOrEmpty(hmd.ProductName))
    {
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
        Exit();
        return;
    }

    Console.WriteLine("SDK Version: " + wrap.GetVersionString());

    for (int i = 0; i < 2; i++)
    {
        OVR.Sizei idealTextureSize = hmd.GetFovTextureSize((OVR.EyeType)i, hmd.DefaultEyeFov[i], 1);
        eyeRenderTexture[i] = new OvrSharedRendertarget(idealTextureSize.Width, idealTextureSize.Height, hmd);
        eyeDepthBuffer[i] = new DepthBuffer(eyeRenderTexture[i].Width, eyeRenderTexture[i].Height);
    }

    //For image displayed at ordinary monitor - copy of Oculus rendered one.
    hmd.CreateMirrorTextureGL((uint)All.Srgb8Alpha8, this.Width, this.Height, out mirrorTex);

    layerFov = layers.AddLayerEyeFov();
    layerFov.Header.Flags = OVR.LayerFlags.TextureOriginAtBottomLeft; // OpenGL Texture coordinates start from bottom left
    layerFov.Header.Type = OVR.LayerType.EyeFov;

    //Rendertarget for mirror desktop window
    GL.GenFramebuffers(1, out mirrorFbo);
    GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, mirrorFbo);
    GL.FramebufferTexture2D(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget.Texture2D, mirrorTex.Texture.TexId, 0);
    GL.FramebufferRenderbuffer(FramebufferTarget.ReadFramebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, 0);
    GL.BindFramebuffer(FramebufferTarget.ReadFramebuffer, 0);

    EyeRenderDesc[0] = hmd.GetRenderDesc(OVR.EyeType.Left, hmd.DefaultEyeFov[0]);
    EyeRenderDesc[1] = hmd.GetRenderDesc(OVR.EyeType.Right, hmd.DefaultEyeFov[1]);

    // Specify which head tracking capabilities to enable.
    hmd.SetEnabledCaps(OVR.HmdCaps.DebugDevice);

    // Start the sensor
    //Update SDK 0.8: Usage of ovr_ConfigureTracking is no longer needed unless you want to disable tracking features. By default, ovr_Create enables the full tracking capabilities supported by any given device.
    //hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None);

    this.VSync = VSyncMode.Off;

    hmd.RecenterPose();

    // Init GL
    GL.Enable(EnableCap.DepthTest);
}