Code example #1
        private Camera GetEyeCamera(EyeType type)
        {
            if (_eyeCameras.ContainsKey(type))
            {
                return(_eyeCameras[type]);
            }

            var eyeAnchorName = GetEyeAnchorName(type);
            var eyeAnchorObj  = _watcherAnchorObj.transform.Find(eyeAnchorName);

            if (eyeAnchorObj == null)
            {
                return(null);
            }

            var eyeCameraName  = GetEyeCameraName(type);
            var eyeCameraObjTf = eyeAnchorObj.transform.Find(eyeCameraName);

            if (eyeCameraObjTf == null)
            {
                return(null);
            }
            _eyeCameras[type] = eyeCameraObjTf.gameObject.GetComponent <Camera>();
            return(_eyeCameras[type]);
        }
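
A hedged usage sketch for the lookup above (EyeType.Left is an assumed enum member, and the anchor hierarchy is assumed to exist under _watcherAnchorObj); repeat calls are served from the _eyeCameras cache:

        // Hypothetical caller; EyeType.Left is an assumed member.
        var leftCamera = GetEyeCamera(EyeType.Left); // first call walks the transform hierarchy
        var cachedLeft = GetEyeCamera(EyeType.Left); // second call returns the cached Camera
        if (leftCamera != null)
        {
            leftCamera.fieldOfView = 60f;            // use it like any other Unity Camera
        }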
Code example #2
        internal GameObject GetOrCreateEyeCameraObj(EyeType type)
        {
            ToggleWatcherCamera(true);

            var eyeAnchorName = GetEyeAnchorName(type);
            var eyeAnchorObj  = SRDSceneEnvironment.GetOrCreateChild(_watcherAnchorObj.transform, eyeAnchorName);

            var        eyeCameraName  = GetEyeCameraName(type);
            var        eyeCameraObjTf = eyeAnchorObj.transform.Find(eyeCameraName);
            GameObject eyeCameraObj;

            if (eyeCameraObjTf == null)
            {
                eyeCameraObj      = GameObject.Instantiate(_watcherCameraObj);
                eyeCameraObj.name = eyeCameraName;
                eyeCameraObj.transform.SetParent(eyeAnchorObj.transform);
            }
            else
            {
                eyeCameraObj      = eyeCameraObjTf.gameObject;
                _eyeCameras[type] = eyeCameraObj.GetComponent <Camera>();
                if (_eyeCameras[type] == null)
                {
                    _eyeCameras[type] = eyeCameraObj.AddComponent <Camera>();
                    _eyeCameras[type].CopyFrom(_watcherCameraObj.GetComponent <Camera>());
                }
            }

            SRDSceneEnvironment.InitializePose(eyeCameraObj.transform);
            return(eyeCameraObj);
        }
Code example #3
    // Displays an emotion (eye sprites) transiently for 2 seconds, then goes back to neutral.
    private IEnumerator transientEmotion(EyeType emote)
    {
        Opie.instance().head().set_eye_type(emote, Opie.Head.instant_action());
        yield return(new WaitForSeconds(2));

        Opie.instance().head().set_eye_type(EyeType.NEUTRAL, Opie.Head.instant_action());
    }
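
A hedged caller sketch from inside a MonoBehaviour; EyeType.HAPPY is an assumed member of the robot's emotion enum:

    // Hypothetical caller; EyeType.HAPPY is an assumed enum member.
    StartCoroutine(transientEmotion(EyeType.HAPPY)); // shows the emotion, reverts to NEUTRAL after 2 s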
Code example #4
File: Graphics.cs Project: gr4viton/eye_out
        public void UpdateFromHmd(EyeType eye)
        {
            //TrackingState outTrack = hmd.GetTrackingState(0);

            //PoseF[] outEyePoses = new PoseF[2];

            //// hmdToEyeViewOffset[2] can be ovrEyeRenderDesc.HmdToEyeViewOffset returned
            ////     from ovrHmd_ConfigureRendering or ovrHmd_GetRenderDesc.
            //FovPort fov = renderDesc.Fov;

            //Vector3 hmdToEyeViewOffset1 = hmd.GetRenderDesc(eye, fov).HmdToEyeViewOffset;
            //Vector3 hmdToEyeViewOffset2 = hmd.GetRenderDesc(eye, fov).HmdToEyeViewOffset;

            //Vector3[] hmdToEyeViewOffset = new Vector3[] { hmdToEyeViewOffset1, hmdToEyeViewOffset2 };

            //hmd.GetEyePoses(frameIndex, hmdToEyeViewOffset, outEyePoses, ref outTrack);
            //var pose = renderPose[(int)eye] = outTrack.CameraPose;
            //hmd.GetHmdPosePerEye(eye); // obsolete in 0.4.4
            //hmd.GetEyePose(eye); // 0.4.1

            //var orientation = renderPose[(int)eye].Orientation;

            //OVR.MatrixProjection(renderDesc.Fov);

            config.player.hmd.PoseF = hmd.GetHmdPosePerEye(eye);
            //var pose = hmd.GetHmdPosePerEye(eye);
            //config.player.lastPose = pose;
            //config.player.UPDATE_hmdOrientation(pose.Orientation);
        }
Code example #5
File: Graphics.cs Project: gr4viton/eye_out
        private void SETUP_eyeRender(int eyeIndex)
        {
            GraphicsDevice.SetRasterizerState(GraphicsDevice.RasterizerStates.Default);
            EyeType       eye            = hmd.EyeRenderOrder[eyeIndex];
            EyeRenderDesc renderDesc     = eyeRenderDesc[(int)eye];
            Rect          renderViewport = eyeRenderViewport[(int)eye];

            UpdateFromHmd(eye);

            renderPose[(int)eye] = config.player.hmd.PoseF;

            // Calculate view matrix
            var finalRollPitchYaw = config.player.Rotation;

            var finalUp = finalRollPitchYaw.Transform(Vector3.UnitY);
            //var finalUp = Vector3.UnitY
            var finalForward = finalRollPitchYaw.Transform(-Vector3.UnitZ);

            var shiftedEyePos = config.player.Position;

            eyeView = Matrix.Translation(renderDesc.HmdToEyeViewOffset) * config.player.LookAtRH;

            // Calculate projection matrix
            eyeProjection = OVR.MatrixProjection(renderDesc.Fov, 0.001f, -1000.0f, true);
            eyeProjection.Transpose();

            eyeWorld = Matrix.Identity;

            // Set Viewport for our eye
            GraphicsDevice.SetViewport(renderViewport.ToViewportF());
        }
Code example #6
        public Matrix GetProjectionMatrix(EyeType eyeType, float nearPlane = 0.1f, float farPlane = 10000f)
        {
            Matrix result;
            Matrix projection = OVR.MatrixProjection(this.eyeRenderDesc[(int)eyeType].Fov, nearPlane, farPlane, true);

            Matrix.Transpose(ref projection, out result);

            return(result);
        }
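
A hedged usage sketch; EyeType.Left is an assumed member, and assigning the result to the eyeProjection field mirrors the pattern in code example #5:

            // Hypothetical caller; EyeType.Left is an assumed member.
            Matrix leftProjection = GetProjectionMatrix(EyeType.Left, nearPlane: 0.05f, farPlane: 5000f);
            eyeProjection = leftProjection; // the matrix is already transposed, as in example #5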
Code example #7
        /// <summary>
        /// Initializes a new instance of the <see cref="MLEye"/> class.
        /// </summary>
        /// <param name="eyeType">The type of eye to initialize.</param>
        public MLEye(EyeType eyeType)
        {
            this.Type = eyeType;

            // Initialize
            this.Center           = Vector3.zero;
            this.IsBlinking       = false;
            this.CenterConfidence = 0;
        }
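
A hedged construction sketch; EyeType.Left and EyeType.Right are assumed members of the eye enum:

        // Hypothetical setup; EyeType.Left and EyeType.Right are assumed members.
        var leftEye  = new MLEye(EyeType.Left);
        var rightEye = new MLEye(EyeType.Right);
        // Both start with Center at Vector3.zero and CenterConfidence 0 until tracking updates them.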
Code example #8
 public void SetFromStoreData(StoreData storeData)
 {
     Nickname        = storeData.CoreData.Nickname;
     CreateId        = storeData.CreateId;
     FontRegion      = storeData.CoreData.FontRegion;
     FavoriteColor   = storeData.CoreData.FavoriteColor;
     Gender          = storeData.CoreData.Gender;
     Height          = storeData.CoreData.Height;
     Build           = storeData.CoreData.Build;
     Type            = storeData.CoreData.Type;
     RegionMove      = storeData.CoreData.RegionMove;
     FacelineType    = storeData.CoreData.FacelineType;
     FacelineColor   = storeData.CoreData.FacelineColor;
     FacelineWrinkle = storeData.CoreData.FacelineWrinkle;
     FacelineMake    = storeData.CoreData.FacelineMake;
     HairType        = storeData.CoreData.HairType;
     HairColor       = storeData.CoreData.HairColor;
     HairFlip        = storeData.CoreData.HairFlip;
     EyeType         = storeData.CoreData.EyeType;
     EyeColor        = storeData.CoreData.EyeColor;
     EyeScale        = storeData.CoreData.EyeScale;
     EyeAspect       = storeData.CoreData.EyeAspect;
     EyeRotate       = storeData.CoreData.EyeRotate;
     EyeX            = storeData.CoreData.EyeX;
     EyeY            = storeData.CoreData.EyeY;
     EyebrowType     = storeData.CoreData.EyebrowType;
     EyebrowColor    = storeData.CoreData.EyebrowColor;
     EyebrowScale    = storeData.CoreData.EyebrowScale;
     EyebrowAspect   = storeData.CoreData.EyebrowAspect;
     EyebrowRotate   = storeData.CoreData.EyebrowRotate;
     EyebrowX        = storeData.CoreData.EyebrowX;
     EyebrowY        = storeData.CoreData.EyebrowY;
     NoseType        = storeData.CoreData.NoseType;
     NoseScale       = storeData.CoreData.NoseScale;
     NoseY           = storeData.CoreData.NoseY;
     MouthType       = storeData.CoreData.MouthType;
     MouthColor      = storeData.CoreData.MouthColor;
     MouthScale      = storeData.CoreData.MouthScale;
     MouthAspect     = storeData.CoreData.MouthAspect;
     MouthY          = storeData.CoreData.MouthY;
     BeardColor      = storeData.CoreData.BeardColor;
     BeardType       = storeData.CoreData.BeardType;
     MustacheType    = storeData.CoreData.MustacheType;
     MustacheScale   = storeData.CoreData.MustacheScale;
     MustacheY       = storeData.CoreData.MustacheY;
     GlassType       = storeData.CoreData.GlassType;
     GlassColor      = storeData.CoreData.GlassColor;
     GlassScale      = storeData.CoreData.GlassScale;
     GlassY          = storeData.CoreData.GlassY;
     MoleType        = storeData.CoreData.MoleType;
     MoleScale       = storeData.CoreData.MoleScale;
     MoleX           = storeData.CoreData.MoleX;
     MoleY           = storeData.CoreData.MoleY;
     Reserved        = 0;
 }
Code example #9
    private IEnumerator TransientEmotion(EyeType emote)
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        Opie.instance().head().set_eye_type(emote, Opie.Head.instant_action());
#endif
        yield return(new WaitForSeconds(2));        // hold the emotion for 2 seconds

#if UNITY_ANDROID && !UNITY_EDITOR
        Opie.instance().head().set_eye_type(EyeType.NEUTRAL, Opie.Head.instant_action());
#endif
    }
Code example #10
        private void SetupHomographyCallback(EyeType type)
        {
            var eyeCamera          = _eyeCamera[type];
            var homographyMaterial = _eyeCamMaterial[type];

            if (_isSRPUsed)
            {
#if SRP_AVAILABLE
                SRPCallbackFunc srpCallback = (context, camera) =>
                {
                    if (camera.name != eyeCamera.name)
                    {
                        return;
                    }
                    var rt = RenderTexture.GetTemporary(_eyeCamera[type].targetTexture.descriptor);
                    Graphics.Blit(_eyeCamera[type].targetTexture, rt);
                    Graphics.Blit(rt, _eyeCamera[type].targetTexture, homographyMaterial);
                    RenderTexture.ReleaseTemporary(rt); // temporary RTs from GetTemporary must be returned with ReleaseTemporary
                    //Graphics.Blit(_eyeCamera[type].targetTexture, _eyeCamRenderTexture[type], homographyMaterial);
                };
                _eyeCamSRPPostCallback[type]              = srpCallback;
                RenderPipelineManager.endCameraRendering += _eyeCamSRPPostCallback[type];
#endif
            }
            else
            {
                // CommandBuffer
                var camEvent = CameraEvent.AfterImageEffects;
                var buf      = new CommandBuffer();
                buf.name = SRDHelper.SRDConstants.HomographyCommandBufferName;
                foreach (var attachedBuf in _eyeCamera[type].GetCommandBuffers(camEvent))
                {
                    if (attachedBuf.name == buf.name)
                    {
                        _eyeCamera[type].RemoveCommandBuffer(camEvent, attachedBuf);
                        break;
                    }
                }
                int temp = Shader.PropertyToID("_Temp");
                buf.GetTemporaryRT(temp, -1, -1, 0, FilterMode.Bilinear);
                buf.Blit(_eyeCamera[type].targetTexture, temp);
                buf.Blit(temp, _eyeCamera[type].targetTexture, homographyMaterial);
                buf.ReleaseTemporaryRT(temp);
                //buf.Blit(_eyeCamera[type].targetTexture, _eyeCamRenderTexture[type], homographyMaterial);

                _eyeCamera[type].AddCommandBuffer(camEvent, buf);
            }
        }
Code example #11
 /// <summary>
 /// Get HMD screen viewport.
 /// </summary>
 /// <param name="eyeType">Type of HMD eye.</param>
 /// <returns>Viewport array (float array of size 4).</returns>
 public float[] GetViewport(EyeType eyeType)
 {
     float[] viewport = new float[4];
     if (Application.platform == RuntimePlatform.IPhonePlayer)
     {
         viewport[0] = 0.0f;
         viewport[1] = 0.0f;
         viewport[2] = 0.5f;
         viewport[3] = 1.0f;
     }
     else
     {
         viewport[0] = 0.5f;
         viewport[1] = 0.0f;
         viewport[2] = 0.5f;
         viewport[3] = 1.0f;
     }
     return(viewport);
 }
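
A hedged sketch mapping the returned array onto a Unity camera rect; the (x, y, width, height) layout is an assumption inferred from the values above:

     // Hedged sketch; assumes the array layout is (x, y, width, height) in normalized coordinates.
     float[] v = GetViewport(eyeType);
     eyeCamera.rect = new Rect(v[0], v[1], v[2], v[3]); // eyeCamera is a hypothetical Unity Camera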
Code example #12
        public async Task <int> SaveEyeType(EyeType eyeType)
        {
            if (eyeType.Id != 0)
            {
                _context.EyeTypes.Update(eyeType);

                await _context.SaveChangesAsync();

                return(1);
            }
            else
            {
                await _context.EyeTypes.AddAsync(eyeType);

                await _context.SaveChangesAsync();

                return(1);
            }
        }
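
Note that both branches return 1, so callers cannot distinguish an insert from an update. A hedged caller sketch; _repository is a hypothetical instance of the containing class:

            // Hypothetical caller; _repository hosts the SaveEyeType method above.
            var newEyeType = new EyeType { Id = 0 }; // Id == 0 routes to the AddAsync branch
            int result     = await _repository.SaveEyeType(newEyeType);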
Code example #13
File: PetAnim.cs Project: makscee/Mailru-Game-Jam
    private void Breath(float dir)
    {
        const float over = 1f;

        Utils.Animate(Vector2.zero, new Vector2(0, 0.08f), over, (v) =>
        {
            v *= dir;

            PlayerParts.Body.transform.localScale    += v;
            PlayerParts.Body.transform.localPosition += v;

            PlayerParts.Suit.transform.localScale    += v;
            PlayerParts.Suit.transform.localPosition += v;

            PlayerParts.Eyes.transform.localPosition += 2 * v;
            PlayerParts.Nose.transform.localPosition += 2 * v;

            PlayerParts.Crown.transform.localPosition -= 3 * v;

            PlayerParts.Paws.transform.localPosition += v;

            v.x = v.y;
            v.y = 0;
            v.z = 0;
            PlayerParts.Tail.transform.localPosition += 2 * v;
        });

        Utils.InvokeDelayed(() =>
        {
            ++_changeFaceIter;
            ++_blinkIter;

            // change face
            if (_changeFaceIter >= 4)
            {
                _changeFaceIter = 0;
                View.EyeType    = EyeType.Blink0;
                Utils.InvokeDelayed(() =>
                {
                    UpdateFace();
                    Breath(-dir);
                }, 0.15f);
                return;
            }

            // blinking
            if (_blinkIter >= 6)
            {
                _blinkIter = 0;

                _lastEyeType = View.EyeType;
                View.EyeType = EyeType.Blink0;
                Utils.InvokeDelayed(() =>
                {
                    View.EyeType = _lastEyeType;
                    Breath(-dir);
                }, 0.15f);
                return;
            }

            Breath(-dir);
        }, over);
    }
Code example #14
File: PetAnim.cs Project: makscee/Mailru-Game-Jam
 public void SetFace(EyeType e, NoseType n)
 {
     View.EyeType  = e;
     View.NoseType = n;
 }
Code example #15
 /// <inheritdoc />
 public override Sizei GetFovTextureSize(IntPtr sessionPtr, EyeType eye, FovPort fov, float pixelsPerDisplayPixel)
 {
     return(SafeNativeMethods.ovr_GetFovTextureSize(sessionPtr, eye, fov, pixelsPerDisplayPixel));
 }
Code example #16
File: HMD.cs Project: Youenn-Bouglouan/Extensions
 public PoseF GetEyePose(EyeType eye)
 {
     return ovrHmd_GetEyePose(this._hmd, eye);
 }
Code example #17
File: HMD.cs Project: Youenn-Bouglouan/Extensions
 public EyeRenderDesc GetRenderDesc(EyeType eye, FovPort fov)
 {
     return ovrHmd_GetRenderDesc(this._hmd, eye, fov);
 }
Code example #18
File: HMD.cs Project: kobush/RiftDotNet
 public Matrix[] GetEyeTimewarpMatrices(EyeType eye, IPosef renderPose)
 {
     return _resources.Device.GetEyeTimewarpMatrices(eye, renderPose);
 }
Code example #19
 /// <summary>
 /// Get calibrated HMD projection matrix.
 /// </summary>
 /// <param name="eyeType">Type of HMD eye.</param>
 /// <returns>Projection matrix array (float array of size 16).</returns>
 public float[] GetProjectionMatrix(EyeType eyeType)
 {
     float[] projection = new float[16];
     NativeAPI.WearableCalibration_getProjectionMatrix(projection, (int)eyeType);
     return(projection);
 }
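
A hedged sketch converting the flat array into a Unity Matrix4x4; whether the native side writes column-major or row-major data is an assumption to verify against the SDK:

     // Hedged sketch; assumes the native array is column-major (Unity's Matrix4x4 indexer is column-major).
     float[] p = GetProjectionMatrix(EyeType.Left); // EyeType.Left is an assumed member
     var projection = new Matrix4x4();
     for (int i = 0; i < 16; i++)
     {
         projection[i] = p[i];
     }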
Code example #20
File: HMD.cs Project: Youenn-Bouglouan/Extensions
 private static extern Size2 ovrHmd_GetFovTextureSize(IntPtr hmd, EyeType eye, FovPort fov, float pixelsPerDisplayPixel);
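
On its own, a static extern declaration like this will not compile; in the source it presumably carries a DllImport attribute. A hedged sketch of the usual completion (the module name "libovr" and the calling convention are assumptions, not taken from the source):

 // Hedged sketch; requires using System.Runtime.InteropServices. "libovr" is an assumed module name.
 [DllImport("libovr", CallingConvention = CallingConvention.Cdecl)]
 private static extern Size2 ovrHmd_GetFovTextureSize(IntPtr hmd, EyeType eye, FovPort fov, float pixelsPerDisplayPixel);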
Code example #21
File: HMD.cs Project: maral/Extensions
 private static extern EyeRenderDesc ovrHmd_GetRenderDesc(IntPtr hmd, EyeType eye, FovPort fov);
Code example #22
        public OculusTextureSwapChain(OvrWrap ovr,
                                      IntPtr sessionPtr,
                                      SharpDX.Direct3D11.Device device,
                                      EyeType eye,
                                      Format format,
                                      Sizei size,
                                      bool createDepthStencilView = false,
                                      bool isDebugDevice          = false)
        {
            _ovr          = ovr;
            _sessionPtr   = sessionPtr;
            _size         = size;
            _viewportSize = size;
            _viewport     = new ViewportF(0.0f, 0.0f, (float)size.Width, (float)size.Height);

            Format srgbFormat = GetSRgbFormat(format);

            TextureFormat        textureFormat = SharpDXHelpers.GetTextureFormat(srgbFormat);
            TextureSwapChainDesc swapChainDesc = new TextureSwapChainDesc()
            {
                ArraySize   = 1,
                BindFlags   = TextureBindFlags.DX_RenderTarget,
                Format      = textureFormat,
                Height      = _size.Height,
                MipLevels   = 1,
                MiscFlags   = TextureMiscFlags.DX_Typeless,
                SampleCount = 1,
                Width       = _size.Width
            };

            Texture2DDescription description1 = new Texture2DDescription()
            {
                ArraySize         = 1,
                BindFlags         = BindFlags.DepthStencil,
                CpuAccessFlags    = CpuAccessFlags.None,
                Format            = Format.R24G8_Typeless,
                Height            = _size.Height,
                MipLevels         = 1,
                OptionFlags       = ResourceOptionFlags.None,
                SampleDescription = new SampleDescription(1, 0),
                Usage             = ResourceUsage.Default,
                Width             = _size.Width
            };

            ShaderResourceViewDescription description2 = new ShaderResourceViewDescription()
            {
                Format    = srgbFormat,
                Dimension = ShaderResourceViewDimension.Texture2D
            };

            description2.Texture2D.MipLevels = 1;

            // Create a texture swap chain, which will contain the textures to render to, for the current eye.
            var result = _ovr.CreateTextureSwapChainDX(_sessionPtr, device.NativePointer, ref swapChainDesc, out _textureSwapChainPtr);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Error creating Oculus TextureSwapChain: " + lastError.ErrorString, lastError.Result);
            }


            int length;

            result = _ovr.GetTextureSwapChainLength(_sessionPtr, _textureSwapChainPtr, out length);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Failed to retrieve the number of buffers of the created swap chain: " + lastError.ErrorString, lastError.Result);
            }


            _textures = new TextureItem[length];

            for (int index = 0; index < length; ++index)
            {
                IntPtr bufferPtr;
                result = _ovr.GetTextureSwapChainBufferDX(_sessionPtr, _textureSwapChainPtr, index, typeof(Texture2D).GUID, out bufferPtr);

                if (result < Ab3d.OculusWrap.Result.Success)
                {
                    var lastError = _ovr.GetLastErrorInfo();
                    throw new OvrException("Failed to retrieve a texture from the created swap chain: " + lastError.ErrorString, lastError.Result);
                }

                Texture2D        texture2D1       = new Texture2D(bufferPtr);
                Texture2D        texture2D2       = null;
                DepthStencilView depthStencilView = null;

                if (createDepthStencilView)
                {
                    texture2D2       = new Texture2D(device, description1);
                    depthStencilView = new DepthStencilView(device, texture2D2, new DepthStencilViewDescription()
                    {
                        Flags     = DepthStencilViewFlags.None,
                        Dimension = DepthStencilViewDimension.Texture2D,
                        Format    = Format.D24_UNorm_S8_UInt
                    });
                }

                _textures[index] = new TextureItem()
                {
                    Texture            = texture2D1,
                    TextureDescription = texture2D1.Description,
                    DepthBuffer        = texture2D2,
                    DepthStencilView   = depthStencilView,
                    RTView             = new RenderTargetView(device, texture2D1, new RenderTargetViewDescription()
                    {
                        Format    = format,
                        Dimension = RenderTargetViewDimension.Texture2D
                    }),
                    SRView = new ShaderResourceView(device, texture2D1, description2)
                };


                if (isDebugDevice)
                {
                    var eyeTextAndIndex = eye.ToString() + index.ToString();

                    _textures[index].Texture.DebugName          = "OculusBackBuffer" + eyeTextAndIndex;
                    _textures[index].RTView.DebugName           = "OculusRT" + eyeTextAndIndex;
                    _textures[index].SRView.DebugName           = "OculusSR" + eyeTextAndIndex;
                    if (_textures[index].DepthBuffer != null) // null when createDepthStencilView is false
                    {
                        _textures[index].DepthBuffer.DebugName      = "OculusDepthBuffer" + eyeTextAndIndex;
                        _textures[index].DepthStencilView.DebugName = "OculusDepthStencilView" + eyeTextAndIndex;
                    }
                }
            }
        }
Code example #23
File: HMD.cs Project: maral/Extensions
 private static extern Size2 ovrHmd_GetFovTextureSize(IntPtr hmd, EyeType eye, FovPort fov, float pixelsPerDisplayPixel);
Code example #24
File: HMD.cs Project: maral/Extensions
 private static extern PoseF ovrHmd_GetEyePose(IntPtr hmd, EyeType eye);
Code example #25
File: HMD.cs Project: Youenn-Bouglouan/Extensions
 private static extern EyeRenderDesc ovrHmd_GetRenderDesc(IntPtr hmd, EyeType eye, FovPort fov);
Code example #26
File: program.cs Project: bholcomb/oculusSharp
			void initEyeTarget(EyeType eye)
			{
				EyeTarget e = new EyeTarget();
				e.eye = eye;
				e.fbo = GL.GenFramebuffer();
				e.depthTexture = GL.GenRenderbuffer();
				e.desc = OvrDLL.ovr_GetRenderDesc(session, eye, eye == EyeType.Left ? hmdDesc.LeftDefaultEyeFov : hmdDesc.RightDefaultEyeFov);
				e.renderTargetSize = OvrDLL.ovr_GetFovTextureSize(session, eye, eye == EyeType.Left ? hmdDesc.LeftDefaultEyeFov : hmdDesc.RightDefaultEyeFov, 1.0f); // was hard-coded to the left eye for both eyes

				e.proj = OvrDLL.ovrMatrix4f_Projection(e.desc.Fov, 0.1f, 1000.0f, ProjectionModifier.ClipRangeOpenGL);

				//create the texture swap chain
				TextureSwapChainDesc swapDesc = new TextureSwapChainDesc()
				{
					Type = TextureType.Texture2D,
					ArraySize = 1,
					Format = TextureFormat.R8G8B8A8_UNORM_SRGB,
					Width = e.renderTargetSize.Width,
					Height = e.renderTargetSize.Height,
					MipLevels = 1,
					SampleCount = 1,
					StaticImage = 0
				};

				result = OvrDLL.ovr_CreateTextureSwapChainGL(session, ref swapDesc, ref e.swapChain);
				if (result < 0)
				{
					Console.WriteLine("Error creating swap chain");
					OvrDLL.ovr_GetLastErrorInfo(ref error);
					Console.WriteLine("Last Error Info: {0}-{1}", error.result, error.ErrorString);
				}

				int swapChainLength = 0;
				OvrDLL.ovr_GetTextureSwapChainLength(session, e.swapChain, ref swapChainLength);
				Console.WriteLine("Swapchain length: {0}", swapChainLength);

				for(int i = 0; i< swapChainLength; i++)
				{
					UInt32 texId = 0;
					OvrDLL.ovr_GetTextureSwapChainBufferGL(session, e.swapChain, i, ref texId);
					GL.BindTexture(TextureTarget.Texture2D, texId);
					GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
					GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
					GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
					GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);

				}

				int currentIndex = 0;
				OvrDLL.ovr_GetTextureSwapChainCurrentIndex(session, e.swapChain, ref currentIndex);
				Console.WriteLine("Swapchain current index: {0}", currentIndex);

				UInt32 chainTexId = 0;
				OvrDLL.ovr_GetTextureSwapChainBufferGL(session, e.swapChain, currentIndex, ref chainTexId);

				GL.BindFramebuffer(FramebufferTarget.Framebuffer, e.fbo);
				GL.BindRenderbuffer(RenderbufferTarget.Renderbuffer, e.depthTexture);
				GL.RenderbufferStorage(RenderbufferTarget.Renderbuffer, RenderbufferStorage.DepthComponent32f, e.renderTargetSize.Width, e.renderTargetSize.Height);
				GL.FramebufferRenderbuffer(FramebufferTarget.DrawFramebuffer, FramebufferAttachment.Depth, RenderbufferTarget.Renderbuffer, e.depthTexture);
				
				GL.FramebufferTexture(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, chainTexId, 0);
				DrawBuffersEnum[] drawBuffers = new DrawBuffersEnum[1] { DrawBuffersEnum.ColorAttachment0 };
				GL.DrawBuffers(1, drawBuffers);

				//check frame buffer completeness
				FramebufferErrorCode err = GL.CheckFramebufferStatus(FramebufferTarget.Framebuffer);
				if(err != FramebufferErrorCode.FramebufferComplete)
				{
					Console.WriteLine("Error in frame buffer: {0}", err);
				}

				eyes[(int)eye] = e;

				GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
			}
Code example #27
 /// <summary>
 /// Get infrared image data corresponding to the particular eye type currently tracked
 /// </summary>
 /// <param name="eye"> The eye type of the infrared image. </param>
 /// <returns> The infrared image of the eye. </returns>
 public Image GetInfraredImage(EyeType eye)
 {
     return FingoManager.Instance.GetInfraredImage(eye);
 }
Code example #28
File: Eyes.cs Project: makscee/Mailru-Game-Jam
 public static Sprite Get(EyeType type)
 {
     return(_sprites[type]);
 }
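
A hedged lookup sketch; EyeType.Blink0 is taken from code example #13, and _sprites is assumed to be pre-populated:

 // Hypothetical caller; EyeType.Blink0 appears in example #13.
 Sprite blinkSprite = Eyes.Get(EyeType.Blink0); // throws KeyNotFoundException if the type is missing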
Code example #29
File: HMD.cs Project: Youenn-Bouglouan/Extensions
 private static extern PoseF ovrHmd_GetEyePose(IntPtr hmd, EyeType eye);
Code example #30
File: FingoMain.cs Project: MJX1010/UnityShader
 /// <summary>
 /// Get infrared image data corresponding to the particular eye type currently tracked
 /// </summary>
 /// <param name="eye"> The eye type of the infrared image. </param>
 /// <returns> The infrared image of the eye. </returns>
 public Image GetInfraredImage(EyeType eye)
 {
     return(FingoManager.GetInfraredImage(eye));
 }
Code example #31
        /// <summary>
        /// UpdateRenderingContext is called from the BeginVirtualRealityRenderingStep and should update the properties in the RenderingContext according to the current eye.
        /// See remarks for more info about the usual tasks that are performed in this method.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>UpdateRenderingContext</b> is called from the BeginVirtualRealityRenderingStep and should update the properties in the RenderingContext according to the current eye.
        /// </para>
        /// <para>
        /// This method is usually called from the <see cref="BeginVirtualRealityRenderingStep"/> (when the virtual reality provider is enabled).
        /// </para>
        /// <para>
        /// Usually this method does the following:<br/>
        /// - Creates the back buffers and views that are needed for rendering
        /// (the back buffers are also recreated if the size of <see cref="RenderingContext.CurrentBackBufferDescription"/> is different from the size of previously created back buffers).<br/>
        /// - Updates the <see cref="VirtualRealityContext.CurrentEye"/> property.<br/>
        /// - Sets the <see cref="RenderingContext.UsedCamera"/> property to a <see cref="StereoscopicCamera"/><br/>
        /// - Calls <see cref="RenderingContext.SetBackBuffer"/> method and sets the new back buffers.<br/>
        /// - Updates <see cref="ResolveMultisampledBackBufferRenderingStep.DestinationBuffer"/> on the <see cref="DXScene.DefaultResolveMultisampledBackBufferRenderingStep"/> and sets it to the eye texture.
        /// </para>
        /// </remarks>
        /// <param name="renderingContext">RenderingContext</param>
        /// <param name="isNewFrame">true if no eye was yet rendered for the current frame; false if the first eye was already rendered for the current frame and we need to render the second eye</param>
        public override void UpdateRenderingContext(RenderingContext renderingContext, bool isNewFrame)
        {
            // This code is executed inside BeginVirtualRealityRenderingStep before all the objects are rendered.

            // Base method does:
            // - sets the virtualRealityContext.CurrentEye based on the isNewFrame parameter: isNewFrame == true => LeftEye else RightEye
            // - ensures that stereoscopicCamera is created and sets its properties for the current eye and based on the current EyeSeparation, Parallax and InvertLeftRightView
            // - sets renderingContext.UsedCamera = stereoscopicCamera
            base.UpdateRenderingContext(renderingContext, isNewFrame);


            var virtualRealityContext = renderingContext.VirtualRealityContext;

            Eye     currentEye = virtualRealityContext.CurrentEye;
            EyeType ovrEye     = currentEye == Eye.Left ? EyeType.Left : EyeType.Right;

            int eyeIndex = currentEye == Eye.Left ? 0 : 1;


            FovPort defaultEyeFov = _hmdDesc.DefaultEyeFov[eyeIndex];
            var     idealSize     = _ovr.GetFovTextureSize(_sessionPtr, ovrEye, defaultEyeFov, _pixelsPerDisplayPixel);

            // When we render this frame for the first time
            // we also check that all the required resources are created
            // Check if we need to create or recreate the RenderTargetViews and DepthStencilViews
            if (isNewFrame &&
                (_eyeTextureSwapChains[eyeIndex] == null ||
                 _eyeTextureSwapChains[eyeIndex].Size.Width != idealSize.Width ||
                 _eyeTextureSwapChains[eyeIndex].Size.Height != idealSize.Height))
            {
                CreateResources(renderingContext.DXScene);
            }

            if (isNewFrame)
            {
                _ovr.GetEyePoses(_sessionPtr, 0L, true, _hmdToEyeOffset, ref _eyePoses, out _sensorSampleTime);
            }

            var camera = renderingContext.DXScene.Camera;


            // From OculusRoomTiny main.cpp #221

            //Get the pose information
            var eyeQuat = SharpDXHelpers.ToQuaternion(_eyePoses[eyeIndex].Orientation);
            var eyePos  = SharpDXHelpers.ToVector3(_eyePoses[eyeIndex].Position);

            // Get view and projection matrices for the Rift camera
            Vector3 cameraPosition       = camera.GetCameraPosition();
            Matrix  cameraRotationMatrix = camera.View;

            cameraRotationMatrix.M41 = 0; // Remove translation
            cameraRotationMatrix.M42 = 0;
            cameraRotationMatrix.M43 = 0;

            cameraRotationMatrix.Invert(); // Invert to get rotation matrix

            Vector4 rotatedEyePos4 = Vector3.Transform(eyePos, cameraRotationMatrix);
            var     rotatedEyePos  = new Vector3(rotatedEyePos4.X, rotatedEyePos4.Y, rotatedEyePos4.Z);

            var finalCameraPosition = cameraPosition + rotatedEyePos;

            var eyeQuaternionMatrix = Matrix.RotationQuaternion(eyeQuat);
            var finalRotationMatrix = eyeQuaternionMatrix * cameraRotationMatrix;

            Vector4 lookDirection4 = Vector3.Transform(new Vector3(0, 0, -1), finalRotationMatrix);
            var     lookDirection  = new Vector3(lookDirection4.X, lookDirection4.Y, lookDirection4.Z);

            Vector4 upDirection4 = Vector3.Transform(Vector3.UnitY, finalRotationMatrix);
            var     upDirection  = new Vector3(upDirection4.X, upDirection4.Y, upDirection4.Z);

            var viewMatrix = Matrix.LookAtRH(finalCameraPosition, finalCameraPosition + lookDirection, upDirection);



            float zNear = camera.NearPlaneDistance;
            float zFar  = camera.FarPlaneDistance;

            if (zNear < 0.05f)
            {
                zNear = 0.05f;
            }

            zFar *= 1.2f; // increase zFar - FarPlaneDistance is not exact here, because the eye's Position can sit above the Camera's position

            //zNear = 0.1f;
            //zFar = 100;

            var eyeRenderDesc = _ovr.GetRenderDesc(_sessionPtr, ovrEye, _hmdDesc.DefaultEyeFov[eyeIndex]);

            var projectionMatrix = _ovr.Matrix4f_Projection(eyeRenderDesc.Fov, zNear, zFar, ProjectionModifier.None).ToMatrix();

            projectionMatrix.Transpose();

            _matrixCamera.Projection = projectionMatrix;
            _matrixCamera.View       = viewMatrix;
            _matrixCamera.SetCameraPosition(finalCameraPosition);

            renderingContext.UsedCamera = _matrixCamera;


            // Change the current viewport
            renderingContext.CurrentViewport = _eyeTextureSwapChains[eyeIndex].Viewport;
            renderingContext.DeviceContext.Rasterizer.SetViewport(renderingContext.CurrentViewport);

            if (_msaaBackBuffer == null)
            {
                renderingContext.SetBackBuffer(backBuffer: _eyeTextureSwapChains[eyeIndex].CurrentTexture,
                                               backBufferDescription: _eyeTextureSwapChains[eyeIndex].CurrentTextureDescription,
                                               renderTargetView: _eyeTextureSwapChains[eyeIndex].CurrentRTView,
                                               depthStencilView: _eyeTextureSwapChains[eyeIndex].CurrentDepthStencilView,
                                               bindNewRenderTargetsToDeviceContext: false); // Do not bind new buffers because this is done in the next rendering step - PrepareRenderTargetsRenderingStep
            }
            else
            {
                // MSAA
                renderingContext.SetBackBuffer(backBuffer: _msaaBackBuffer,
                                               backBufferDescription: _msaaBackBufferDescription,
                                               renderTargetView: _msaaBackBufferRenderTargetView,
                                               depthStencilView: _msaaDepthStencilView,
                                               bindNewRenderTargetsToDeviceContext: false); // Do not bind new buffers because this is done in the next rendering step - PrepareRenderTargetsRenderingStep

                renderingContext.DXScene.DefaultResolveMultisampledBackBufferRenderingStep.DestinationBuffer = _eyeTextureSwapChains[eyeIndex].CurrentTexture;
            }


            // When rendering this frame for the first time, set the NewViewport on the ChangeBackBufferRenderingStep to reset the Viewport from the split-screen viewport to the final full-screen viewport
            if (isNewFrame && _resetViewportRenderingStep != null)
            {
                int backBufferWidth  = renderingContext.FinalBackBufferDescription.Width;
                int backBufferHeight = renderingContext.FinalBackBufferDescription.Height;

                _resetViewportRenderingStep.NewViewport = new ViewportF(0, 0, backBufferWidth, backBufferHeight);
            }
        }
Code example #32
 /// <summary>
 /// Get color (RGB) image data corresponding to the particular eye type currently tracked
 /// </summary>
 /// <param name="eye"> The eye type of the color image. </param>
 /// <returns> The color image of the eye. </returns>
 public Image GetRGBImage(EyeType eye)
 {
     return(FingoManager.Instance.GetRGBImage(eye));
 }
Code example #33
File: HMD.cs Project: Youenn-Bouglouan/Extensions
 public Size2 GetFovTextureSize(EyeType eye, FovPort fov, float pixelsPerDisplayPixel)
 {
     return ovrHmd_GetFovTextureSize(this._hmd, eye, fov, pixelsPerDisplayPixel);
 }
Code example #34
File: HMD.cs Project: kobush/RiftDotNet
 public ITextureSize GetFovTextureSize(EyeType eye, IFovPort fov, float pixelsPerDisplayPixel)
 {
     return _resources.Device.GetFovTextureSize(eye, fov, pixelsPerDisplayPixel);
 }
Code example #35
File: HMD.cs Project: kobush/RiftDotNet
 public void EndEyeRender(EyeType eye,
     IPosef renderPose, ITextureSize textureSize, Viewport renderViewport,
     uint textureId)
 {
     _resources.Device.EndEyeRender(eye, renderPose, textureSize,
         renderViewport, textureId);
 }
Code example #36
 public static string GenerateDragonImageUrl(DragonType breed, Gender gender, Age age, BodyGene bodygene, Color body, WingGene winggene, Color wings, TertiaryGene tertgene, Color tert, Element element, EyeType eyetype)
 => GenerateDragonImageUrl((int)breed, (int)gender, (int)age, (int)bodygene, (int)body, (int)winggene, (int)wings, (int)tertgene, (int)tert, (int)element, (int)eyetype);
Code example #37
        public Matrix GetProjectionMatrix(EyeType eyeType, float nearPlane = 0.1f, float farPlane = 10000f)
        {
            Matrix result;
            Matrix projection = OVR.MatrixProjection(this.eyeRenderDesc[(int)eyeType].Fov, nearPlane, farPlane, true);
            Matrix.Transpose(ref projection, out result);

            return result;
        }
Code example #38
File: HMD.cs Project: kobush/RiftDotNet
 public IPosef GetEyePose(EyeType eye)
 {
     return _resources.Device.GetEyePose(eye);
 }
Code example #39
 public static extern Sizei ovr_GetFovTextureSize(ovrSession session, EyeType eye, FovPort fov, float pixelsPerDisplayPixel);
Code example #40
 /// <inheritdoc />
 public override EyeRenderDesc GetRenderDesc(IntPtr sessionPtr, EyeType eyeType, FovPort fov)
 {
     return(SafeNativeMethods.ovr_GetRenderDesc(sessionPtr, eyeType, fov));
 }
Code example #41
 public static extern EyeRenderDesc ovr_GetRenderDesc(ovrSession session, EyeType eye, FovPort fov);
Code example #42
 public PoseF GetEyePose(EyeType eye)
 {
     return this.renderPose[(int)eye];
 }
Code example #43
File: HMD.cs Project: kobush/RiftDotNet
 public IPosef BeginEyeRender(EyeType eye)
 {
     return _resources.Device.BeginEyeRender(eye);
 }