Example #1
0
 //Convert an OSVR.ClientKit.Viewport (pixel coordinates) into a Unity Rect
 //with normalized (0..1) coordinates, based on how many video inputs drive the display.
 public static Rect ConvertViewport(OSVR.ClientKit.Viewport viewport, OSVR.ClientKit.DisplayDimensions surfaceDisplayDimensions, int numDisplayInputs, int eyeIndex, int totalDisplayWidth)
 {
     //Unity expects normalized coordinates, not pixel coordinates
     float surfaceWidth  = (float)surfaceDisplayDimensions.Width;
     float surfaceHeight = (float)surfaceDisplayDimensions.Height;

     switch (numDisplayInputs)
     {
         case 1:
             //Single video input: normalize everything against the surface dimensions.
             return new Rect(viewport.Left / surfaceWidth,
                             viewport.Bottom / surfaceHeight,
                             viewport.Width / surfaceWidth,
                             viewport.Height / surfaceHeight);
         case 2:
         {
             //With two inputs in fullscreen mode, viewports expect to fill the screen.
             //Unity can only output to one window, so the right eye is offset by half
             //the total width of the displays.
             float left = eyeIndex == 0 ? 0f : 0.5f + viewport.Left / (float)totalDisplayWidth;
             return new Rect(left,
                             viewport.Bottom / surfaceHeight,
                             viewport.Width / (float)totalDisplayWidth,
                             viewport.Height / surfaceHeight);
         }
         default:
             Debug.LogError("[OSVR-Unity] More than two video inputs is not supported. Using default viewport.");
             return new Rect(0, 0, 0.5f, 1f);
     }
 }
Example #2
0
            //Refreshes viewing parameters (viewport and projection matrix) for every
            //surface of this eye, then renders each surface.
            public void UpdateSurfaces()
            {
                for (uint i = 0; i < SurfaceCount; i++)
                {
                    VRSurface surface = Surfaces[i];

                    //Pull the latest viewport for this viewer/eye/surface from ClientKit
                    //and apply it to the surface as a normalized Unity Rect.
                    OSVR.ClientKit.Viewport viewport =
                        Viewer.DisplayController.DisplayConfig.GetRelativeViewportForViewerEyeSurface(
                            Viewer.ViewerIndex, (byte)_eyeIndex, i);
                    surface.SetViewport(Math.ConvertViewport(viewport));

                    //Pull the matching projection matrix (column-major) from ClientKit,
                    //using the camera's current clip planes, and apply it.
                    OSVR.ClientKit.Matrix44f projMatrix =
                        Viewer.DisplayController.DisplayConfig.GetProjectionMatrixForViewerEyeSurfacef(
                            Viewer.ViewerIndex, (byte)_eyeIndex, i,
                            surface.Camera.nearClipPlane, surface.Camera.farClipPlane,
                            OSVR.ClientKit.MatrixConventionsFlags.ColMajor);
                    surface.SetProjectionMatrix(Math.ConvertMatrix(projMatrix));

                    //Render the surface with the freshly-applied parameters.
                    surface.Render();
                }
            }
            //Queries RenderManager for the given eye's viewport and repackages the
            //double-precision description as an integer OSVR.ClientKit.Viewport.
            public OSVR.ClientKit.Viewport GetEyeViewport(int eye)
            {
                OSVR_ViewportDescription desc = GetViewport(eye);
                return new OSVR.ClientKit.Viewport
                {
                    Left   = (int)desc.left,
                    Bottom = (int)desc.lower,
                    Width  = (int)desc.width,
                    Height = (int)desc.height
                };
            }
Example #4
0
            //Creates the RenderTexture(s) the eye camera(s) render into and hands their
            //native pointers to RenderManager so the native plugin can present them.
            //One-camera setups share a single full-width texture between both eyes;
            //two-camera setups get one texture per eye, sized from RenderManager's viewports.
            private void CreateRenderTextures()
            {
                if (stereoRigSetup == StereoRigSetup.OneCameraBothEyes)
                {
                    //One camera renders both eyes into a single texture spanning the
                    //total display area; both RenderManager eye buffers point at it.
                    RenderTexture renderTexture = CreateEyeRenderTexture((int)TotalDisplayWidth, (int)TotalDisplayHeight);
                    StereoTargetRenderTexture0 = renderTexture;
                    _camera0.targetTexture     = StereoTargetRenderTexture0;
                    RenderTexture.active       = StereoTargetRenderTexture0;

                    //Set the native texture pointer so we can access this texture from the plugin
                    RenderManager.SetEyeColorBuffer(renderTexture.GetNativeTexturePtr(), 0);
                    RenderManager.SetEyeColorBuffer(renderTexture.GetNativeTexturePtr(), 1);
                }
                else //two-camera setup
                {
                    //left eye: texture sized from RenderManager's viewport for eye 0
                    OSVR.ClientKit.Viewport leftEyeViewport = RenderManager.GetEyeViewport(0);
                    RenderTexture renderTexture0 = CreateEyeRenderTexture(leftEyeViewport.Width, leftEyeViewport.Height);
                    StereoTargetRenderTexture0 = renderTexture0;
                    _camera0.targetTexture     = StereoTargetRenderTexture0;

                    //right eye: texture sized from RenderManager's viewport for eye 1
                    OSVR.ClientKit.Viewport rightEyeViewport = RenderManager.GetEyeViewport(1);
                    RenderTexture renderTexture1 = CreateEyeRenderTexture(rightEyeViewport.Width, rightEyeViewport.Height);
                    StereoTargetRenderTexture1 = renderTexture1;
                    _camera1.targetTexture     = StereoTargetRenderTexture1;

                    //Set the native texture pointers so we can access these textures from the plugin
                    RenderManager.SetEyeColorBuffer(renderTexture0.GetNativeTexturePtr(), 0);
                    RenderManager.SetEyeColorBuffer(renderTexture1.GetNativeTexturePtr(), 1);
                }
            }

            //Creates a RenderTexture of the given pixel size with a 24-bit depth buffer,
            //applying the project's antialiasing setting when one is enabled.
            //Extracted to remove the three identical create-and-configure copies.
            private static RenderTexture CreateEyeRenderTexture(int width, int height)
            {
                RenderTexture renderTexture = new RenderTexture(width, height, 24, RenderTextureFormat.Default);
                if (QualitySettings.antiAliasing > 0)
                {
                    renderTexture.antiAliasing = QualitySettings.antiAliasing;
                }
                return renderTexture;
            }
Example #5
0
 //Stores the supplied viewport as this camera's viewport rect.
 public void SetViewport(OSVR.ClientKit.Viewport viewport)
 {
     this.Viewport = viewport;
 }
 //Queries RenderManager for the given eye's viewport and converts the
 //double-precision description into an integer OSVR.ClientKit.Viewport.
 public OSVR.ClientKit.Viewport GetEyeViewport(int eye)
 {
     OSVR_ViewportDescription desc = GetViewport(eye);
     OSVR.ClientKit.Viewport result = new OSVR.ClientKit.Viewport
     {
         Left   = (int)desc.left,
         Bottom = (int)desc.lower,
         Width  = (int)desc.width,
         Height = (int)desc.height
     };
     return result;
 }
Example #7
0
 //Convert a RenderManager viewport (pixel coordinates) to a normalized Unity Rect.
 //@todo below assumes left and right eyes split the screen in half horizontally
 public static Rect ConvertViewportRenderManager(OSVR.ClientKit.Viewport viewport)
 {
     //Unity expects normalized coordinates, not pixel coordinates.
     //BUGFIX: Viewport's members are ints, so the previous Left/Width and
     //Bottom/Height were integer divisions that truncated to 0 whenever the
     //offset was smaller than the extent; cast to float before dividing.
     //Width/Width was a roundabout constant 1 — written as 1f directly.
     return new Rect((float)viewport.Left / viewport.Width,
                     (float)viewport.Bottom / viewport.Height,
                     1f,
                     1f);
 }