public void next()
        {
            // Advance to the next render target in the pool.  No-op once the
            // pool has been released.
            if (isReleased)
            {
                return;
            }
            Log.gpl.d("WVR_TexMngr", "Get texture from queue");

#if UNITY_EDITOR
            if (Application.isEditor)
            {
                // We can test the dictionary in editor: there is no native
                // queue, so round-robin over the cached key array instead.
                currentPtr = keyArray[keyArrayIndex++];
                if (keyArrayIndex >= textures.Count)
                {
                    keyArrayIndex = 0;
                }
            }
            else
#endif
            {
                // On device, ask the native runtime for a texture that is not
                // currently held by the compositor.
                currentPtr = (IntPtr)WaveVR_Utils.WVR_GetAvailableTextureID(queue);
            }

            // Resolve the chosen native pointer back to the managed render
            // texture and its depth-buffer pointer.
            currentRt       = GetRenderTextureByPtr(currentPtr);
            currentDepthPtr = GetDepthByPtr(currentPtr);
            //Log.d("WVR_TexMngr", "current rt" + currentPtr + " dp" + currentDepthPtr);
        }
Пример #2
0
    /// <summary>
    /// Main Loop: here we get the rototranslational data of the device (that will be streamed to Unity)
    /// And will show the preview on the device screen (streamed from Unity)
    /// </summary>
    /// <returns>Coroutine enumerator; loops forever on device, ends immediately in the editor.</returns>
    private IEnumerator MainLoop()
    {
#if !UNITY_EDITOR
        // Skip one iteration before touching the render pipeline.
        yield return(1);

        //we're about to render the first frame. This is necessary to make the app wake-up correctly if you remove the headset and put it on again
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.FIRST_FRAME);

        //loop forever
        while (true)
        {
            //Update the position of the device
            WaveVR.Instance.UpdatePoses(WVR_PoseOriginModel.WVR_PoseOriginModel_OriginOnGround);

            //wait the end of frame, so we can play a bit with textures
            yield return(new WaitForEndOfFrame());

            //for each eye (0 = left, 1 = right)
            for (int i = 0; i < 2; i++)
            {
                //notify WaveVR that we want to show the content of the render texture associated with one of the two cameras of the scene.
                //Each camera in the scene has in front of it a big quad, big as its near plane, with half of the texture of the Game Area sent by Unity.
                //This means that the left camera will frame the left part of the screen sent by Unity, and the right camera the right part.
                //Every camera will render this onto a RenderTexture that we'll now send to the ViveWave system, that will draw them onto the screen.
                //Basically we're taking the screen sent by Unity, we're splitting it into half and we're rendering it onto the screen of the Vive Focus device
                WaveVR_Utils.SetRenderTexture(currentRt[i].GetNativeTexturePtr());
                WaveVR_Utils.SendRenderEventNative(i == 0 ? WaveVR_Utils.k_nRenderEventID_SubmitL : WaveVR_Utils.k_nRenderEventID_SubmitR);
                WaveVR_Utils.SendRenderEventNative(i == 0 ? WaveVR_Utils.k_nRenderEventID_RenderEyeEndL : WaveVR_Utils.k_nRenderEventID_RenderEyeEndR);
            }
        }
#else
        // Editor build: nothing to stream, end the coroutine immediately.
        yield break;
#endif
    }
Пример #3
0
    void Update()
    {
#if UNITY_EDITOR
        // In the editor, only poll events when the simulator is connected.
        if (Application.isEditor && !WaveVR.Instance.isSimulatorOn)
        {
            return;
        }
#endif

        // Drain the native VR event queue, dispatching each event until the
        // queue reports empty.
        bool ret = false;
        do
        {
            WVR_Event_t vrevent = new WVR_Event_t();
#if UNITY_EDITOR
            if (Application.isEditor)
            {
                // Simulator path: poll the simulated event queue.
                ret = WaveVR_Utils.WVR_PollEventQueue_S(ref vrevent);
            }
            else
#endif
            {
                ret = Interop.WVR_PollEventQueue(ref vrevent);
            }
            if (ret)
            {
                processVREvent(vrevent);
            }
        } while (ret);
    }
Пример #4
0
    private void OnDisable()
    {
#if !UNITY_EDITOR
        // Tell the native layer the component is being disabled (device only).
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DISABLE);
#endif
        // Stop the pose-streaming coroutine started in OnEnable.
        StopCoroutine("MainLoop");
    }
Пример #5
0
 void SetFingerPoints(GestureResultRaw hand, wvr.WVR_SingleFinger_t finger, int startIndex)
 {
     // Copy the four joints of one finger (root to tip) into four
     // consecutive slots of the hand's point array, starting at startIndex.
     var points = hand.points;

     points[startIndex + 0] = WaveVR_Utils.GetPosition(finger.joint1);
     points[startIndex + 1] = WaveVR_Utils.GetPosition(finger.joint2);
     points[startIndex + 2] = WaveVR_Utils.GetPosition(finger.joint3);
     points[startIndex + 3] = WaveVR_Utils.GetPosition(finger.tip);
 }
Пример #6
0
    private void OnEnable()
    {
#if !UNITY_EDITOR
        // Tell the native layer the component is being enabled (device only).
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_ENABLE);
#endif
        // Start the pose-streaming coroutine (stopped again in OnDisable).
        StartCoroutine("MainLoop");
    }
        public TexturePool(TextureConfig cfg, int size, WVR_Eye eye) : base(eye)
        {
            // Creates `size` render textures up front and registers their
            // native pointers with the runtime's texture queue (device only).
            using (var ee = Log.ee("WVR_TexMngr", "TexturePool+", "TexturePool-"))
            {
                isReleased = false;

#if UNITY_EDITOR
                // Editor doesn't need the texture queue.
                size = 1;
#endif

                // NOTE(review): self-assignment -- `isLeft` here is the field
                // itself (the parameter is `eye`), so this line is a no-op.
                // Presumably base(eye) derives isLeft; confirm.
                this.isLeft = isLeft;
                this.size   = size;
                for (int i = 0; i < size; i++)
                {
                    currentRt       = CreateTexture(cfg);
                    currentPtr      = GetNativePtr(currentRt);
                    currentDepthPtr = GetNativeDepthBufferPtr(currentRt);

                    textures.Add(currentPtr, currentRt);
                    depthes.Add(currentPtr, currentDepthPtr);

                    Log.d("WVR_TexMngr", "Gen rt" + currentPtr + " dp" + currentDepthPtr);
                }
                // Snapshot the native pointers; these are the queue's keys.
                keyArray = new IntPtr[textures.Count];
                textures.Keys.CopyTo(keyArray, 0);

#if UNITY_EDITOR
                if (!Application.isEditor)
#endif
                // Hand the whole pool to the native runtime (skipped in editor).
                queue = WaveVR_Utils.WVR_StoreRenderTextures(keyArray, size, isBoth || isLeft);
            }
        }
Пример #8
0
    private WaveVR()
    {
        // Initializes the native WaveVR runtime (or the editor simulator),
        // then resets cached pose/connection state and creates the three
        // device wrappers (HMD + both controllers).  Bails out early when
        // native init fails, leaving the devices unconstructed.
#if UNITY_EDITOR
        if (Application.isEditor)
        {
            try
            {
                string ipaddr = "";
                //WaveVR_Utils.SIM_ConnectType type = WaveVR_Utils.SIM_ConnectType.SIM_ConnectType_USB;
                // NOTE(review): this HGlobal allocation is never freed; verify
                // the simulator retains the pointer before adding FreeHGlobal.
                System.IntPtr ptrIPaddr = Marshal.StringToHGlobalAnsi(ipaddr);
                WaveVR_Utils.WVR_SetPrintCallback_S(WaveVR_Utils.PrintLog);
                WaveVR_Utils.SIM_InitError error = WaveVR_Utils.WVR_Init_S(0, ptrIPaddr);

                if (error != 0)
                {
                    WaveVR_Utils.WVR_Quit_S();
                    return;
                }
                isSimulatorOn = true;
            }
            catch (Exception e)
            {
                // Previously swallowed silently; log so a broken simulator
                // setup is diagnosable instead of just appearing "off".
                Log.e(LOG_TAG, "Simulator init failed: " + e);
                return;
            }
        }
        else
#endif
        {
            Log.d(LOG_TAG, "WaveVR()+");

            WVR_InitError error = Interop.WVR_Init(WVR_AppType.WVR_AppType_VRContent);
            if (error != WVR_InitError.WVR_InitError_None)
            {
                ReportError(error);
                Interop.WVR_Quit();
                return;
            }
            WaveVR_Utils.notifyActivityUnityStarted();
        }

        // Reset cached state for the three device slots so the first update
        // forcibly notifies every listener.
        for (int i = 0; i < 3; i++)
        {
            poses[i]          = new WVR_DevicePosePair_t();
            connected[i]      = false; // force update connection status to all listener.
            deviceIndexMap[i] = 0;     // use hmd's id as default.
        }

        hmd             = new Device(WVR_DeviceType.WVR_DeviceType_HMD);
        controllerLeft  = new Device(WVR_DeviceType.WVR_DeviceType_Controller_Left);
        controllerRight = new Device(WVR_DeviceType.WVR_DeviceType_Controller_Right);

        // Removed: a trailing `if (Application.isEditor) {} else {}` pair that
        // compiled to a no-op in both editor and device builds.
    }
Пример #9
0
    public void stopCamera()
    {
        // Stop camera capture; no-op if the camera was never started.
        if (!mStarted)
        {
            return;
        }

        if (syncPose)
        {
            // Synchronous path: unhook the per-frame callback, stop the
            // native camera and free the frame buffer on this thread.
            WaveVR_Utils.Event.Remove("DrawCameraCompleted", OnUpdateCameraCompleted);
            Log.i(LOG_TAG, "Reset WaveVR_Render submit pose");
            WaveVR_Render.ResetPoseUsedOnSubmit();
            Interop.WVR_StopCamera();
            if (mframeBuffer != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(mframeBuffer);
                mframeBuffer = IntPtr.Zero;
            }
            mStarted = false;
        }
        else
        {
            // Threaded path: signal the worker loop to exit; it owns the
            // native stop/cleanup.  NOTE(review): mStarted is not cleared on
            // this path -- confirm the worker thread resets it when stopping.
            if (mthread != null && mthread.IsAlive)
            {
                toThreadStop = true;
                Log.i(LOG_TAG, "to thread stop");
            }
        }

        Log.i(LOG_TAG, "Release native texture resources");
        WaveVR_Utils.SendRenderEvent(WaveVR_Utils.RENDEREVENTID_ReleaseTexture);
    }
Пример #10
0
    private WaveVR()
    {
        Log.d(LOG_TAG, "WaveVR()+");

        // Bring up the native runtime; on failure report, quit native and
        // leave the device wrappers unconstructed.
        WVR_InitError error = Interop.WVR_Init(WVR_AppType.WVR_AppType_VRContent);

        if (error != WVR_InitError.WVR_InitError_None)
        {
            ReportError(error);
            Interop.WVR_Quit();
            Debug.Log("WVR_Quit");
            return;
        }
        WaveVR_Utils.notifyActivityUnityStarted();

        // Reset the three cached device slots (HMD + two controllers) so the
        // first update pushes a connection-status notification to listeners.
        for (int idx = 0; idx < 3; idx++)
        {
            poses[idx]          = new WVR_DevicePosePair_t();
            connected[idx]      = false; // force a notification to every listener.
            deviceIndexMap[idx] = 0;     // default each slot to the HMD's id.
        }

        hmd             = new Device(WVR_DeviceType.WVR_DeviceType_HMD);
        controllerLeft  = new Device(WVR_DeviceType.WVR_DeviceType_Controller_Left);
        controllerRight = new Device(WVR_DeviceType.WVR_DeviceType_Controller_Right);

        Log.d(LOG_TAG, "WaveVR()-");
    }
Пример #11
0
        public TexturePool(TextureConfig cfg, int size, bool isLeft)
        {
            // Builds a pool of `size` render textures and (outside the editor)
            // registers their native pointers with the runtime texture queue.
            isReleased = false;

            // Editor doesn't need the texture queue.
            if (Application.isEditor)
            {
                size = 1;
            }

            this.isLeft = isLeft;
            this.size   = size;
            for (int i = 0; i < size; i++)
            {
                currentRt              = new RenderTexture(cfg.w, cfg.h, cfg.depth, cfg.format, RenderTextureReadWrite.Default);
                currentRt.useMipMap    = cfg.useMipMap;
                currentRt.wrapMode     = cfg.wrapMode;
                currentRt.filterMode   = cfg.filterMode;
                currentRt.anisoLevel   = cfg.anisoLevel;
                currentRt.antiAliasing = cfg.antiAliasing;
                // Force GPU-side creation now so the native pointer is valid.
                currentRt.Create();
                currentPtr = currentRt.GetNativeTexturePtr();

                textures.Add(currentPtr, currentRt);
            }
#if UNITY_EDITOR
            // Editor keeps only the managed dictionary; no native queue.
            if (Application.isEditor)
            {
                return;
            }
#endif
            var array = new IntPtr[textures.Count];
            textures.Keys.CopyTo(array, 0);
            queue = WaveVR_Utils.WVR_StoreRenderTextures(array, size, isLeft);
        }
Пример #12
0
        public void next()
        {
            // Advance to the next available texture in the pool; no-op after
            // the pool has been released.
            if (isReleased)
            {
                return;
            }
            Profiler.BeginSample("Next");
            Log.gpl.d(TextureManager.TAG, "Get texture from queue");

#if UNITY_EDITOR
            if (Application.isEditor)
            {
                // We can test the dictionary in editor: round-robin over the
                // cached keys since no native queue exists.
                currentPtr = keyArray[keyArrayIndex++];
                if (keyArrayIndex >= textures.Count)
                {
                    keyArrayIndex = 0;
                }
            }
            else
#endif
            {
                // Ask the native runtime which texture id is free.
                currentPtr = (Int32)WaveVR_Utils.WVR_GetAvailableTextureID(queue);
            }

            currentRt       = GetRenderTextureByPtr(currentPtr);
            currentDepthPtr = GetDepthByPtr(currentPtr);
            //Log.d(TextureManager.TAG, "current rt" + currentPtr + " dp" + currentDepthPtr);
            // If the stored texture no longer matches the wanted config
            // (e.g. size changed), swap in a freshly created one.
            if (!CfgValidate(currentRt))
            {
                ReplaceCurrentWithNewTexture();
            }
            Profiler.EndSample();
        }
Пример #13
0
    void OnDisable()
    {
        // Tear down rendering: stop the loop, notify native (device only),
        // detach cameras from their targets and release pooled textures.
        Log.d(LOG_TAG, "OnDisable()+");
        enableRenderLoop(false);
#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DISABLE);
            if (synchronizer != null)
            {
                // Block until the render thread has consumed the event.
                synchronizer.sync();
            }
        }
        WaveVR_Utils.Event.Remove("IpdChanged", OnIpdChanged);
        setLoadingCanvas(false);

        if (lefteye != null)
        {
            lefteye.getCamera().targetTexture = null;
        }
        if (righteye != null)
        {
            righteye.getCamera().targetTexture = null;
        }
        if (textureManager != null)
        {
            textureManager.ReleaseTexturePools();
        }

        Log.d(LOG_TAG, "OnDisable()-");
    }
Пример #14
0
    private void Awake()
    {
        // Singleton guard: any later duplicate destroys itself; the existing
        // instance (including this one, on a repeat call) does nothing.
        if (instance != null)
        {
            if (instance != this)
            {
                Destroy(gameObject);
            }
            return;
        }

        // First instance: claim the slot and initialize the VR app.
        instance = this;

#if !UNITY_EDITOR
        //Init the HMD. Without this, the WaveVR system doesn't get data from its sensors.
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.HMD_INITIAILZED);
#endif

        // Keep the screen awake and push the target frame rate up.
        Screen.sleepTimeout         = SleepTimeout.NeverSleep;
        Application.targetFrameRate = 90; // Focus is actually 75, but 90 behaves a little better in practice.
    }
Пример #15
0
        // Replace the texture, which is 'not used by ATW', to new configured another texture.
        // Create full textures of a queue in once is very heavy loading.  Thus, we replace
        // the texture in a queue one by one.
        private void ReplaceCurrentWithNewTexture()
        {
            Profiler.BeginSample("NewTexture");

            // It will always get an error internally due to our EGL hacking.  Close the callstack dump for speed.
            var origin = Application.GetStackTraceLogType(LogType.Error);

            Application.SetStackTraceLogType(LogType.Error, StackTraceLogType.None);

            // Remove old texture from lists.
            textures.Remove(currentPtr);
            depthes.Remove(currentPtr);
            cfgs.Remove(currentPtr);
            rts.Remove(currentRt);
            ReleaseTexture(currentRt);

            // Create a new texture with the current config and register it in
            // every bookkeeping collection.
            T   newRt;
            int newPtr, newDepthPtr;

            newRt = CreateTexture(cfg);
            rts.Add(newRt);

            newPtr      = GetNativePtr(newRt);
            newDepthPtr = GetNativeDepthBufferPtr(newRt);

            textures.Add(newPtr, newRt);
            depthes.Add(newPtr, newDepthPtr);
            cfgs.Add(newPtr, cfg);

            Log.i(TextureManager.TAG, Log.CSB
                  .Append("Rm  rt=").Append(currentPtr).Append(" dp=").Append(currentDepthPtr).Append(" ")
                  .Append("Gen rt=").Append(newPtr).Append(" dp=").Append(newDepthPtr)
                  .ToString());

            // Use "Error" to log OUR hint.  Make sure it be filter with other error logs. don't change it to W or I.
            // The libEGL and Unity error will show because WaveVR change the egl surface for necessary.  Every game using WaveVR Unity plugin will have these logs.
            Log.e(TextureManager.TAG, "If the libEGL and Unity errors appeared above, don't panic or report a bug.  They are safe and will not crash your game.");
            Application.SetStackTraceLogType(LogType.Error, origin);

            // Rebuild the key snapshot if the pool size changed.
            if (keyArray.Length != textures.Count)
            {
                keyArray = new Int32[textures.Count];
            }
            textures.Keys.CopyTo(keyArray, 0);

#if UNITY_EDITOR
            if (!Application.isEditor)
#endif
            // The WVR_ReplaceCurrentTextureID will replace the texture which is specified by WVR_GetAvailableTextureID.
            WaveVR_Utils.WVR_ReplaceCurrentTextureID(queue, new IntPtr(newPtr));

            // Assign new to current
            currentRt       = newRt;
            currentPtr      = newPtr;
            currentDepthPtr = newDepthPtr;

            Profiler.EndSample();
        }
Пример #16
0
        public TexturePool(TextureConfig cfg, int size, WVR_Eye eye) : base(eye)
        {
#if !UNITY_STANDALONE
            // Builds `size` textures for one eye (or both, for single-pass
            // 2D-array stereo) and hands their native ids to the runtime queue.
            this.cfg = cfg;
            using (var ee = Log.ee(TextureManager.TAG, "TexturePool+", "TexturePool-"))
            {
                isReleased = false;

#if UNITY_EDITOR
                // Editor doesn't need the texture queue.
                size = 1;
#endif

                // NOTE(review): self-assignment -- `isLeft` is the field itself
                // (the parameter is `eye`); presumably base(eye) derives it.  Confirm.
                this.isLeft = isLeft;
                this.size   = size;

                // It will always get an error internally due to our EGL hacking.  Close the callstack dump for speed.
                var origin = Application.GetStackTraceLogType(LogType.Error);
                Application.SetStackTraceLogType(LogType.Error, StackTraceLogType.None);

                for (int i = 0; i < size; i++)
                {
                    rts.Add(CreateTexture(cfg));
                }

                // Call GetNativePtr once after all texture are created.  Try not to block render thread too long.
                for (int i = 0; i < size; i++)
                {
                    T rt = rts[i];
                    currentPtr      = GetNativePtr(rt);
                    currentDepthPtr = GetNativeDepthBufferPtr(rt);

                    textures.Add(currentPtr, rt);
                    depthes.Add(currentPtr, currentDepthPtr);
                    cfgs.Add(currentPtr, cfg);

                    Log.i(TextureManager.TAG, "Gen rt" + currentPtr + " dp" + currentDepthPtr);
                }

                Log.e(TextureManager.TAG, "Don't worry about the libEGL and Unity error showing above.  They are safe and will not crash your game.");
                Application.SetStackTraceLogType(LogType.Error, origin);

                // Snapshot native ids; these serve as the queue's keys.
                keyArray = new Int32[textures.Count];
                textures.Keys.CopyTo(keyArray, 0);

#if UNITY_EDITOR && UNITY_ANDROID
                if (!Application.isEditor)
#endif
                // Both-eyes pools register as a 2D texture array (single-pass
                // stereo); per-eye pools register as plain 2D textures.
                if (eye == WVR_Eye.WVR_Eye_Both)
                {
                    queue = WaveVR_Utils.WVR_StoreRenderTextures(keyArray, size, isBoth || isLeft, WVR_TextureTarget.WVR_TextureTarget_2D_ARRAY);
                }
                else
                {
                    queue = WaveVR_Utils.WVR_StoreRenderTextures(keyArray, size, isBoth || isLeft, WVR_TextureTarget.WVR_TextureTarget_2D);
                }
            }
#endif
        }
Пример #17
0
    void Awake()
    {
        // One-time render setup: singleton claim, render-thread synchronizer,
        // tracking origin, native graphics init and texture-manager creation.
        Log.d(LOG_TAG, "Awake()+");
        if (instance == null)
        {
            instance = this;
        }
        else
        {
            Log.w(LOG_TAG, "Render already Awaked");
        }
        synchronizer = new RenderThreadSynchronizer();

        // A valid globalOrigin (0..3) overrides the default tracking space.
        if (globalOrigin >= 0 && globalOrigin <= 3)
        {
            _origin = (WVR_PoseOriginModel)globalOrigin;
            Log.d(LOG_TAG, "Has global tracking space " + _origin);
        }

#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            if (WaveVR_Init.Instance == null || WaveVR.Instance == null)
            {
                Log.e(LOG_TAG, "Fail to initialize");
            }

            // This command can make sure native's render code are initialized in render thread.
            InitializeGraphic(synchronizer);

            // Setup render values
            uint w = 0, h = 0;
            Interop.WVR_GetRenderTargetSize(ref w, ref h);
            sceneWidth  = (float)w;
            sceneHeight = (float)h;

            // Projection raw values start zeroed; OnIpdChanged fills them in.
            projRawL = new float[4] {
                0.0f, 0.0f, 0.0f, 0.0f
            };
            projRawR = new float[4] {
                0.0f, 0.0f, 0.0f, 0.0f
            };

            OnIpdChanged(null);
        }

        // May call eglMakeCurrent inside TextureManager()
        if (textureManager == null)
        {
            textureManager = new TextureManager();
        }

        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.HMD_INITIAILZED);

        // Keep the screen awake and apply the configured frame rate.
        Screen.sleepTimeout         = SleepTimeout.NeverSleep;
        Application.targetFrameRate = targetFPS;
        Log.d(LOG_TAG, "Awake()-");
    }
Пример #18
0
 void OnDestroy()
 {
     // Drop manager/singleton references, then tell the native layer that
     // the Unity side is being destroyed.
     Log.d(LOG_TAG, "OnDestroy()+");
     textureManager = null;
     instance       = null;
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DESTROY);
     Log.d(LOG_TAG, "OnDestroy()-");
 }
    private void SinglePassSubmit(RenderTexture src, RenderTexture dst)
    {
        // Frame index folded into [0, 99]; added to the submit event id base
        // so each submit carries a per-frame slot.
        int frameSlot = (int)WaveVR.Instance.frameInx % 100;

        GL.IssuePluginEvent(WaveVR_Utils.GetRenderEventFunc(), (int)WaveVR_Utils.RENDEREVENTID_Wait_Get_Poses);
        GL.IssuePluginEvent(WaveVR_Utils.GetRenderEventFunc(), (int)WaveVR_Utils.RENDEREVENTID_SubmitL_Index_Min + frameSlot);
        GL.IssuePluginEvent(WaveVR_Utils.GetRenderEventFunc(), (int)WaveVR_Utils.RENDEREVENTID_SubmitR_Index_Min + frameSlot);
    }
Пример #20
0
 void OnApplicationQuit()
 {
     // Tell the native layer the app is quitting, then block until the
     // render thread has drained so the event is actually processed.
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_QUIT);
     if (synchronizer == null)
     {
         return;
     }
     synchronizer.sync();
 }
Пример #21
0
 public static void EndSimulator()
 {
     // Shut the editor simulator down, if one is running.
     if (WaveVR.Instance == null || !WaveVR.Instance.isSimulatorOn)
     {
         return;
     }
     WaveVR_Utils.WVR_Quit_S();
     WaveVR.Instance.isSimulatorOn = false;
 }
Пример #22
0
    public bool startCamera()
    {
        // Register callbacks fired from the render thread when the native
        // camera finishes starting / producing a frame.
        WaveVR_Utils.Event.Listen("StartCameraCompleted", OnStartCameraCompleted);
        WaveVR_Utils.Event.Listen("UpdateCameraCompleted", OnUpdateCameraCompleted);

        // Kick the camera off on the render thread; results arrive via the
        // events registered above.
        WaveVR_Utils.SendRenderEvent(WaveVR_Utils.RENDEREVENTID_StartCamera);
        return true;
    }
Пример #23
0
        public void reset()
        {
            // (Re)creates the eye texture pools, sized from the native
            // render-target dimensions, preview scale and AA settings.
            using (var ee = Log.ee(TAG, "reset"))
            {
#if UNITY_EDITOR
                poolSize = 3;
                if (!Application.isEditor)
#endif
                {
                    // On device the runtime dictates how many textures to pool.
                    poolSize = WaveVR_Utils.WVR_GetNumberOfTextures();
                }

                // Screen-derived fallback size; overwritten by the native
                // render-target query below.
                int  size = Mathf.Max(Screen.width / 2, Screen.height);
                uint w    = (uint)size;
                uint h    = (uint)size;
                Interop.WVR_GetRenderTargetSize(ref w, ref h);
                screenWidth  = (int)w;
                screenHeight = (int)h;

                float previewRatio = GetPreviewImageRatio();
                int   scaledWidth  = ToMultipleOfTwo((int)(screenWidth * FinalScale * previewRatio));
                int   scaledHeight = ToMultipleOfTwo((int)(screenHeight * FinalScale * previewRatio));

                // 0 means AA disabled; texture creation expects at least 1.
                int antiAliasing = AllowAntiAliasing ? QualitySettings.antiAliasing : 0;
                if (antiAliasing == 0)
                {
                    antiAliasing = 1;
                }

                Log.d(TAG, "Texture width=" + scaledWidth + " height=" + scaledHeight + " antiAliasing=" + antiAliasing);

                var cfg = new TextureConfig();
                cfg.w            = scaledWidth;
                cfg.h            = scaledHeight;
                cfg.depth        = 24;           // Only 24 has StencilBuffer.  See Unity document.  Only 24 can let VR work normally.
                cfg.format       = RenderTextureFormat.ARGB32;
                cfg.useMipMap    = false;
                cfg.wrapMode     = TextureWrapMode.Clamp;
                cfg.filterMode   = FilterMode.Bilinear;
                cfg.anisoLevel   = 1;
                cfg.antiAliasing = antiAliasing;

                // Drop any existing pools before building new ones.
                if (validate())
                {
                    ReleaseTexturePools();
                }

                if (IsSinglePass)
                {
                    // Single-pass stereo shares one 2D-array pool for both eyes.
                    both = new TexturePoolRenderTexture2DArray(cfg, poolSize);
                }
                else
                {
                    left  = new TexturePoolRenderTexture(cfg, poolSize, WVR_Eye.WVR_Eye_Left);
                    right = new TexturePoolRenderTexture(cfg, poolSize, WVR_Eye.WVR_Eye_Right);
                }
            }              // reset log.ee
        }
        public void reset()
        {
            // (Re)creates the eye texture pools sized from the screen or, on
            // device, from the native render-target query.
            using (var ee = Log.ee("WVR_TexMngr", "reset"))
            {
#if UNITY_EDITOR
                poolSize = 3;
                if (!Application.isEditor)
#endif
                {
                    // On device the runtime dictates the pool depth.
                    poolSize = WaveVR_Utils.WVR_GetNumberOfTextures();
                }

                // Screen-derived fallback; replaced by the native query when
                // not running in the editor.
                int  size = Mathf.Max(Screen.width / 2, Screen.height);
                uint w    = (uint)size;
                uint h    = (uint)size;
                if (!Application.isEditor)
                {
                    Interop.WVR_GetRenderTargetSize(ref w, ref h);
                }
                int screenWidth  = (int)(w);
                int screenHeight = (int)(h);

                // 0 means AA disabled; texture creation expects at least 1.
                int antiAliasing = AllowAntiAliasing ? QualitySettings.antiAliasing : 0;
                if (antiAliasing == 0)
                {
                    antiAliasing = 1;
                }

                Log.d("WVR_TexMngr", "TextureManager: screenWidth=" + screenWidth + " screenHeight=" + screenHeight + " antiAliasing=" + antiAliasing);

                var cfg = new TextureConfig();
                cfg.w            = screenWidth;
                cfg.h            = screenHeight;
                cfg.depth        = 24;    // 24-bit depth carries the stencil buffer VR rendering needs.
                cfg.format       = RenderTextureFormat.ARGB32;
                cfg.useMipMap    = false;
                cfg.wrapMode     = TextureWrapMode.Clamp;
                cfg.filterMode   = FilterMode.Bilinear;
                cfg.anisoLevel   = 1;
                cfg.antiAliasing = antiAliasing;

                // Drop any existing pools before building replacements.
                if (validate())
                {
                    ReleaseTexturePools();
                }

                if (IsSinglePass)
                {
                    both = new TexturePoolRenderTexture2DArray(cfg, poolSize);
                }
                else
                {
                    left  = new TexturePoolRenderTexture(cfg, poolSize, WVR_Eye.WVR_Eye_Left);
                    right = new TexturePoolRenderTexture(cfg, poolSize, WVR_Eye.WVR_Eye_Right);
                }
            }  // reset log.ee
        }
Пример #25
0
 void OnEnable()
 {
     // Re-register the IPD listener, restart the render loop, show the
     // loading canvas, then notify the native layer of the enable.
     Log.d(LOG_TAG, "OnEnable()+");
     WaveVR_Utils.Event.Listen("IpdChanged", OnIpdChanged);
     enableRenderLoop(true);
     setLoadingCanvas(true);
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_ENABLE);
     Log.d(LOG_TAG, "OnEnable()-");
 }
Пример #26
0
        public Vector2 GetAxis(WVR_InputId _id)
        {
            // Returns the 2D analog value for the touchpad or trigger.
            // Any other button id has no axis and yields Vector2.zero.
            if (_id != WVR_InputId.WVR_InputId_Alias1_Touchpad && _id != WVR_InputId.WVR_InputId_Alias1_Trigger)
            {
                Log.e(LOG_TAG, "GetAxis, button " + _id + " does NOT have axis!");
                return(Vector2.zero);
            }

            // Resolve connection state and the concrete device type, either
            // from the editor-mode controller wrapper or the live instance.
            bool           _connected = false;
            WVR_DeviceType _type      = this.DeviceType;

            #if UNITY_EDITOR || UNITY_STANDALONE
            if (isEditorMode)
            {
                _connected = WaveVR_Controller.Input(this.DeviceType).connected;
                _type      = WaveVR_Controller.Input(this.DeviceType).DeviceType;
            }
            else
            #endif
            {
                if (WaveVR.Instance != null)
                {
                    WaveVR.Device _device = WaveVR.Instance.getDeviceByType(this.DeviceType);
                    _connected = _device.connected;
                    _type      = _device.type;
                }
            }

            #if UNITY_EDITOR || UNITY_STANDALONE
            if (isEditorMode)
            {
                if (!WaveVR.Instance.isSimulatorOn)
                {
                    // No simulator connected: fall back to the pose simulator.
                    // NOTE(review): always queries the trigger axis regardless
                    // of _id here -- confirm this is intended.
                    var system = WaveVR_PoseSimulator.Instance;
                    axis = system.GetAxis(_type, WVR_InputId.WVR_InputId_Alias1_Trigger);
                }
                else
                {
                    if (_connected)
                    {
                        axis = WaveVR_Utils.WVR_GetInputAnalogAxis_S((int)_type, (int)_id);
                    }
                }
            }
            else
            #endif
            {
                // Device path: read the axis only while the device is connected;
                // otherwise the previously cached `axis` value is returned.
                if (_connected)
                {
                    axis = Interop.WVR_GetInputAnalogAxis(_type, _id);
                }
            }

            //Log.d (LOG_TAG, "GetAxis: {" + axis.x + ", " + axis.y + "}");
            return(new Vector2(axis.x, axis.y));
        }
Пример #27
0
 public void ResumeUnity()
 {
     // Notify the native layer of resume, wait for the render thread to
     // process it, then bring the loading canvas and render loop back up.
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_RESUME);
     if (synchronizer != null)
     {
         synchronizer.sync();
     }
     setLoadingCanvas(true);
     enableRenderLoop(true);
 }
Пример #28
0
 public void stopCamera()
 {
     // Stop the camera preview; no-op if it was never started.
     if (!mStarted)
     {
         return;
     }
     // Unhook the render-thread callbacks before issuing the stop event.
     WaveVR_Utils.Event.Remove("StartCameraCompleted", OnStartCameraCompleted);
     WaveVR_Utils.Event.Remove("UpdateCameraCompleted", OnUpdateCameraCompleted);
     WaveVR_Utils.SendRenderEvent(WaveVR_Utils.RENDEREVENTID_StopCamera);
 }
Пример #29
0
 public void updateTexture(uint textureId)
 {
     // Record the target native texture, then ask the render thread to copy
     // the latest camera frame into it.
     nativeTextureId = textureId;
     if (!mStarted)
     {
         Log.d(LOG_TAG, "camera not started yet");
         return;
     }
     // NOTE(review): Millisecond is only the 0-999 component of the current
     // time, not a duration -- confirm this is what the timing code expects.
     spentTime = System.DateTime.Now.Millisecond;
     WaveVR_Utils.SendRenderEvent(WaveVR_Utils.RENDEREVENTID_UpdateCamera);
 }
Пример #30
0
    void OnApplicationQuit()
    {
        // Notify the native layer of shutdown and wait for the render thread
        // to process the event; also end the simulator in the editor.
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_QUIT);
        if (synchronizer != null)
        {
            synchronizer.sync();
        }
#if UNITY_EDITOR
        WaveVR.EndSimulator();
#endif
    }