// Example #1
        private void OnDisable()
        {
            // Release our hold on the tracked-camera stream so SteamVR can
            // shut the camera down once the last consumer lets go.
            SteamVR_TrackedCamera.Source(Settings.instance.UndistortRawFeed).Release();
        }
// Example #2
        /// <summary>
        /// Initializes webcam state and, when running under OpenVR with a
        /// usable tracked camera, auto-selects the SteamVR passthrough feed.
        /// </summary>
        private void Awake()
        {
            WebCamDevice[] availableWebCams = WebCamTexture.devices;
            webcamTexture  = new WebCamTexture();
            webcamName     = "";

            // Any() replaces the old foreach/Select(new { }) loop: we only need
            // to know whether at least one known dual-lens device is attached.
            hasDualCameras = availableWebCams.Any(webcam => dualcameraDevices.Contains(webcam.name));

            if (XRSettings.loadedDeviceName == "OpenVR")
            {
                SteamVR_TrackedCamera.VideoStreamTexture camSource = SteamVR_TrackedCamera.Source(false);
                bool             openvrHasCamera = camSource.hasCamera;
                EVRSettingsError error           = EVRSettingsError.None;
                bool             cameraIsEnabled = OpenVR.Settings.GetBool(OpenVR.k_pch_Camera_Section, OpenVR.k_pch_Camera_EnableCamera_Bool, ref error);

                // Only switch to the SteamVR feed automatically when the user
                // left the webcam selection on "Auto" and the HMD camera both
                // exists and is enabled in SteamVR's settings.
                if (Settings.instance.SelectedWebcam == "Auto" && cameraIsEnabled && openvrHasCamera)
                {
                    Settings.instance.SelectedWebcam = "SteamVR";
                    // NOTE(review): presumably a ProjectionScale below 32 means
                    // the user never customized it — confirm before relying on it.
                    if (Settings.instance.ProjectionScale < 32)
                    {
                        new CameraOffsetMenu().SetPreset("OpenVR Projection");
                    }
                }
            }
        }
    private void Update()
    {
        // Grab the current frame; bail out until the stream delivers one.
        var source = SteamVR_TrackedCamera.Source(this.undistorted, 0);
        Texture2D frame = source.texture;
        if (frame == null)
        {
            return;
        }

        this.material.mainTexture = frame;
        float aspect = (float)frame.width / (float)frame.height;

        if (this.cropped)
        {
            // Crop away the stretched edge regions of the undistorted feed.
            VRTextureBounds_t bounds = source.frameBounds;
            float du = bounds.uMax - bounds.uMin;
            float dv = bounds.vMax - bounds.vMin;
            this.material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);
            this.material.mainTextureScale  = new Vector2(du, dv);
            aspect *= Mathf.Abs(du / dv);
        }
        else
        {
            // Uncropped: show the full texture, flipped vertically.
            this.material.mainTextureOffset = Vector2.zero;
            this.material.mainTextureScale  = new Vector2(1f, -1f);
        }

        // Scale the quad's height by the aspect so texels stay square.
        this.target.localScale = new Vector3(1f, 1f / aspect, 1f);

        if (source.hasTracking)
        {
            // Pose the quad where this frame was recorded.
            SteamVR_Utils.RigidTransform pose = source.transform;
            this.target.localPosition = pose.pos;
            this.target.localRotation = pose.rot;
        }
    }
    void StopCamera()
    {
        // Drop the displayed frame, then release our reference to the
        // undistorted stream so SteamVR can stop the camera.
        capturePlane.GetComponent <Renderer>().material.mainTexture = null;
        SteamVR_TrackedCamera.Source(true).Release();
    }
// Example #5
        private void OnEnable()
        {
            // Acquire a reference to the stream (balanced by a Release in
            // OnDisable) and stay enabled only when a camera actually exists.
            var camSource = SteamVR_TrackedCamera.Source(Settings.instance.UndistortRawFeed);
            camSource.Acquire();
            this.enabled = camSource.hasCamera;
        }
 private void OnEnable()
 {
     // Acquire must be balanced by a Release in OnDisable; turn this
     // component off outright when no tracked camera is available.
     var source = SteamVR_TrackedCamera.Source(this.undistorted, 0);
     source.Acquire();
     if (source.hasCamera == false)
     {
         base.enabled = false;
     }
 }
// Example #7
        private void Update()
        {
            SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
            Texture2D frame = source.texture;
            if (frame == null)
            {
                return;
            }

            // Reassign every frame: the texture belongs to a ring buffer that
            // advances in lock-step with its pose, and touching any of the
            // accessors drives the VideoStreamTexture's internal Update.
            material.mainTexture = frame;

            // The quad's height tracks the aspect so texels stay square.
            float aspect = (float)frame.width / frame.height;

            if (cropped)
            {
                // The undistorted feed has 'bad' stretched areas near the edges
                // where the fisheye is undone; crop to frameBounds to hide them.
                VRTextureBounds_t bounds = source.frameBounds;
                float du = bounds.uMax - bounds.uMin;
                float dv = bounds.vMax - bounds.vMin;
                material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);
                material.mainTextureScale  = new Vector2(du, dv);
                aspect *= Mathf.Abs(du / dv);
            }
            else
            {
                material.mainTextureOffset = Vector2.zero;
                material.mainTextureScale  = new Vector2(1, -1);
            }

            target.localScale = new Vector3(1, 1.0f / aspect, 1);

            if (source.hasTracking)
            {
                // With tracking, override the scale above: size and place the
                // quad on the camera's projection plane at z = ProjectionZ,
                // using the pose this frame was recorded at.
                const float ProjectionZ = 1.0f;
                Vector2 projScale = GetProjectionScale(source);
                target.localScale = new Vector3(2.0f * ProjectionZ / projScale.x,
                                                2.0f * ProjectionZ / projScale.y,
                                                1.0f);

                var pose = source.transform;
                target.localPosition = pose.TransformPoint(new Vector3(0.0f, 0.0f, ProjectionZ));
                target.localRotation = pose.rot;
            }
        }
// Example #8
        private void OnDisable()
        {
            // Drop the texture while inactive, then release the stream so it
            // can shut down once its last consumer is gone (paired with the
            // Acquire in OnEnable).
            material.mainTexture = null;
            SteamVR_TrackedCamera.Source(undistorted).Release();
        }
// Example #9
        private void OnEnable()
        {
            // Paired with the Release in OnDisable so the stream stops once
            // nobody is consuming it.
            var source = SteamVR_TrackedCamera.Source(undistorted);
            source.Acquire();

            // No camera present: nothing to show, so turn ourselves off.
            if (source.hasCamera == false)
            {
                enabled = false;
            }
        }
// Example #10
        private void Update()
        {
            var config = Settings.instance;

            planeObject = ARCompanion.xrcamBehaviour.planeObject;
            tcamTex = SteamVR_TrackedCamera.Source(config.UndistortRawFeed).texture;

            // Push the newest frame onto the AR plane's material and keep the
            // plane at the feed's fixed orientation.
            if (planeObject != null)
            {
                ARCompanion.xrcamBehaviour.planeMat.SetTexture("_Tex", tcamTex);
                planeObject.transform.localRotation = Quaternion.Euler(-90, 0, -180);
            }
        }
// Example #11
    private void OnDisable()
    {
        // Stop listening for flip toggles while disabled.
        if (flipAction != null)
        {
            flipAction.RemoveOnChangeListener(ToggleFlipped, SteamVR_Input_Sources.Any);
        }

        // Drop the displayed frame while inactive.
        material.mainTexture = null;

        // Balanced against the Acquire in OnEnable so the stream can stop
        // once its last consumer releases it.
        SteamVR_TrackedCamera.Source(undistorted).Release();
    }
// Example #12
    void OnRenderImage(RenderTexture _source, RenderTexture destination)
    {
        Texture2D frame = SteamVR_TrackedCamera.Source(undistorted).texture;
        if (frame == null)
        {
            Debug.LogWarning("Texture from tracked camera was null.");
            return;
        }

        // Feed the shader its per-eye parameters; the right eye is detected
        // from the destination render-texture's name.
        bool isRightEye = destination.name.Contains("Right");
        material.SetFloat(flippedParamId, flipped ? 1 : 0);
        material.SetFloat(useRightParamId, isRightEye ? 1 : 0);
        material.SetFloat(imageOffsetParamId, ImageOffset);
        Graphics.Blit(frame, destination, material);
    }
// Example #13
    /// <summary>
    /// Builds the selection line renderer, acquires the undistorted tracked
    /// camera stream, and sizes the input plane to the feed's aspect ratio.
    /// Called before the first frame update.
    /// </summary>
    void Start()
    {
        // Thin red line used for pointing/selection feedback.
        LineRenderer lineRenderer = lineRenderObj.AddComponent <LineRenderer>();
        lineRenderer.material      = new Material(Shader.Find("Sprites/Default"));
        lineRenderer.startColor    = Color.red;
        lineRenderer.endColor      = Color.red;
        lineRenderer.startWidth    = 0.005f;
        lineRenderer.endWidth      = 0.005f;
        lineRenderer.positionCount = 2;

        objHovered  = null;
        objSelected = null;

        // Acquire the undistorted feed; must be balanced by a Release when
        // this consumer goes away.
        source = SteamVR_TrackedCamera.Source(true);
        source.Acquire();

        // Cache the texture once instead of re-reading the property per
        // operand (each access re-runs the stream's internal update).
        // NOTE(review): texture may be null before the first frame arrives —
        // confirm Start() only runs once the stream is live.
        Texture2D cameraTexture = source.texture;

        // Stretch the plane horizontally so its texels stay square.
        currScale    = inputPlane.transform.localScale;
        currScale.x *= (float)cameraTexture.width / (float)cameraTexture.height;
        inputPlane.transform.localScale = currScale;
    }
// Example #14
    private void OnEnable()
    {
        // Start listening for flip toggles while enabled.
        if (flipAction != null)
        {
            flipAction.AddOnChangeListener(ToggleFlipped, SteamVR_Input_Sources.Any);
        }

        // Balanced by the Release in OnDisable so the stream can shut down
        // once its last consumer is gone.
        SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
        source.Acquire();

        if (source.hasCamera == false)
        {
            // Nothing to display without a camera; turn this component off.
            Debug.LogWarning("No SteamVR_TrackedCamera was detected.");
            enabled = false;
        }
    }
// Example #15
        /// <summary>
        /// Reads the tracked camera's projection matrix and returns its x/y
        /// scale terms (m0, m5), or Vector2.one when they are unavailable.
        /// </summary>
        static Vector2 GetProjectionScale(SteamVR_TrackedCamera.VideoStreamTexture source)
        {
            Valve.VR.CVRTrackedCamera trackedCamera = Valve.VR.OpenVR.TrackedCamera;
            if (trackedCamera == null)
            {
                return Vector2.one;
            }

            // Only the scale terms are used, so any near/far plane values do.
            const float Near = 1.0f;
            const float Far  = 100.0f;

            var projection = new Valve.VR.HmdMatrix44_t();
            Valve.VR.EVRTrackedCameraError result =
                trackedCamera.GetCameraProjection(source.deviceIndex, 0, source.frameType, Near, Far, ref projection);

            if (result != Valve.VR.EVRTrackedCameraError.None)
            {
                return Vector2.one;
            }

            return new Vector2(projection.m0, projection.m5);
        }
// Example #16
    private void Update()
    {
        // Keyboard tuning: arrow keys nudge the image offset, space flips.
        if (Input.GetKeyDown(KeyCode.LeftArrow))
        {
            ImageOffset += 1f / 128;
        }
        else if (Input.GetKeyDown(KeyCode.RightArrow))
        {
            ImageOffset -= 1f / 128;
        }
        if (Input.GetKeyDown(KeyCode.Space))
        {
            flipped = !flipped;
        }

        SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
        Texture2D frame = source.texture;
        if (frame == null)
        {
            Debug.LogWarning("Texture from tracked camera was null.");
            return;
        }

        // Reassign every frame: the texture belongs to a ring buffer that
        // advances in lock-step with its pose, and reading any accessor
        // drives the VideoStreamTexture's internal Update.
        material.mainTexture = frame;

        // Aspect kept for parity with the original sample; the quad scaling
        // that consumed it is disabled below.
        float aspect = (float)frame.width / frame.height;

        if (cropped)
        {
            // Crop to frameBounds to hide the stretched edges of the
            // undistorted feed.
            VRTextureBounds_t bounds = source.frameBounds;
            float du = bounds.uMax - bounds.uMin;
            float dv = bounds.vMax - bounds.vMin;
            material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);
            material.mainTextureScale  = new Vector2(du, dv);
            aspect *= Mathf.Abs(du / dv);
        }
        else
        {
            material.mainTextureOffset = Vector2.zero;
            material.mainTextureScale  = new Vector2(1, -1);
        }

        //target.localScale = new Vector3(1, 1.0f / aspect, 1);

        if (source.hasTracking)
        {
            // Pose application is disabled, but the transform accessor is
            // still read — presumably to keep the stream's frame/pose
            // bookkeeping in sync; confirm before removing.
            SteamVR_Utils.RigidTransform rigidTransform = source.transform;
            //target.localPosition = rigidTransform.pos;
            //target.localRotation = rigidTransform.rot;
        }
    }
 private void OnDisable()
 {
     // Clear the displayed frame, then release our hold on the stream
     // (paired with the Acquire in OnEnable).
     this.material.mainTexture = null;
     SteamVR_TrackedCamera.Source(this.undistorted, 0).Release();
 }