        private void OnDisable()
        {
            // Clear the texture when no longer active.
            material.mainTexture = null;

            // The video stream must be symmetrically acquired and released in
            // order to properly disable the stream once there are no consumers.
            SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
            source.Release();
        }
Example No. 2
        private void OnEnable()
        {
            // The video stream must be symmetrically acquired and released in
            // order to properly disable the stream once there are no consumers.
            SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
            source.Acquire();

            // Auto-disable if no camera is present.
            if (!source.hasCamera)
            {
                enabled = false;
            }
        }
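Taken together, the OnDisable above and this OnEnable form the usual acquire/release pattern for the tracked-camera stream. A minimal self-contained sketch combining them, assuming a public undistorted flag and a material assigned in the Inspector (the namespace import depends on your SteamVR plugin version):

using UnityEngine;
using Valve.VR;  // assumption: SteamVR plugin 2.x namespace; older versions keep these classes in the global namespace

public class TrackedCameraFeed : MonoBehaviour  // hypothetical class name
{
    public bool undistorted = true;   // request the lens-corrected stream
    public Material material;         // material that displays the camera feed

    private void OnEnable()
    {
        // Acquire is reference-counted and must be paired with Release.
        var source = SteamVR_TrackedCamera.Source(undistorted);
        source.Acquire();

        // Auto-disable if no camera is present.
        if (!source.hasCamera)
        {
            enabled = false;
        }
    }

    private void OnDisable()
    {
        // Clear the texture and drop our reference so the stream can shut down.
        material.mainTexture = null;
        SteamVR_TrackedCamera.Source(undistorted).Release();
    }

    private void Update()
    {
        // The texture comes from a ring buffer, so reassign it every frame.
        var texture = SteamVR_TrackedCamera.Source(undistorted).texture;
        if (texture != null)
        {
            material.mainTexture = texture;
        }
    }
}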
Example No. 3
        private void Update()
        {
            var config = Settings.instance;

            planeObject = ARCompanion.xrcamBehaviour.planeObject;
            SteamVR_TrackedCamera.VideoStreamTexture videoSource = SteamVR_TrackedCamera.Source(config.UndistortRawFeed);
            tcamTex = videoSource.texture;
            if (planeObject != null)
            {
                ARCompanion.xrcamBehaviour.planeMat.SetTexture("_Tex", tcamTex);
                planeObject.transform.localRotation = Quaternion.Euler(-90, 0, -180);
            }
        }
Example No. 4
    void Update()
    {
        var source  = SteamVR_TrackedCamera.Source(undistorted);
        var texture = source.texture;

        if (texture == null)
        {
            return;
        }

        // Apply the latest texture to the material.  This must be performed
        // every frame since the underlying texture is actually part of a ring
        // buffer which is updated in lock-step with its associated pose.
        // (Strictly speaking, calling any one of the accessors is enough, since they
        // internally call Update on the SteamVR_TrackedCamera.VideoStreamTexture.)
        material.mainTexture = texture;

        // Adjust the height of the quad based on the aspect to keep the texels square.
        var aspect = (float)texture.width / texture.height;

        // The undistorted video feed has 'bad' areas near the edges where the original
        // square texture feed is stretched to undo the fisheye from the lens.
        // Therefore, you'll want to crop it to the specified frameBounds to remove this.
        if (cropped)
        {
            var bounds = source.frameBounds;
            material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);

            var du = bounds.uMax - bounds.uMin;
            var dv = bounds.vMax - bounds.vMin;
            material.mainTextureScale = new Vector2(du, dv);

            aspect *= Mathf.Abs(du / dv);
        }
        else
        {
            material.mainTextureOffset = Vector2.zero;
            material.mainTextureScale  = new Vector2(1, -1);
        }

        target.localScale         = new Vector3(1, 1.0f / aspect, 1);
        quad.transform.localScale = new Vector3(scale, scale, scale);

        // Apply the pose that this frame was recorded at.
        //if (source.hasTracking)
        //{
        //	var t = source.transform;
        //	target.localPosition = t.pos;
        //	target.localRotation = t.rot;
        //}
    }
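For reference, the fields this Update loop relies on but does not declare. A sketch of how they might be declared (names taken from the snippet; types and default values are assumptions):

    public bool undistorted = true;  // request the lens-corrected feed
    public bool cropped = true;      // crop to frameBounds to hide the stretched edge regions
    public float scale = 1.0f;       // overall size of the preview quad
    public Transform target;         // receives the aspect correction (and pose, if that block is enabled)
    public GameObject quad;          // quad whose material shows the feed
    public Material material;        // material whose mainTexture is replaced each frame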
Example No. 5
    private void OnDisable()
    {
        if (flipAction != null)
        {
            flipAction.RemoveOnChangeListener(ToggleFlipped, SteamVR_Input_Sources.Any);
        }

        // Clear the texture when no longer active.
        material.mainTexture = null;

        // The video stream must be symmetrically acquired and released in
        // order to properly disable the stream once there are no consumers.
        SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
        source.Release();
    }
Example No. 6
    void OnRenderImage(RenderTexture _source, RenderTexture destination)
    {
        //Debug.Log(destination.name);
        SteamVR_TrackedCamera.VideoStreamTexture cameraSource = SteamVR_TrackedCamera.Source(undistorted);
        Texture2D texture = cameraSource.texture;

        if (texture == null)
        {
            Debug.LogWarning("Texture from tracked camera was null.");
            return;
        }
        material.SetFloat(flippedParamId, flipped ? 1 : 0);
        material.SetFloat(useRightParamId, destination.name.Contains("Right") ? 1 : 0);
        material.SetFloat(imageOffsetParamId, ImageOffset);
        Graphics.Blit(texture, destination, material);
    }
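The flippedParamId, useRightParamId, and imageOffsetParamId fields are not declared in this snippet; they are presumably cached shader property IDs. A sketch of how they might be set up — the property names here (_Flipped, _UseRightEye, _ImageOffset) are placeholders for whatever the blit shader actually exposes:

    private static readonly int flippedParamId     = Shader.PropertyToID("_Flipped");     // placeholder name
    private static readonly int useRightParamId    = Shader.PropertyToID("_UseRightEye"); // placeholder name
    private static readonly int imageOffsetParamId = Shader.PropertyToID("_ImageOffset"); // placeholder name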
Example No. 7
    // Start is called before the first frame update
    void Start()
    {
        LineRenderer lineRenderer = lineRenderObj.AddComponent<LineRenderer>();

        lineRenderer.material      = new Material(Shader.Find("Sprites/Default"));
        lineRenderer.startColor    = Color.red;
        lineRenderer.endColor      = Color.red;
        lineRenderer.startWidth    = 0.005f;
        lineRenderer.endWidth      = 0.005f;
        lineRenderer.positionCount = 2;

        objHovered  = null;
        objSelected = null;

        bool undistort = true;

        source = SteamVR_TrackedCamera.Source(undistort);

        source.Acquire();


        currScale    = inputPlane.transform.localScale;
        currScale.x *= (float)source.texture.width / (float)source.texture.height;

        inputPlane.transform.localScale = currScale;
        //capturePlane.transform.localScale = new Vector3((float)cameraTexture.width / (float)cameraTexture.height, 1, 1);



        /* planeShowHide.AddOnStateUpListener(TriggerShow,handType);
         * planeShowHide.AddOnStateDownListener(TriggerHide,handType); */

/*         WebCamDevice[] devices = WebCamTexture.devices;
 *      webcamTexture = new WebCamTexture();
 *
 *
 *
 *      if (devices.Length > 0)
 *      {
 *          webcamTexture.deviceName = devices[0].name;
 *          Debug.Log(devices[0].name);
 *          webcamTexture.Play();
 *
 *          capturePlane.GetComponent<Renderer>().material.mainTexture = webcamTexture;
 *          //capturePlane.transform.localScale = new Vector3((float)webcamTexture.width / (float)webcamTexture.height, 1, 1);
 *      } */
    }
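This Start method acquires the stream but never releases it in the code shown. Per the symmetric acquire/release rule quoted in the other examples, a matching teardown might look like this sketch (OnDestroy chosen here; OnDisable would work equally well if the Acquire were moved to OnEnable):

    private void OnDestroy()
    {
        // Drop the reference taken in Start so SteamVR can stop the stream
        // once there are no remaining consumers.
        if (source != null)
        {
            source.Release();
            source = null;
        }
    }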
Example No. 8
    void OnEnable()
    {
        // The video stream must be symmetrically acquired and released in
        // order to properly disable the stream once there are no consumers.
        var source = SteamVR_TrackedCamera.Source(undistorted);

        source.Acquire();

        // Auto-disable if no camera is present.
        if (!source.hasCamera)
        {
            enabled = false;
        }

        spriteQuadCamera = GameObject.Find("SpriteCamera");
        //spriteQuadCamera.transform.localPosition = new Vector3(0.4f, 0.4f, 0.0f);
        updateSpritePosition();
    }
Example No. 9
    private void OnEnable()
    {
        if (flipAction != null)
        {
            flipAction.AddOnChangeListener(ToggleFlipped, SteamVR_Input_Sources.Any);
        }

        // The video stream must be symmetrically acquired and released in
        // order to properly disable the stream once there are no consumers.
        SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
        source.Acquire();

        // Auto-disable if no camera is present.
        if (!source.hasCamera)
        {
            enabled = false;
            Debug.LogWarning("No SteamVR_TrackedCamera was detected.");
        }
    }
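The flipAction field and the ToggleFlipped handler are not shown. Assuming flipAction is a SteamVR_Action_Boolean (implied by the AddOnChangeListener call with a SteamVR_Input_Sources argument), the handler follows the boolean action's change-listener signature; a sketch:

    public SteamVR_Action_Boolean flipAction;  // assumed type; assigned in the Inspector
    private bool flipped;

    // Called whenever the bound boolean input changes state.
    private void ToggleFlipped(SteamVR_Action_Boolean fromAction, SteamVR_Input_Sources fromSource, bool newState)
    {
        if (newState)
        {
            flipped = !flipped;  // toggle on press only
        }
    }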
Example No. 10
    void Update()
    {
        var source  = SteamVR_TrackedCamera.Source(undistorted);
        var texture = source.texture;

        //texture.
        if (texture == null)
        {
            Debug.Log("camera frame not available");
            return;
        }

        if (distortionMap == null)
        {
            Debug.Log("distortionMap frame not available");
            return;
        }

        texture.wrapMode   = TextureWrapMode.Clamp;
        texture.filterMode = FilterMode.Point;

        distortionMap.wrapMode   = TextureWrapMode.Clamp;
        distortionMap.filterMode = FilterMode.Point;

        // Apply the latest texture to the material.  This must be performed
        // every frame since the underlying texture is actually part of a ring
        // buffer which is updated in lock-step with its associated pose.
        // (Strictly speaking, calling any one of the accessors is enough, since they
        // internally call Update on the SteamVR_TrackedCamera.VideoStreamTexture.)

        material.SetTexture("_MainTex", texture);
        material.SetTexture("_DistortionTex", distortionMap);

        //material.mainTexture = texture;
        //material.mainTexture.filterMode = FilterMode.Bilinear;
        //material.mainTexture.wrapMode = TextureWrapMode.Clamp;

        // Adjust the height of the quad based on the aspect to keep the texels square.
        var aspect = (float)texture.width / texture.height;

        // The undistorted video feed has 'bad' areas near the edges where the original
        // square texture feed is stretched to undo the fisheye from the lens.
        // Therefore, you'll want to crop it to the specified frameBounds to remove this.
        if (cropped)
        {
            var bounds = source.frameBounds;
            //material.mainTextureOffset = new Vector2(Mathf.PI, Mathf.PI);
            //new Vector2(bounds.uMin, bounds.vMin);

            // during execution we have vMin > vMax
            //material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMax); //new Vector2(0, 0);//new Vector2(bounds.uMin, bounds.vMin);
            material.SetTextureOffset("_MainTex", new Vector2(bounds.uMin, bounds.vMax)); // bounds.vMin

            //material.SetTextureOffset("_MainTex", Vector2.zero);
            var du = bounds.uMax - bounds.uMin;
            var dv = bounds.vMax - bounds.vMin;
            //material.mainTextureScale = new Vector2(du, dv);
            material.SetTextureScale("_MainTex", new Vector2(du, -dv));
            //material.SetTextureScale("_MainTex", new Vector2(  1, 1));

            aspect *= Mathf.Abs(du / dv);
        }
        else
        {
            //material.mainTextureOffset = Vector2.zero;
            material.SetTextureOffset("_MainTex", Vector2.zero);

            //material.mainTextureScale = new Vector2(1, -1);
            material.SetTextureScale("_MainTex", new Vector2(1, 1));
        }


        /*
         * transform.localRotation = camToHeadRotQuat;
         * transform.localPosition = camToHeadPos;
         * transform.localScale = camToHeadScale;
         */
        //transform.localScale = new Vector3(0.5f, 0.5f, 0.5f);

        ///*
        // Apply the pose that this frame was recorded at.
        if (source.hasTracking)
        {
            var t = source.transform;
            transform.position = t.pos;
            transform.rotation = t.rot;

            //Debug.Log("tpos: " + t.pos.x + " " + t.pos.y + " " + t.pos.z);
        }
        //*/

        /*
         *      target.localScale = new Vector3(1, 1.0f / aspect, 1);
         *
         *      // Apply the pose that this frame was recorded at.
         *      if (source.hasTracking)
         *      {
         *              var t = source.transform;
         *              target.localPosition = t.pos;
         *              target.localRotation = t.rot;
         *      }
         */
    }
Example No. 11
 private void OnDisable()
 {
     this.material.mainTexture = null;
     SteamVR_TrackedCamera.VideoStreamTexture videoStreamTexture = SteamVR_TrackedCamera.Source(this.undistorted, 0);
     videoStreamTexture.Release();
 }
Example No. 12
    private void Update()
    {
        if (Input.GetKeyDown(KeyCode.LeftArrow))
        {
            ImageOffset += 1f / 128;
        }
        else if (Input.GetKeyDown(KeyCode.RightArrow))
        {
            ImageOffset -= 1f / 128;
        }

        if (Input.GetKeyDown(KeyCode.Space))
        {
            flipped = !flipped;
        }

        SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
        Texture2D texture = source.texture;

        if (texture == null)
        {
            Debug.LogWarning("Texture from tracked camera was null.");
            return;
        }

        // Apply the latest texture to the material.  This must be performed
        // every frame since the underlying texture is actually part of a ring
        // buffer which is updated in lock-step with its associated pose.
        // (Strictly speaking, calling any one of the accessors is enough, since they
        // internally call Update on the SteamVR_TrackedCamera.VideoStreamTexture.)
        material.mainTexture = texture;

        // Adjust the height of the quad based on the aspect to keep the texels square.
        float aspect = (float)texture.width / texture.height;

        // The undistorted video feed has 'bad' areas near the edges where the original
        // square texture feed is stretched to undo the fisheye from the lens.
        // Therefore, you'll want to crop it to the specified frameBounds to remove this.
        if (cropped)
        {
            VRTextureBounds_t bounds = source.frameBounds;
            material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);

            float du = bounds.uMax - bounds.uMin;
            float dv = bounds.vMax - bounds.vMin;
            material.mainTextureScale = new Vector2(du, dv);

            aspect *= Mathf.Abs(du / dv);
        }
        else
        {
            material.mainTextureOffset = Vector2.zero;
            material.mainTextureScale  = new Vector2(1, -1);
        }

        //target.localScale = new Vector3(1, 1.0f / aspect, 1);

        // Apply the pose that this frame was recorded at.
        if (source.hasTracking)
        {
            SteamVR_Utils.RigidTransform rigidTransform = source.transform;
            //target.localPosition = rigidTransform.pos;
            //target.localRotation = rigidTransform.rot;
        }
    }
Example No. 13
        private void Update()
        {
            SteamVR_TrackedCamera.VideoStreamTexture source = SteamVR_TrackedCamera.Source(undistorted);
            Texture2D texture      = source.texture;
            Texture2D Righttexture = source.texture; // same frame as 'texture'; unused in this snippet

            if (texture == null)
            {
                return;
            }

            // Apply the latest texture to the material.  This must be performed
            // every frame since the underlying texture is actually part of a ring
            // buffer which is updated in lock-step with its associated pose.
            // (Strictly speaking, calling any one of the accessors is enough, since they
            // internally call Update on the SteamVR_TrackedCamera.VideoStreamTexture.)
            material.mainTexture = texture;

            // Adjust the height of the quad based on the aspect to keep the texels square.
            float aspect = (float)texture.width / texture.height;

            // The undistorted video feed has 'bad' areas near the edges where the original
            // square texture feed is stretched to undo the fisheye from the lens.
            // Therefore, you'll want to crop it to the specified frameBounds to remove this.
            if (cropped)
            {
                VRTextureBounds_t bounds = source.frameBounds;
                material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);

                float du = bounds.uMax - bounds.uMin;
                float dv = bounds.vMax - bounds.vMin;

                if (Left) // left-eye display
                {
                    material.mainTextureScale = new Vector2(du, dv / 2);
                }
                else // right-eye display
                {
                    material.mainTextureScale = new Vector2(du, -dv / 2);
                }

                aspect *= Mathf.Abs(du / dv);
            }
            else
            {
                material.mainTextureOffset = Vector2.zero;
                material.mainTextureScale  = new Vector2(1, -1);
            }

            /*
             * target.localScale = new Vector3(1, 1.0f / aspect, 1);
             *
             * // Apply the pose that this frame was recorded at.
             * if (source.hasTracking)
             * {
             *  SteamVR_Utils.RigidTransform rigidTransform = source.transform;
             *  target.localPosition = rigidTransform.pos;
             *  target.localRotation = rigidTransform.rot;
             * }
             */
            // Apply the pose that this frame was recorded at.
            if (source.hasTracking)
            {
                Vector2 ProjectionScale = GetProjectionScale(source);
                Vector2 LocalScale      = new Vector2(2.0f * ProjectionZ / ProjectionScale.x, 2.0f * ProjectionZ / ProjectionScale.y);

                if (Left) // left-eye display
                {
                    target.localScale = new Vector3(LocalScale.x, LocalScale.y / 2, 1.0f);
                }
                else // right-eye display
                {
                    target.localScale = new Vector3(LocalScale.x, -LocalScale.y / 2, 1.0f);
                }

                //
                var t = source.transform;
                target.localPosition = t.TransformPoint(new Vector3(0.0f, 0.0f, ProjectionZ));
                target.localRotation = t.rot;
            }
        }
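This last example additionally assumes a per-eye flag and a projection helper that are not included in the snippet. A sketch of the missing declarations (names taken from the snippet; types and defaults are assumptions, and GetProjectionScale is a user-defined helper whose real implementation is not shown, so it is left as a stub):

        public bool Left = true;          // true: show the left-eye half, false: the right-eye half
        public float ProjectionZ = 1.0f;  // distance at which the quad is placed in front of the camera (assumed default)
        public Transform target;          // quad transform that receives scale, position, and rotation
        public Material material;         // material whose mainTexture is replaced each frame
        public bool cropped = true;
        public bool undistorted = true;

        // User-defined helper used above; expected to return the camera projection's (x, y) scale factors.
        private Vector2 GetProjectionScale(SteamVR_TrackedCamera.VideoStreamTexture source)
        {
            throw new System.NotImplementedException();
        }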