// Stop consuming the tracked-camera stream when this component is disabled.
private void OnDisable()
{
    var settings = Settings.instance;
    var feed = SteamVR_TrackedCamera.Source(settings.UndistortRawFeed);
    feed.Release();
}
private void Awake()
{
    // Enumerate the webcams Unity can see and detect whether any of the
    // known dual-camera devices is attached.
    WebCamDevice[] availableWebCams = WebCamTexture.devices;
    webcamTexture = new WebCamTexture();
    webcamName = "";

    // Fix: the original iterated a Where/Select projection just to set a
    // flag; Any() expresses the intent directly and short-circuits.
    hasDualCameras = availableWebCams.Any(webcam => dualcameraDevices.Contains(webcam.name));

    if (XRSettings.loadedDeviceName == "OpenVR")
    {
        // Probe the headset's built-in passthrough camera. We only auto-select
        // it when the user left the webcam choice on "Auto", SteamVR reports a
        // camera, and the camera is enabled in SteamVR settings.
        SteamVR_TrackedCamera.VideoStreamTexture camSource = SteamVR_TrackedCamera.Source(false);
        bool openvrHasCamera = camSource.hasCamera;

        EVRSettingsError error = EVRSettingsError.None;
        bool cameraIsEnabled = OpenVR.Settings.GetBool(
            OpenVR.k_pch_Camera_Section,
            OpenVR.k_pch_Camera_EnableCamera_Bool,
            ref error);

        if (Settings.instance.SelectedWebcam == "Auto" && cameraIsEnabled && openvrHasCamera)
        {
            Settings.instance.SelectedWebcam = "SteamVR";

            // Only apply the OpenVR projection preset when the user has not
            // already dialed in a large custom projection scale.
            if (Settings.instance.ProjectionScale < 32)
            {
                new CameraOffsetMenu().SetPreset("OpenVR Projection");
            }
        }
    }
}
private void Update()
{
    // Pull the latest frame; bail out until the stream delivers one.
    SteamVR_TrackedCamera.VideoStreamTexture feed = SteamVR_TrackedCamera.Source(this.undistorted, 0);
    Texture2D frame = feed.texture;
    if (frame == null)
    {
        return;
    }

    // Re-bind every frame: the texture belongs to a ring buffer that advances
    // in lock-step with the camera pose.
    this.material.mainTexture = frame;

    float aspect = (float)frame.width / (float)frame.height;
    if (this.cropped)
    {
        // Crop to the valid frame bounds to hide the stretched edge regions
        // of the undistorted feed.
        VRTextureBounds_t bounds = feed.frameBounds;
        this.material.mainTextureOffset = new Vector2(bounds.uMin, bounds.vMin);
        float uSpan = bounds.uMax - bounds.uMin;
        float vSpan = bounds.vMax - bounds.vMin;
        this.material.mainTextureScale = new Vector2(uSpan, vSpan);
        aspect *= Mathf.Abs(uSpan / vSpan);
    }
    else
    {
        this.material.mainTextureOffset = Vector2.zero;
        this.material.mainTextureScale = new Vector2(1f, -1f);
    }

    // Keep texels square by compensating the quad height with the aspect.
    this.target.localScale = new Vector3(1f, 1f / aspect, 1f);

    // Apply the pose the frame was recorded at.
    if (feed.hasTracking)
    {
        SteamVR_Utils.RigidTransform pose = feed.transform;
        this.target.localPosition = pose.pos;
        this.target.localRotation = pose.rot;
    }
}
// Token: 0x06000E03 RID: 3587 RVA: 0x000594E2 File Offset: 0x000576E2
// Routes to the shared stream accessor matching the requested distortion mode.
public static SteamVR_TrackedCamera.VideoStreamTexture Source(bool undistorted, int deviceIndex = 0)
{
    return undistorted
        ? SteamVR_TrackedCamera.Undistorted(deviceIndex)
        : SteamVR_TrackedCamera.Distorted(deviceIndex);
}
// Clear the displayed frame and release our claim on the undistorted stream.
void StopCamera()
{
    capturePlane.GetComponent<Renderer>().material.mainTexture = null;
    SteamVR_TrackedCamera.Source(true).Release();
}
// Acquire the stream; the component stays enabled only if a camera exists.
private void OnEnable()
{
    var settings = Settings.instance;
    var feed = SteamVR_TrackedCamera.Source(settings.UndistortRawFeed);
    feed.Acquire();
    enabled = feed.hasCamera;
}
private void OnEnable()
{
    // Streams are reference counted: every OnEnable Acquire must be paired
    // with an OnDisable Release.
    var feed = SteamVR_TrackedCamera.Source(this.undistorted, 0);
    feed.Acquire();

    // No camera present — disable ourselves rather than polling forever.
    if (!feed.hasCamera)
    {
        enabled = false;
    }
}
private void Update()
{
    SteamVR_TrackedCamera.VideoStreamTexture feed = SteamVR_TrackedCamera.Source(undistorted);
    Texture2D frame = feed.texture;
    if (frame == null)
    {
        return;
    }

    // Re-bind every frame: the underlying texture is part of a ring buffer
    // updated in lock-step with its associated pose; any of the accessors
    // internally refreshes the VideoStreamTexture.
    material.mainTexture = frame;

    // Compensate the quad height with the aspect so texels stay square.
    float frameAspect = (float)frame.width / frame.height;

    // The undistorted feed has stretched 'bad' areas near the edges where the
    // fisheye was undone; cropping to frameBounds removes them.
    if (cropped)
    {
        VRTextureBounds_t frameBounds = feed.frameBounds;
        material.mainTextureOffset = new Vector2(frameBounds.uMin, frameBounds.vMin);
        float uSpan = frameBounds.uMax - frameBounds.uMin;
        float vSpan = frameBounds.vMax - frameBounds.vMin;
        material.mainTextureScale = new Vector2(uSpan, vSpan);
        frameAspect *= Mathf.Abs(uSpan / vSpan);
    }
    else
    {
        material.mainTextureOffset = Vector2.zero;
        material.mainTextureScale = new Vector2(1, -1);
    }

    target.localScale = new Vector3(1, 1.0f / frameAspect, 1);

    // When tracking is available, reproject the quad one unit in front of the
    // recorded camera pose, sized by the camera's projection scale. This
    // intentionally overwrites the aspect-based scale set above.
    if (feed.hasTracking)
    {
        const float projectionZ = 1.0f;
        Vector2 projScale = GetProjectionScale(feed);
        float sx = 2.0f * projectionZ / projScale.x;
        float sy = 2.0f * projectionZ / projScale.y;
        target.localScale = new Vector3(sx, sy, 1.0f);

        var pose = feed.transform;
        target.localPosition = pose.TransformPoint(new Vector3(0.0f, 0.0f, projectionZ));
        target.localRotation = pose.rot;
    }
}
void Update()
{
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    var frame = feed.texture;
    if (frame == null)
    {
        return;
    }

    // Re-bind every frame: the texture is part of a ring buffer updated in
    // lock-step with its associated pose; any accessor refreshes the stream.
    material.mainTexture = frame;

    // Keep texels square by compensating the quad height with the aspect.
    var frameAspect = (float)frame.width / frame.height;

    // Crop to frameBounds to hide the stretched edges of the undistorted feed.
    if (cropped)
    {
        var frameBounds = feed.frameBounds;
        material.mainTextureOffset = new Vector2(frameBounds.uMin, frameBounds.vMin);
        var uSpan = frameBounds.uMax - frameBounds.uMin;
        var vSpan = frameBounds.vMax - frameBounds.vMin;
        material.mainTextureScale = new Vector2(uSpan, vSpan);
        frameAspect *= Mathf.Abs(uSpan / vSpan);
    }
    else
    {
        material.mainTextureOffset = Vector2.zero;
        material.mainTextureScale = new Vector2(1, -1);
    }

    target.localScale = new Vector3(1, 1.0f / frameAspect, 1);

    // Apply the pose this frame was recorded at.
    if (feed.hasTracking)
    {
        var pose = feed.transform;
        target.localPosition = pose.pos;
        target.localRotation = pose.rot;
    }

    // Mirror the camera frame into the Spout texture. The accessor is called
    // again deliberately, matching the original behavior.
    Graphics.Blit(feed.texture, spoutTexture);
}
void FixedUpdate()
{
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    var frame = feed.texture;
    if (frame == null)
    {
        Debug.Log("Frame not captured.");
        return;
    }

    // Re-bind every frame: the texture is part of a ring buffer updated in
    // lock-step with its associated pose.
    material.mainTexture = frame;

    // Aspect ratio used to keep texels square on the display quads.
    var frameAspect = (float)frame.width / frame.height;

    // Crop to frameBounds to hide the stretched edges of the undistorted feed.
    if (cropped)
    {
        var frameBounds = feed.frameBounds;
        material.mainTextureOffset = new Vector2(frameBounds.uMin, frameBounds.vMin);
        var uSpan = frameBounds.uMax - frameBounds.uMin;
        var vSpan = frameBounds.vMax - frameBounds.vMin;
        material.mainTextureScale = new Vector2(uSpan, vSpan);
        frameAspect *= Mathf.Abs(uSpan / vSpan);
    }
    else
    {
        material.mainTextureOffset = Vector2.zero;
        material.mainTextureScale = new Vector2(1, -1);
    }

    // Preserve the target's current width; derive its height from the aspect.
    Vector3 currentScale = target.localScale;
    target.localScale = new Vector3(currentScale.x, currentScale.x / frameAspect, currentScale.z);
    spriteQuadCamera.transform.localScale = new Vector3(0.05f, 0.05f * frameAspect, 0.05f);
}
private void OnDisable()
{
    // Drop the displayed frame while inactive.
    material.mainTexture = null;

    // Acquire/Release must be symmetric so the stream shuts down once the
    // last consumer disables.
    SteamVR_TrackedCamera.Source(undistorted).Release();
}
private void OnEnable()
{
    // Acquire/Release must be symmetric so the stream shuts down once the
    // last consumer disables.
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    feed.Acquire();

    // Disable ourselves when no tracked camera is present.
    if (!feed.hasCamera)
    {
        enabled = false;
    }
}
// Pushes the current tracked-camera frame onto the AR companion plane.
private void Update()
{
    var settings = Settings.instance;
    planeObject = ARCompanion.xrcamBehaviour.planeObject;
    var feed = SteamVR_TrackedCamera.Source(settings.UndistortRawFeed);
    tcamTex = feed.texture;

    if (planeObject == null)
    {
        return;
    }

    ARCompanion.xrcamBehaviour.planeMat.SetTexture("_Tex", tcamTex);
    // Fixed orientation that maps the camera feed upright onto the plane.
    planeObject.transform.localRotation = Quaternion.Euler(-90, 0, -180);
}
private void OnDisable()
{
    // Unhook the input listener registered in OnEnable.
    if (flipAction != null)
    {
        flipAction.RemoveOnChangeListener(ToggleFlipped, SteamVR_Input_Sources.Any);
    }

    // Drop the displayed frame while inactive.
    material.mainTexture = null;

    // Acquire/Release must be symmetric so the stream shuts down once the
    // last consumer disables.
    SteamVR_TrackedCamera.Source(undistorted).Release();
}
// Composites the tracked-camera frame into the eye render target.
// NOTE(review): when the camera texture is null nothing is blitted to
// `destination`, which may leave it with stale contents — confirm intended.
void OnRenderImage(RenderTexture _source, RenderTexture destination)
{
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    var frame = feed.texture;
    if (frame == null)
    {
        Debug.LogWarning("Texture from tracked camera was null.");
        return;
    }

    // The eye is inferred from the destination texture's name.
    bool isRightEye = destination.name.Contains("Right");
    material.SetFloat(flippedParamId, flipped ? 1 : 0);
    material.SetFloat(useRightParamId, isRightEye ? 1 : 0);
    material.SetFloat(imageOffsetParamId, ImageOffset);
    Graphics.Blit(frame, destination, material);
}
// Start is called before the first frame update
void Start()
{
    // Build the red selection line used for pointing interactions.
    LineRenderer lineRenderer = lineRenderObj.AddComponent<LineRenderer>();
    lineRenderer.material = new Material(Shader.Find("Sprites/Default"));
    lineRenderer.startColor = Color.red;
    lineRenderer.endColor = Color.red;
    lineRenderer.startWidth = 0.005f;
    lineRenderer.endWidth = 0.005f;
    lineRenderer.positionCount = 2;

    objHovered = null;
    objSelected = null;

    // Acquire the undistorted tracked-camera stream for this component's lifetime.
    source = SteamVR_TrackedCamera.Source(true);
    source.Acquire();

    currScale = inputPlane.transform.localScale;

    // Fix: source.texture is null until the first frame is delivered, so the
    // original width/height access could throw a NullReferenceException right
    // after Acquire(). Only rescale the plane once a frame exists.
    Texture2D cameraTexture = source.texture;
    if (cameraTexture != null)
    {
        // Stretch the plane horizontally to match the feed's aspect ratio.
        currScale.x *= (float)cameraTexture.width / (float)cameraTexture.height;
        inputPlane.transform.localScale = currScale;
    }
    else
    {
        Debug.LogWarning("Tracked camera texture not ready in Start(); skipping input plane rescale.");
    }
}
void OnEnable()
{
    // Acquire/Release must be symmetric so the stream shuts down once the
    // last consumer disables.
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    feed.Acquire();

    // Disable ourselves when no tracked camera is present.
    if (!feed.hasCamera)
    {
        enabled = false;
    }

    // Locate the companion sprite quad and position it.
    spriteQuadCamera = GameObject.Find("SpriteCamera");
    updateSpritePosition();
}
private void OnEnable()
{
    // Hook the flip toggle input, if one was assigned in the inspector.
    if (flipAction != null)
    {
        flipAction.AddOnChangeListener(ToggleFlipped, SteamVR_Input_Sources.Any);
    }

    // Acquire/Release must be symmetric so the stream shuts down once the
    // last consumer disables.
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    feed.Acquire();

    // No tracked camera — warn and disable ourselves.
    if (!feed.hasCamera)
    {
        enabled = false;
        Debug.LogWarning("No SteamVR_TrackedCamera was detected.");
    }
}
// Token: 0x060028A5 RID: 10405 RVA: 0x000C4774 File Offset: 0x000C2974
// Wraps the shared video stream for the given tracked-camera device index.
public VideoStreamTexture(uint deviceIndex, bool undistorted)
{
    this.videostream = SteamVR_TrackedCamera.Stream(deviceIndex);
    this.undistorted = undistorted;
}
private void Update()
{
    // Keyboard tuning: arrow keys nudge the stereo image offset in 1/128
    // steps, space toggles the flip.
    if (Input.GetKeyDown(KeyCode.LeftArrow))
    {
        ImageOffset += 1f / 128;
    }
    else if (Input.GetKeyDown(KeyCode.RightArrow))
    {
        ImageOffset -= 1f / 128;
    }
    if (Input.GetKeyDown(KeyCode.Space))
    {
        flipped = !flipped;
    }

    var feed = SteamVR_TrackedCamera.Source(undistorted);
    var frame = feed.texture;
    if (frame == null)
    {
        Debug.LogWarning("Texture from tracked camera was null.");
        return;
    }

    // Re-bind every frame: the texture is part of a ring buffer updated in
    // lock-step with its associated pose; any accessor refreshes the stream.
    material.mainTexture = frame;

    // Aspect kept for the (currently disabled) quad scaling below.
    float frameAspect = (float)frame.width / frame.height;

    // Crop to frameBounds to hide the stretched edges of the undistorted feed.
    if (cropped)
    {
        VRTextureBounds_t frameBounds = feed.frameBounds;
        material.mainTextureOffset = new Vector2(frameBounds.uMin, frameBounds.vMin);
        float uSpan = frameBounds.uMax - frameBounds.uMin;
        float vSpan = frameBounds.vMax - frameBounds.vMin;
        material.mainTextureScale = new Vector2(uSpan, vSpan);
        frameAspect *= Mathf.Abs(uSpan / vSpan);
    }
    else
    {
        material.mainTextureOffset = Vector2.zero;
        material.mainTextureScale = new Vector2(1, -1);
    }

    //target.localScale = new Vector3(1, 1.0f / frameAspect, 1);

    // Pose read retained from the original even though the target updates are
    // disabled (the accessor refreshes the stream's internal state).
    if (feed.hasTracking)
    {
        SteamVR_Utils.RigidTransform pose = feed.transform;
        //target.localPosition = pose.pos;
        //target.localRotation = pose.rot;
    }
}
private void OnDisable()
{
    // Drop the displayed frame and release our claim on the stream.
    this.material.mainTexture = null;
    SteamVR_TrackedCamera.Source(this.undistorted, 0).Release();
}
void Update()
{
    var feed = SteamVR_TrackedCamera.Source(undistorted);
    var frame = feed.texture;
    if (frame == null)
    {
        Debug.Log("camera frame not available");
        return;
    }
    if (distortionMap == null)
    {
        Debug.Log("distortionMap frame not available");
        return;
    }

    // Point sampling with clamped edges so the distortion lookup is exact.
    frame.wrapMode = TextureWrapMode.Clamp;
    frame.filterMode = FilterMode.Point;
    distortionMap.wrapMode = TextureWrapMode.Clamp;
    distortionMap.filterMode = FilterMode.Point;

    // Re-bind every frame: the texture is part of a ring buffer updated in
    // lock-step with its associated pose; any accessor refreshes the stream.
    material.SetTexture("_MainTex", frame);
    material.SetTexture("_DistortionTex", distortionMap);

    // Aspect computed for parity with the other feed scripts; the quad
    // scaling that consumed it is currently disabled.
    var frameAspect = (float)frame.width / frame.height;

    // Crop to frameBounds to hide the stretched edges of the undistorted feed.
    if (cropped)
    {
        var frameBounds = feed.frameBounds;
        // NOTE(review): offset uses vMax with a negated v scale because the
        // feed's v range was observed inverted (vMin > vMax) — confirm.
        material.SetTextureOffset("_MainTex", new Vector2(frameBounds.uMin, frameBounds.vMax));
        var uSpan = frameBounds.uMax - frameBounds.uMin;
        var vSpan = frameBounds.vMax - frameBounds.vMin;
        material.SetTextureScale("_MainTex", new Vector2(uSpan, -vSpan));
        frameAspect *= Mathf.Abs(uSpan / vSpan);
    }
    else
    {
        material.SetTextureOffset("_MainTex", Vector2.zero);
        material.SetTextureScale("_MainTex", new Vector2(1, 1));
    }

    // Place this object at the world pose the frame was captured at.
    if (feed.hasTracking)
    {
        var pose = feed.transform;
        transform.position = pose.pos;
        transform.rotation = pose.rot;
    }
}