/// <summary>
/// Uninitialize and clean up AR Foundation components.
/// </summary>
/// <remarks>
/// Only destroys objects/components that this class created itself; components that
/// pre-existed in the scene (tracked via the preExisting* flags) are left untouched.
/// </remarks>
private void UninitializeARFoundation()
{
    // Nothing to tear down if initialization never completed.
    if (!isInitialized)
    {
        return;
    }

    // Tear down the session-origin side (camera-attached components) first,
    // but only if this class added them.
    if (!preExistingArSessionOriginObject && (arSessionOriginObject != null))
    {
        UnityObjectExtensions.DestroyObject(trackedPoseDriver);
        trackedPoseDriver = null;

        UnityObjectExtensions.DestroyObject(arCameraBackground);
        arCameraBackground = null;

        UnityObjectExtensions.DestroyObject(arCameraManager);
        arCameraManager = null;

        UnityObjectExtensions.DestroyObject(arSessionOrigin);
        arSessionOrigin = null;
    }

    // Then tear down the AR session object itself, again only if we created it.
    if (!preExistingArSessionObject && (arSessionObject != null))
    {
        UnityObjectExtensions.DestroyObject(arInputManager);
        arInputManager = null;

        UnityObjectExtensions.DestroyObject(arSession);
        arSession = null;

        UnityObjectExtensions.DestroyObject(arSessionObject);
        arSessionObject = null;
    }

    isInitialized = false;
}
/// <summary>
/// Mirrors the main camera's AR background rendering (ARKit, ARCore or AR Foundation)
/// onto the render camera so it shows the same device camera feed.
/// </summary>
private void SetupRenderCameraForAR()
{
#if USES_AR_KIT
    // Cache the component instead of calling GetComponent twice.
    UnityARVideo arKitVideo = mainCamera.GetComponent<UnityARVideo>();
    if (arKitVideo)
    {
        renderCam.clearFlags = CameraClearFlags.SolidColor;
        ARKitCameraRender component = renderCam.gameObject.AddComponent<ARKitCameraRender>();
        component.m_ClearMaterial = arKitVideo.m_ClearMaterial;
    }
#endif
#if USES_AR_CORE
    // Cache the component instead of calling GetComponent twice.
    GoogleARCore.ARCoreBackgroundRenderer arCoreBackground =
        mainCamera.GetComponent<GoogleARCore.ARCoreBackgroundRenderer>();
    if (arCoreBackground)
    {
        renderCam.clearFlags = CameraClearFlags.SolidColor;
        GoogleARCore.ARCoreBackgroundRenderer component =
            renderCam.gameObject.AddComponent<GoogleARCore.ARCoreBackgroundRenderer>();
        component.BackgroundMaterial = arCoreBackground.BackgroundMaterial;

        // This sucks, the first enabling fails automatically because there isn't a background material. By doing this we still
        // get an error on the log, but it at least works.. :/
        component.enabled = false;
        component.enabled = true;
    }
#endif
#if USES_AR_FOUNDATION
    // AR Foundation's ARCameraBackground configures itself; no properties to copy
    // (the previous local variable for the added component was unused).
    if (mainCamera.GetComponent<ARCameraBackground>())
    {
        renderCam.gameObject.AddComponent<ARCameraBackground>();
    }
#endif
}
private void Awake()
{
    // Register this component as the singleton instance.
    instance = this;

    // Cache sibling components for later use.
    cameraTransform = transform;
    attachedCamera = GetComponent<Camera>();
    cameraBackground = GetComponent<ARCameraBackground>();
}
/// <summary>
/// Handles AR camera frame events: records the frame data for sending, lazily
/// allocates the render targets, and blits the AR background material into them.
/// </summary>
/// <param name="newFrameDataToSend">The frame event args delivered by the AR camera subsystem.</param>
private void ARSubsystemManager_CameraFrameReceived(ARCameraFrameEventArgs newFrameDataToSend)
{
    // Flag that a new frame is pending and stash its data.
    m_hasARFrameDataToSend = true;
    m_ARFrameDataToSend = newFrameDataToSend;

    // Lazily create the render textures on the first frame, matching the AR
    // camera's target texture descriptor (without a depth buffer).
    if (finalRt == null)
    {
        var desc = arCamera.targetTexture.descriptor;
        desc.depthBufferBits = 0;
        rt = new RenderTexture(desc);
        finalRt = new RenderTexture(desc);
        // The streaming texture is downscaled by arPreviewScale (integer division).
        desc.width = desc.width / arPreviewScale;
        desc.height = desc.height / arPreviewScale;
        toStream = new RenderTexture(desc);
        syncedFrame.texture = finalRt;
    }

    // Lazily cache the ARCameraBackground component on the AR camera.
    if (m_background == null)
    {
        m_background = arCamera.GetComponent<ARCameraBackground>();
    }

    // Blit the camera background into both targets, but only when its material
    // exists and no previously captured image is still waiting to be applied.
    if (m_background.material != null && !m_hasARFrameImageToSet)
    {
        Graphics.Blit(null, rt, m_background.material);
        Graphics.Blit(null, toStream, m_background.material);
    }
}
/// <summary>
/// Sets up a command buffer on the AR camera that copies the camera background
/// into a temporary render target exposed to shaders as a global texture.
/// </summary>
private void Start()
{
    // Cache the AR camera and verify prerequisites.
    _arCamera = DepthSource.ARCamera;
    Debug.Assert(_arCamera != null,
                 "The scene must include a camera object to get the background texture.");
    Debug.Assert(BackgroundMaterial);

    _backgroundRenderer = _arCamera.GetComponent<ARCameraBackground>();
    if (_backgroundRenderer == null)
    {
        Debug.LogError(
            "BackgroundTextureProvider requires ARCameraBackground " +
            "anywhere in the scene.");
        return;
    }

    // Toggle the background renderer off and on; presumably this forces it to
    // re-initialize its material state — TODO confirm why this is required here.
    _backgroundRenderer.enabled = false;
    _backgroundRenderer.enabled = true;

    _commandBuffer = new CommandBuffer();
    _commandBuffer.name = "Camera texture";
    _backgroundTextureID = Shader.PropertyToID(BackgroundTexturePropertyName);
    // -1/-1 camera-sized temporary RT with no depth buffer.
    _commandBuffer.GetTemporaryRT(_backgroundTextureID, /*width=*/ -1, /*height=*/ -1,
                                  /*depthBuffer=*/ 0, FilterMode.Bilinear);

    // Alternatively, can blit from BuiltinRenderTextureType.CameraTarget into
    // _backgroundTextureID, but make sure this is executed after the renderer is initialized.
    _commandBuffer.Blit(
        _backgroundRenderer.material.GetTexture(_mainTex),
        _backgroundTextureID,
        BackgroundMaterial);

    // Make the copied background available to all shaders under the property name.
    _commandBuffer.SetGlobalTexture(BackgroundTexturePropertyName, _backgroundTextureID);
    // Run the copy before opaque geometry so the texture is ready for scene shaders.
    _arCamera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, _commandBuffer);
}
// Ensures the active camera carries the AR Foundation background and camera-manager
// components, adding non-persistent (DontSave) ones when they are absent, and
// subscribes to camera frame events. Also clears the cached projection matrix.
void InitializeCameraProvider()
{
    var camera = MarsRuntimeUtils.GetActiveCamera(true);
    if (camera)
    {
        var background = camera.GetComponent<ARCameraBackground>();
        if (!background)
        {
            background = camera.gameObject.AddComponent<ARCameraBackground>();
            // Record the component this provider created itself.
            m_NewARCameraBackground = background;
            background.hideFlags = HideFlags.DontSave;
        }
        m_ARCameraBackground = background;

        var manager = camera.GetComponent<ARCameraManager>();
        if (!manager)
        {
            manager = camera.gameObject.AddComponent<ARCameraManager>();
            // Record the component this provider created itself.
            m_NewARCameraManager = manager;
            manager.hideFlags = HideFlags.DontSave;
        }
        m_ARCameraManager = manager;

        m_ARCameraManager.frameReceived += ARCameraManagerOnFrameReceived;
    }

    // Reset the cached projection matrix.
    m_CurrentProjectionMatrix = null;
}
void Start()
{
    // The ARCamera game object carries the component that renders the phone's
    // camera feed as the scene background; cache it here.
    arCameraBackgroundScript = Camera.main.GetComponent<ARCameraBackground>();

    // Cache the "Before" audio snapshot and the entrance object (second child
    // of this object's parent).
    before = mixer.FindSnapshot("Before");
    entrance = transform.parent.GetChild(1).gameObject;
}
void Start()
{
    // Cache scene references.
    arOrigin = FindObjectOfType<ARSessionOrigin>();
    rend = doorMaterial.GetComponent<Renderer>();
    arBackground = head.GetComponent<ARCameraBackground>();

    // Hide the door until it is placed.
    door.SetActive(false);

    // ARCameraBackground ignores customMaterial unless useCustomMaterial is
    // enabled, so turn it on before assigning the skybox replacement material
    // (matches how the other scripts in this project configure it).
    arBackground.useCustomMaterial = true;
    arBackground.customMaterial = skyBox;

    // At start, use the first material
    rend.material = solid;
}
void Start()
{
    // The ARCamera game object carries the component that renders the phone's
    // camera feed as the background; we can disable it to show our skybox instead.
    arCameraBackgroundScript = Camera.main.GetComponent<ARCameraBackground>();

    // Cache the "After" audio snapshot and the exit object (third child of this
    // object's parent).
    after = mixer.FindSnapshot("After");
    exit = transform.parent.GetChild(2).gameObject;
}
void Start()
{
    // Swap the default camera-feed material for a platform-specific custom one.
    ARCameraBackground background = GetComponent<ARCameraBackground>();
    background.useCustomMaterial = true;
#if UNITY_IOS
    background.customMaterial = _ARKitBackground;
#else
    background.customMaterial = _ARCoreBackground;
#endif
}
private void Start()
{
    // Cache the AR camera's background component and its initial pose.
    arCameraBG = ARCamera.GetComponent<ARCameraBackground>();
    arCameraPosition = ARCamera.transform.position;
    arCameraRotation = ARCamera.transform.eulerAngles;

    // Disable the VR-mode controls and cameras at startup.
    CameraMoveJoystick.gameObject.SetActive(false);
    CameraRotateJoystick.gameObject.SetActive(false);
    ARCameraVis.SetActive(false);
    VRCamera.enabled = false;
}
public void Start()
{
    // Resolve the configured location and align this object to the sync point.
    var location = Repositories.LocationsRepository.GetLocationByName();
    SetStartPositionBasedOnSyncPoint();
    _cameraBackground = arCameraGameObject.GetComponent<ARCameraBackground>();

    // Load the location's map sprite from Resources and display it on this renderer.
    var mapObject = (GameObject)Resources.Load($"Sprites/{location.mapFileName}");
    GetComponent<SpriteRenderer>().sprite = mapObject.GetComponent<SpriteRenderer>().sprite;

    // Orient the map to match the location, then synchronize.
    gameObject.transform.rotation = location.rotation;
    LocationSync();
}
void Start()
{
    // Default to the camera attached to this game object when none was assigned.
    if (sampleCamera == null)
    {
        sampleCamera = gameObject.GetComponent<Camera>();
    }

    // Cache the AR background component on the sample camera.
    m_ARCameraBackground = sampleCamera.GetComponent<ARCameraBackground>();

    // Screen-sized render target the probe samples from.
    cameraImage = new RenderTexture(Screen.width, Screen.height, 24);
}
private void Start()
{
    // Locate the AR Foundation session components anywhere in the scene.
    _arSessionOrigin = FindObjectOfType<ARSessionOrigin>();
    _arCameraBackground = FindObjectOfType<ARCameraBackground>();
    _arCameraManager = FindObjectOfType<ARCameraManager>();
    _arSession = FindObjectOfType<ARSession>();

    // Keep the most recent external camera texture from each received frame.
    _arCameraManager.frameReceived += args =>
    {
        if (args.textures.Count > 0)
        {
            _lastReceived = args.textures[0];
        }
    };
}
/// <summary>
/// Initialize AR Foundation components.
/// </summary>
/// <remarks>
/// This method ensures that the components required by AR Foundation (ex: AR Session, Tracked Pose Driver, etc)
/// exist or are added to the appropriate scene objects. These components are used by AR Foundation to
/// communicate with the underlying AR platform (ex: AR Core), track the device and perform other necessary tasks.
/// </remarks>
private void InitializeARFoundation()
{
    // Bail out if the configuration is unsupported or setup already ran.
    if (!isSupportedArConfiguration)
    {
        return;
    }
    if (isInitialized)
    {
        return;
    }

    // Pick up any AR Foundation components already present in the scene.
    FindARFoundationComponents();

    // Create the AR Session object at the scene root if it does not exist yet.
    if (arSessionObject == null)
    {
        arSessionObject = new GameObject("AR Session");
        arSessionObject.transform.parent = null;
    }
    arSession = arSessionObject.EnsureComponent<ARSession>();
    arInputManager = arSessionObject.EnsureComponent<ARInputManager>();

    // The Mixed Reality Playspace serves as the session origin; parent the main
    // camera under it so tracked poses apply in playspace coordinates.
    if (arSessionOriginObject == null)
    {
        arSessionOriginObject = MixedRealityPlayspace.Transform.gameObject;
    }
    CameraCache.Main.transform.parent = arSessionOriginObject.transform;

    arSessionOrigin = arSessionOriginObject.EnsureComponent<ARSessionOrigin>();
    arSessionOrigin.camera = CameraCache.Main;

    // Attach the camera-facing AR components to the session origin's camera.
    GameObject cameraObject = arSessionOrigin.camera.gameObject;
    arCameraManager = cameraObject.EnsureComponent<ARCameraManager>();
    arCameraBackground = cameraObject.EnsureComponent<ARCameraBackground>();
    trackedPoseDriver = cameraObject.EnsureComponent<TrackedPoseDriver>();

    // Configure the pose driver from this provider's settings, converting the
    // profile enums to their Unity equivalents.
    trackedPoseDriver.SetPoseSource(
        TrackedPoseDriver.DeviceType.GenericXRDevice,
        ArEnumConversion.ToUnityTrackedPose(poseSource));
    trackedPoseDriver.trackingType = ArEnumConversion.ToUnityTrackingType(trackingType);
    trackedPoseDriver.updateType = ArEnumConversion.ToUnityUpdateType(updateType);
    trackedPoseDriver.UseRelativeTransform = false;

    isInitialized = true;
}
/// <summary>
/// Add the background rendering pass when rendering a game camera with an enabled AR camera background component.
/// </summary>
/// <param name="renderer">The scriptable renderer in which to enqueue the render pass.</param>
/// <param name="renderingData">Additional rendering data about the current state of rendering.</param>
public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
{
    // NOTE(review): the original trailing "#endif // !UNITY_EDITOR" contradicted the
    // "#if UNITY_EDITOR" guard above; as written, this pass is enqueued only in the
    // editor — confirm that the directive polarity (and not the comment) was intended.
#if UNITY_EDITOR
    Camera currentCamera = renderingData.cameraData.camera;
    // Only game cameras with an active, material-backed AR background get the pass.
    if ((currentCamera != null) && (currentCamera.cameraType == CameraType.Game))
    {
        ARCameraBackground cameraBackground = currentCamera.gameObject.GetComponent<ARCameraBackground>();
        if ((cameraBackground != null) && cameraBackground.backgroundRenderingEnabled &&
            (cameraBackground.material != null))
        {
            // Mirror the AR subsystem's culling inversion, defaulting to false when
            // the camera manager or its subsystem is unavailable.
            bool invertCulling = cameraBackground.GetComponent<ARCameraManager>()?.subsystem?.invertCulling ?? false;
            m_ScriptablePass.Setup(m_BackgroundMesh, cameraBackground.material, invertCulling);
            renderer.EnqueuePass(m_ScriptablePass);
        }
    }
#endif // UNITY_EDITOR
}
protected virtual void Awake()
{
    // Resolve any references that were not assigned in the inspector.
    if (arManager == null)
    {
        arManager = GetComponent<ARCameraManager>();
    }
    if (arBackground == null)
    {
        arBackground = FindObjectOfType<ARCameraBackground>();
    }

    // The final camera defaults to the one hosting the AR camera manager.
    if (cameraFinal == null && arManager != null)
    {
        cameraFinal = arManager.GetComponent<Camera>();
    }
}
void Start()
{
    // Cache the raycaster on this game object and the camera components on the AR camera.
    arRaycaster = GetComponent<ARRaycastManager>();
    m_ARCameraBackground = ARCamera.GetComponent<ARCameraBackground>();
    cameraManager = ARCamera.GetComponent<ARCameraManager>();

    status.text = "Capture an image to create a 3D model";

    // Create the placeable instance facing the user, keep it hidden until placed,
    // and grab its material for later updates.
    arInstance = Instantiate(arPrefab);
    arInstance.gameObject.transform.eulerAngles = new Vector3(0, 180, 0);
    arInstance.gameObject.SetActive(false);
    mat = arInstance.GetComponent<Renderer>().material;

    raw.enabled = false;
}
private void Start()
{
#if UNITY_ANDROID
    // Start with VR mode off and remember the AR camera's background and pose.
    VRModeON = false;
    arCameraBG = ARCamera.GetComponent<ARCameraBackground>();
    arCameraPosition = ARCamera.transform.position;
    arCameraRotation = ARCamera.transform.eulerAngles;

    // Disable VR-mode joysticks, cameras and the grid plane at startup.
    CameraMoveJoystick.gameObject.SetActive(false);
    CameraMoveUpJoystick.gameObject.SetActive(false);
    CameraRotateJoystick.gameObject.SetActive(false);
    ARCameraVis.SetActive(false);
    VRCamera.enabled = false;
    GridPlane.SetActive(false);

    // Remember the grid plane's initial height.
    gridInitPos = GridPlane.transform.position.y;

#if AR_ON
    // Follow the lowest detected plane reported by the tracking manager.
    TrackingManager.Instance.NewLowestPlanePosition += AdjustGridPlane;
#endif
#endif
}
public IEnumerator XRSubsystemsActivation()
{
    // Determine if correct scene has been loaded
    Assert.That(SceneManager.GetActiveScene().name == "ARScene");

    // Check for AR Session and the AR Session component
    GameObject arSession = GameObject.Find("AR Session");
    Assert.IsNotNull(arSession);
    ARSession arSessionComponent = arSession.GetComponent<ARSession>();
    Assert.IsNotNull(arSessionComponent);

    // Check for the AR Rig which controls the origin of the AR scene and the camera
    GameObject arRig = GameObject.Find("AR Session Origin");
    Assert.IsNotNull(arRig);
    ARSessionOrigin arOriginComponent = arRig.GetComponent<ARSessionOrigin>();
    Assert.IsNotNull(arOriginComponent);

    // Wait up to 240 frames for ARSession state to reach SessionTracking
    // (the original comment said 120, but the loop bound below is 240).
    int framesWaited = 0;
    while (ARSession.state != ARSessionState.SessionTracking && framesWaited < 240)
    {
        framesWaited++;
        yield return(null);
    }
    Assert.That(ARSession.state == ARSessionState.SessionTracking, "Session State: {0}", ARSession.state);

    // Once the ARSession is running, the AR Background Renderer should become active and display the camera feed on the screen
    ARCameraBackground backgroundRenderer = arRig.GetComponentInChildren<ARCameraBackground>();
    Assert.That(backgroundRenderer.enabled == true, "ARBackground Renderer Enabled: {0}", backgroundRenderer.enabled);
}
void Start()
{
    // Default to the camera attached to this game object when none was assigned.
    if (sampleCamera == null)
    {
        sampleCamera = gameObject.GetComponent<Camera>();
    }

    // Cache the AR background component on the sample camera.
    m_ARCameraBackground = sampleCamera.GetComponent<ARCameraBackground>();

    // Screen-sized render target the probe samples from.
    cameraImage = new RenderTexture(Screen.width, Screen.height, 24);

    // Ensure the probe has a mesh collider, adding one if missing.
    if (probe.GetComponent<MeshCollider>() == null)
    {
        probe.AddComponent<MeshCollider>();
    }

    // Tag the probe so other systems can identify it.
    probe.tag = "Probe";
}
private void Start()
{
    // Without a replacement material there is nothing to apply.
    if (ReplacementMaterial == null)
    {
        return;
    }

    _backgroundRenderer = FindObjectOfType<ARCameraBackground>();
    Debug.Assert(_backgroundRenderer);

    // Swap the camera-feed material for the replacement one.
    _backgroundRenderer.useCustomMaterial = true;
    _backgroundRenderer.customMaterial = ReplacementMaterial;

    // Reset background renderer to apply custom material change.
    _backgroundRenderer.enabled = false;
    _backgroundRenderer.enabled = true;

    // Resets the fragment shader (only when the material exposes the property).
    if (ReplacementMaterial.HasProperty(_showColorOnly))
    {
        ReplacementMaterial.SetFloat(_showColorOnly, 0f);
    }
}
public void Start()
{
    // Cache the map camera and the AR background from their host game objects.
    _mapCamera = mapCameraGameObject.GetComponent<Camera>();
    _cameraBackground = arCameraGameObject.GetComponent<ARCameraBackground>();
}
// Start is called before the first frame update.
// Allocates every GPU resource the GLEAM compute shader needs (camera/output/sample
// render targets, 3D sum/weight/cubemap textures, sample buffer) and binds them to
// the compute kernels.
void Start()
{
    RayCastKernelIndex = GLEAMCompute.FindKernel("RayCast");

    // Default to the camera attached to this game object when none was assigned.
    if (sampleCamera == null)
    {
        sampleCamera = gameObject.GetComponent<Camera>();
    }
    m_ARCameraBackground = sampleCamera.GetComponent<ARCameraBackground>();

    // Screen-sized camera image with random write enabled for compute access.
    cameraImage = new RenderTexture(Screen.width, Screen.height, 24);
    cameraImage.enableRandomWrite = true;
    cameraImage.Create();

    sampleTexture = new Texture2D(probeSampleSize, probeSampleSize, TextureFormat.RGBA32, false);

    // Screen-sized debug/output frame, shown through the shaderOutput UI element.
    outputFrame = new RenderTexture(Screen.width, Screen.height, 24);
    outputFrame.enableRandomWrite = true;
    outputFrame.Create();
    GLEAMCompute.SetTexture(0, "_DebugTexture", outputFrame);
    shaderOutput.texture = outputFrame;

    // Probe-sized sample target bound to kernel 0.
    sampleTextureRT = new RenderTexture(probeSampleSize, probeSampleSize, 24);
    sampleTextureRT.enableRandomWrite = true;
    sampleTextureRT.Create();
    GLEAMCompute.SetTexture(0, "_SampleTexture", sampleTextureRT);

    // create _SumsList and _WeightsList RenderTextures (3D, 6 slices — one per cubemap face)
    sumsListRT = new RenderTexture(probeSampleSize, probeSampleSize, 0, RenderTextureFormat.ARGB32);
    {
        sumsListRT.dimension = UnityEngine.Rendering.TextureDimension.Tex3D;
        sumsListRT.volumeDepth = 6;
        sumsListRT.wrapMode = TextureWrapMode.Clamp;
        sumsListRT.filterMode = FilterMode.Trilinear;
        sumsListRT.enableRandomWrite = true;
        sumsListRT.Create();
    }
    GLEAMCompute.SetTexture(RayCastKernelIndex, "_SumsList", sumsListRT);

    weightsListRT = new RenderTexture(probeSampleSize, probeSampleSize, 0, RenderTextureFormat.RHalf);
    {
        weightsListRT.dimension = UnityEngine.Rendering.TextureDimension.Tex3D;
        weightsListRT.volumeDepth = 6;
        weightsListRT.wrapMode = TextureWrapMode.Clamp;
        weightsListRT.filterMode = FilterMode.Trilinear;
        weightsListRT.enableRandomWrite = true;
        weightsListRT.Create();
    }
    GLEAMCompute.SetTexture(RayCastKernelIndex, "_WeightsList", weightsListRT);

    // create cubemap 3D texture
    cubemapRT = new RenderTexture(probeSampleSize, probeSampleSize, 0, RenderTextureFormat.ARGB32);
    {
        cubemapRT.dimension = UnityEngine.Rendering.TextureDimension.Tex3D;
        cubemapRT.volumeDepth = 6;
        cubemapRT.wrapMode = TextureWrapMode.Clamp;
        cubemapRT.filterMode = FilterMode.Trilinear;
        cubemapRT.enableRandomWrite = true;
        cubemapRT.Create();
    }
    GLEAMCompute.SetTexture(RayCastKernelIndex, "_Cubemap", cubemapRT);

    GLEAMCompute.SetVector("_CameraDimensions", new Vector2(Screen.width, Screen.height));
    // print("width: " + Screen.width + " height: " + Screen.height);

    // Upload the (zero-initialized) sample array; stride is 7 floats (4 bytes each).
    // NOTE(review): sampleBuffer is a local ComputeBuffer that is never Released —
    // it appears to leak GPU memory; consider promoting it to a field released in
    // OnDestroy. Confirm before changing, since the compute shader keeps using it.
    sampleArray = new sample[40000];
    int stride = 4 * 7;
    ComputeBuffer sampleBuffer = new ComputeBuffer(40000, stride);
    sampleBuffer.SetData(sampleArray);
    GLEAMCompute.SetBuffer(0, "samples", sampleBuffer);

    ScreenRect = new Rect(0, 0, Screen.width, Screen.height);
    size = new Vector2Int(6, probeSampleSize);
    GLEAMCompute.SetInt("_ProbeSampleSize", probeSampleSize);
}
// Use this for initialization
void Start()
{
    // Cache the viewer, the head-mounted AR background, and the elevator's MoveUp script.
    VRModeEnabled = GetComponent<GvrViewer>();
    arBackground = Head.GetComponent<ARCameraBackground>();
    pressed = Elevator.GetComponent<MoveUp>();
}