/// <summary>
/// Attaches an ARCore background renderer to the given camera and assigns its
/// background material from Resources.
/// </summary>
/// <param name="camera">Camera that will display the AR camera feed.</param>
public override void SetupCamera(Camera camera)
{
    // Toggle the GameObject off while the component is added so the renderer's
    // OnEnable runs only after its material has been assigned.
    camera.gameObject.SetActive(false);
    m_BackgroundRenderer = camera.gameObject.AddComponent<ARCoreBackgroundRenderer>();

    // Generic overload instead of Resources.Load(path, typeof(Material)) as Material:
    // the old form silently produced null on a type mismatch; the generic form is
    // the idiomatic, type-safe equivalent.
    m_BackgroundRenderer.BackgroundMaterial =
        Resources.Load<Material>("Materials/ARBackground");
    Debug.Assert(m_BackgroundRenderer.BackgroundMaterial != null,
        "Materials/ARBackground could not be loaded from Resources.");

    camera.gameObject.SetActive(true);
}
/// <summary>
/// Installs the ARCore components onto this object: a session (with a config
/// matching the camera framerate), a background renderer, and a tracked pose
/// driver. Each component is created only if its cached reference is null.
/// </summary>
/// <param name="reset">When true, forces re-loading of editor-assigned assets.</param>
public override void Install(bool reset)
{
    base.Install(reset);

#if UNITY_EDITOR
    // Editor-only: load the background material straight from the ARCore SDK
    // asset path. NOTE(review): outside the editor, ARBackgroundMaterial is
    // assumed to be assigned elsewhere (e.g. serialized) — confirm.
    if (ARBackgroundMaterial == null || reset)
    {
        ARBackgroundMaterial = ResourceExt.EditorLoadAsset<Material>(
            "Assets/GoogleARCore/SDK/Materials/ARBackground.mat");
    }
#endif

    // Session: created with a fresh config; MatchCameraFramerate syncs Unity's
    // frame pacing to the camera feed.
    if (arCoreSession == null)
    {
        arCoreSession = this.Ensure<ARCoreSession>().Value;
        arCoreSession.SessionConfig = ScriptableObject.CreateInstance<ARCoreSessionConfig>();
        arCoreSession.SessionConfig.MatchCameraFramerate = true;
    }

    // Background renderer: draws the camera feed behind scene content using
    // the material loaded above.
    if (bgRenderer == null)
    {
        bgRenderer = this.Ensure<ARCoreBackgroundRenderer>().Value;
        bgRenderer.BackgroundMaterial = ARBackgroundMaterial;
    }

    // Pose driver: keeps this transform in sync with the device color camera
    // pose, updating just before rendering.
    if (poseDriver == null)
    {
        poseDriver = this.Ensure<TrackedPoseDriver>().Value;
        poseDriver.SetPoseSource(TrackedPoseDriver.DeviceType.GenericXRDevice,
            TrackedPoseDriver.TrackedPose.ColorCamera);
        poseDriver.trackingType = TrackedPoseDriver.TrackingType.RotationAndPosition;
        poseDriver.updateType = TrackedPoseDriver.UpdateType.BeforeRender;
        poseDriver.UseRelativeTransform = true;
    }
}
/// <summary>
/// Caches the main camera and the scene's ARCore background renderer, then
/// installs a command buffer that copies the background material's texture
/// into a global shader texture after opaque rendering.
/// </summary>
private void Awake()
{
    m_Camera = Camera.main;
    Debug.Assert(m_Camera != null,
        "The scene must include a camera object to get the background texture.");

    m_BackgroundRenderer = FindObjectOfType<ARCoreBackgroundRenderer>();
    if (m_BackgroundRenderer == null)
    {
        Debug.LogError(
            "BackgroundTextureProvider requires ARCoreBackgroundRenderer anywhere in the scene.");
        return;
    }

    m_BackgroundTextureID = Shader.PropertyToID(BackgroundTexturePropertyName);

    m_CommandBuffer = new CommandBuffer { name = "Camera texture" };
    m_CommandBuffer.GetTemporaryRT(
        m_BackgroundTextureID, /*width=*/ -1, /*height=*/ -1, /*depthBuffer=*/ 0,
        FilterMode.Bilinear);

    // Alternatively, can blit from BuiltinRenderTextureType.CameraTarget into
    // m_BackgroundTextureID, but make sure this is executed after the renderer
    // is initialized.
    var bgMaterial = m_BackgroundRenderer.BackgroundMaterial;
    if (bgMaterial != null)
    {
        m_CommandBuffer.Blit(bgMaterial.mainTexture, m_BackgroundTextureID, bgMaterial);
    }

    m_CommandBuffer.SetGlobalTexture(BackgroundTexturePropertyName, m_BackgroundTextureID);

    // Attach for both the forward and deferred rendering paths.
    m_Camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, m_CommandBuffer);
    m_Camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_CommandBuffer);
}
// Alternatively you could as said use the type like
//[SerializeField] private GoogleARCore.ARCoreBackgroundRenderer arRenderer;
private void Awake()
{
    // Fall back to a scene-wide search when no renderer was assigned in the
    // inspector (null comparison uses Unity's Object lifetime check, same as
    // the implicit bool conversion).
    if (arRenderer == null)
    {
        arRenderer = FindObjectOfType<ARCoreBackgroundRenderer>();
    }
}
/// <summary>
/// Locates a background renderer (ARCore, or the demo fallback when ARCore's
/// is absent) and installs a command buffer that publishes the camera
/// background as a global shader texture before opaque rendering.
/// </summary>
private void Start()
{
    m_Camera = Camera.main;

    m_BackgroundRenderer = FindObjectOfType<ARCoreBackgroundRenderer>();
    if (m_BackgroundRenderer == null)
    {
        // No ARCore renderer present: fall back to the demo renderer.
        m_UseDemoRenderer = true;
        m_DemoRenderer = FindObjectOfType<DemoARBackgroundRenderer>();
        if (m_DemoRenderer == null)
        {
            // BUGFIX: the literals previously concatenated to
            // "...ARCoreBackgroundRenderer orDemoARBackgroundRenderer" (missing
            // space at the join); a trailing space has been added.
            Debug.LogError("DemoBackgroundProvider requires ARCoreBackgroundRenderer or " +
                "DemoARBackgroundRenderer anywhere in the scene.");
            return;
        }
        Debug.Log("DemoARBackgroundRenderer loaded.");
    }
    else
    {
        Debug.Log("ARCoreTextureProvider loaded.");
    }

    m_BackgroundTextureID = Shader.PropertyToID(BackgroundTexturePropertyName);

    m_CommandBuffer = new CommandBuffer();
    m_CommandBuffer.name = "Camera texture";
    m_CommandBuffer.GetTemporaryRT(m_BackgroundTextureID,
        /*width=*/ -1, /*height=*/ -1, /*depthBuffer=*/ 0, FilterMode.Bilinear);

    // Alternatively, can blit from BuiltinRenderTextureType.CameraTarget into
    // m_BackgroundTextureID, but make sure this is executed after the renderer
    // is initialized.
    var material = m_UseDemoRenderer
        ? m_DemoRenderer.BackgroundMaterial
        : m_BackgroundRenderer.BackgroundMaterial;
    if (material != null)
    {
        m_CommandBuffer.Blit(material.mainTexture, m_BackgroundTextureID, material);
        Debug.Log("BackgroundTextureProvider material blited.");
    }

    m_CommandBuffer.SetGlobalTexture(BackgroundTexturePropertyName, m_BackgroundTextureID);

    // Run before opaque rendering in both forward and deferred paths.
    m_Camera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, m_CommandBuffer);
    m_Camera.AddCommandBuffer(CameraEvent.BeforeGBuffer, m_CommandBuffer);
}
/// <summary>
/// Hides all child objects at startup and caches the scene's
/// ARCoreBackgroundRenderer.
/// </summary>
void Start()
{
    // Hide every child object.
    foreach (Transform trans in transform)
    {
        if (trans.gameObject.activeSelf)
        {
            trans.gameObject.SetActive(false);
        }
        else
        {
            // Children that were already hidden are recorded instead.
            // NOTE(review): presumably hideObjs_ marks objects to exclude from
            // later re-showing — verify against the code that reads hideObjs_.
            hideObjs_.Add(trans.gameObject);
        }
    }
    backgroundRenderer_ = FindObjectOfType<ARCoreBackgroundRenderer>();
}
// initializes the AR-Core components: disables the existing main camera,
// instantiates the ARCore Device prefab, wires the session config, sets up a
// directional light, and allocates the shared AR data structures.
private void InitArCore()
{
    //Debug.Log("InitArCore started.");

    // Disable the main camera, if any — the ARCore Device prefab provides its own.
    Camera currentCamera = MultiARInterop.GetMainCamera();
    if (currentCamera)
    {
        currentCamera.gameObject.SetActive(false);
    }

    // Create ARCore-Device in the scene and keep it alive across scene loads.
    arCoreDeviceObj = Instantiate(arCoreDevicePrefab, Vector3.zero, Quaternion.identity);
    arCoreDeviceObj.name = "ARCore Device";
    DontDestroyOnLoad(arCoreDeviceObj);

    // Get background material from the prefab's background renderer.
    arCodeRenderer = FindObjectOfType<ARCoreBackgroundRenderer>();
    if (arCodeRenderer)
    {
        backgroundMat = arCodeRenderer.BackgroundMaterial;
    }

    // Update the session config with the augmented-image database, if needed.
    ARCoreSession arSession = arCoreDeviceObj.GetComponent<ARCoreSession>();
    if (arSession != null && arSession.SessionConfig != null && arImageDatabase != null)
    {
        arSession.SessionConfig.AugmentedImageDatabase = arImageDatabase;
    }

    // Reference to the AR main camera (child of the ARCore Device).
    mainCamera = arCoreDeviceObj.GetComponentInChildren<Camera>();

    // Earlier approach kept for reference: replace the scene light with an
    // ARCore EnvironmentalLight instead of reusing/creating a directional light.
    // // disable directional light, if any
    // Light currentLight = MultiARInterop.GetDirectionalLight();
    // if(currentLight)
    // {
    //     currentLight.gameObject.SetActive(false);
    // }
    //
    // // create AR environmental light
    // GameObject envLight = new GameObject("Evironmental Light");
    // //envLight.transform.position = Vector3.zero;
    // //envLight.transform.rotation = Quaternion.identity;
    // envLight.AddComponent<EnvironmentalLight>();
    //
    // // reference to the AR directional light
    // //directionalLight = envLight.GetComponent<Light>();

    // Modify the directional light, creating one if the scene has none.
    Light currentLight = MultiARInterop.GetDirectionalLight();
    if (!currentLight)
    {
        GameObject currentLightObj = new GameObject("Directional light");
        currentLight = currentLightObj.AddComponent<Light>();
        currentLight.type = LightType.Directional;
    }

    // Reset light position & rotation, and persist it across scene loads.
    currentLight.transform.position = Vector3.zero;
    currentLight.transform.rotation = Quaternion.Euler(40f, 40f, 0f);
    DontDestroyOnLoad(currentLight.gameObject);

    // Set light parameters (slightly warm white).
    //currentLight.lightmapBakeType = LightmapBakeType.Mixed;
    currentLight.color = new Color32(255, 254, 244, 255);

    // Add the ar-light component.
    currentLight.gameObject.AddComponent<MultiARDirectionalLight>();

    // Get ar-data (null when no manager is present).
    MultiARInterop.MultiARData arData = arManager ? arManager.GetARData() : null;

    // Pre-allocate point-cloud storage when requested by the manager.
    if (arManager && arManager.usePointCloudData)
    {
        arData.pointCloudData = new Vector3[MultiARInterop.MAX_POINT_COUNT];
        arData.pointCloudLength = 0;
        arData.pointCloudTimestamp = 0.0;
    }

    // Create the surface renderer root object.
    if (arManager && arData != null)
    {
        arData.surfaceRendererRoot = new GameObject();
        arData.surfaceRendererRoot.name = "SurfaceRenderer";
        DontDestroyOnLoad(arData.surfaceRendererRoot);
    }

    // Interface is initialized.
    isInitialized = true;
    //Debug.Log("InitArCore finished.");
}
/// <summary>
/// Unity's Awake() method.
/// Builds the occlusion material and two command buffers: one that renders an
/// occlusion map from the camera target after opaque geometry, and one that
/// captures the camera background into a global shader texture before opaque
/// rendering.
/// </summary>
public void Awake()
{
    _currentOcclusionTransparency = OcclusionTransparency;

    // The occlusion material drives pass #0 below; its parameters come from
    // the serialized fields on this component.
    Debug.Assert(OcclusionShader != null, "Occlusion Shader parameter must be set.");
    _depthMaterial = new Material(OcclusionShader);

    _depthMaterial.SetFloat("_OcclusionTransparency", _currentOcclusionTransparency);
    _depthMaterial.SetFloat("_OcclusionOffsetMeters", OcclusionOffset);
    _depthMaterial.SetFloat("_TransitionSize", TransitionSize);

    // Default texture, will be updated each frame.
    _depthTexture = new Texture2D(2, 2);
    _depthTexture.filterMode = FilterMode.Bilinear;
    _depthMaterial.SetTexture(_currentDepthTexturePropertyName, _depthTexture);

    _camera = Camera.main;
    // Force the camera to produce a depth texture (used by the occlusion pass).
    _camera.depthTextureMode |= DepthTextureMode.Depth;

    _depthBuffer = new CommandBuffer();
    _depthBuffer.name = "Auxilary occlusion textures";

    // Creates the occlusion map.
    int occlusionMapTextureID = Shader.PropertyToID("_OcclusionMap");
    _depthBuffer.GetTemporaryRT(occlusionMapTextureID, -1, -1, 0, FilterMode.Bilinear);

    // Pass #0 renders an auxilary buffer - occlusion map that indicates the
    // regions of virtual objects that are behind real geometry.
    _depthBuffer.Blit(
        BuiltinRenderTextureType.CameraTarget, occlusionMapTextureID, _depthMaterial,
        /*pass=*/ 0);

    // Blurs the occlusion map.
    // NOTE(review): no blur pass actually runs here — the raw occlusion map is
    // bound directly as "_OcclusionMapBlurred". Confirm this simplification is
    // intentional (shaders sampling _OcclusionMapBlurred get the unblurred map).
    _depthBuffer.SetGlobalTexture("_OcclusionMapBlurred", occlusionMapTextureID);

    // Occlusion pass runs after opaque geometry, in both forward and deferred.
    _camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, _depthBuffer);
    _camera.AddCommandBuffer(CameraEvent.AfterGBuffer, _depthBuffer);

    _backgroundRenderer = FindObjectOfType<ARCoreBackgroundRenderer>();
    if (_backgroundRenderer == null)
    {
        Debug.LogError("BackgroundTextureProvider requires ARCoreBackgroundRenderer " +
            "anywhere in the scene.");
        return;
    }

    // Second buffer: capture the camera background into a named global texture.
    _backgroundBuffer = new CommandBuffer();
    _backgroundBuffer.name = "Camera texture";
    _backgroundTextureID = Shader.PropertyToID(BackgroundTexturePropertyName);
    _backgroundBuffer.GetTemporaryRT(_backgroundTextureID,
        /*width=*/ -1, /*height=*/ -1, /*depthBuffer=*/ 0, FilterMode.Bilinear);

    var material = _backgroundRenderer.BackgroundMaterial;
    if (material != null)
    {
        _backgroundBuffer.Blit(material.mainTexture, _backgroundTextureID, material);
    }

    _backgroundBuffer.SetGlobalTexture(
        BackgroundTexturePropertyName, _backgroundTextureID);

    // Background capture runs before opaque rendering, in both paths.
    _camera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, _backgroundBuffer);
    _camera.AddCommandBuffer(CameraEvent.BeforeGBuffer, _backgroundBuffer);
}