/// <summary>
 /// Updates the preview camera with the camera model, and displays the rendered view in the preview window.
 /// </summary>
 /// <param name="useFullResolution"> Whether to use the full resolution (capture) or a lower resolution limited for display in the preview window. </param>
 public void UpdatePreviewCameraModel(bool useFullResolution)
 {
     // The preview camera manager, and its camera, need to have been initialized in a previous step.
     if (_previewCameraManager != null && _previewCameraManager.previewCamera != null)
     {
         // Update the preview camera's camera model, and render the preview image.
         CameraModel cameraParams = cameraSetup.cameraModels[cameraSetup.previewIndex];
         _previewCameraManager.UpdateCameraModel(cameraParams, useFullResolution);
         _previewCameraManager.RenderPreviewToTarget(ref _previewCameraManager.targetTexture, false);
         int previewMaxIndex = cameraSetup.cameraModels.Length - 1;
         PreviewWindow.DisplayImage(_colorCallerName, _previewCameraManager.targetTexture, previewMaxIndex);
         // If depth data, or mesh data, is to be acquired, display a depth preview.
         if (_acquireDepthData || _copyGlobalMesh)
         {
             // Render actual depth into a precise depth texture.
             GeneralToolkit.CreateRenderTexture(ref _targetDepthTexture, cameraParams.pixelResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
             _previewCameraManager.RenderPreviewToTarget(ref _targetDepthTexture, true);
             // Encode the depth texture into a color texture, using a colormap suited for visualization.
             if (_distanceToColorMat == null)
             {
                 _distanceToColorMat = new Material(GeneralToolkit.shaderAcquisitionConvert01ToColor);
             }
             _distanceToColorMat.SetInt(shaderNameIsPrecise, 0);
             GeneralToolkit.CreateRenderTexture(ref _distanceAsColorTexture, cameraParams.pixelResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
             Graphics.Blit(_targetDepthTexture, _distanceAsColorTexture, _distanceToColorMat);
             // Display the texture in the preview window.
             PreviewWindow.DisplayImage(_depthCallerName, _distanceAsColorTexture, previewMaxIndex);
             // Reset the active render texture.
             RenderTexture.active = null;
         }
     }
 }
        /// <summary>
        /// Fetches the global mesh and initializes the camera and materials.
        /// </summary>
        private void InitializePerCall()
        {
            // Deactivate any other renderer in the scene.
            _deactivatedRendererGOs = GeneralToolkit.DeactivateOtherActiveComponents <Renderer>(gameObject);
            // Create a preview camera manager and initialize it with the camera model's pose and parameters.
            _previewCameraModel = CameraModel.CreateCameraModel();
            _previewCameraModel.transform.position = Vector3.zero;
            _previewCameraModel.transform.rotation = Quaternion.identity;
            _previewCameraModel.fieldOfView        = 60f * Vector2.one;
            float focalLength = Camera.FieldOfViewToFocalLength(_previewCameraModel.fieldOfView.x, 1f);

            _previewCameraManager = new GameObject("Preview Camera Manager").AddComponent <PreviewCameraManager>();
            Transform previewCameraTransform = new GameObject("Preview Camera").transform;

            GeneralToolkit.CreateRenderTexture(ref _previewCameraManager.targetTexture, Vector2Int.one, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
            _previewCameraManager.CreatePreviewCamera(_previewCameraManager.gameObject, previewCameraTransform, _previewCameraModel);
            _previewCameraManager.previewCamera.clearFlags      = CameraClearFlags.Color;
            _previewCameraManager.previewCamera.backgroundColor = Color.clear;
            // Create the materials.
            _renderToTextureMapMat = new Material(GeneralToolkit.shaderProcessingGlobalTextureMap);
            _renderToTextureMapMat.SetFloat(_shaderNameFocalLength, focalLength);
            _normalizeByAlphaMat = new Material(GeneralToolkit.shaderNormalizeByAlpha);
            // Initialize the helper object for ULR.
            _helperULR = gameObject.AddComponent <Rendering.Helper_ULR>();
            _helperULR.Reset();
            _helperULR.InitializeLinks();
            _helperULR.blendCamCount         = Rendering.Helper_ULR.maxBlendCamCount;
            _helperULR.numberOfSourceCameras = PMColorTextureArray.colorData.depth;
            _helperULR.CreateULRBuffersAndArrays();
            _helperULR.InitializeBlendingMaterialParameters(ref _renderToTextureMapMat);
            _helperULR.currentBlendingMaterial = _renderToTextureMapMat;
            _helperULR.initialized             = true;
        }
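As a side note on the focal length computed above: Camera.FieldOfViewToFocalLength applies the standard pinhole relation, so with the sensor size of 1 used here it reduces to 1 / (2 * tan(fieldOfView / 2)). A minimal stand-alone sketch of that relation, for illustration only (the class and method names below are hypothetical):

using UnityEngine;

public static class FocalLengthSketch
{
    // Pinhole relation assumed to underlie Camera.FieldOfViewToFocalLength:
    // focalLength = sensorSize / (2 * tan(fieldOfView / 2)), with fieldOfView in degrees.
    public static float FocalLengthFromFov(float fieldOfViewDegrees, float sensorSize = 1f)
    {
        return sensorSize / (2f * Mathf.Tan(0.5f * fieldOfViewDegrees * Mathf.Deg2Rad));
    }
}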
 /// <summary>
 /// Updates the preview camera with the given camera model.
 /// </summary>
 /// <param name="sourceCameraModel"> The camera model with which to update the preview camera. </param>
 public void UpdateCameraModel(CameraModel sourceCameraModel)
 {
     if (previewCamera != null)
     {
         // Store the camera model for further use.
         _cameraModel = sourceCameraModel;
         // Set up the camera's parameters and target texture from the given camera model.
         previewCamera.targetTexture = null;
         _cameraModel.TransferParametersToCamera(ref previewCamera);
         GeneralToolkit.CreateRenderTexture(ref targetTexture, _cameraModel.pixelResolution, targetTexture.depth, targetTexture.format, targetTexture.sRGB == false, targetTexture.filterMode, targetTexture.wrapMode);
         previewCamera.targetTexture = targetTexture;
     }
 }
 /// <inheritdoc/>
 public override void Selected()
 {
     base.Selected();
     // If needed, initialize the preview camera.
     if (_previewCameraManager.previewCamera == null)
     {
         Transform previewCameraTransform = new GameObject("PreviewCamera").transform;
         GeneralToolkit.CreateRenderTexture(ref _previewCameraManager.targetTexture, Vector2Int.one, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
         _previewCameraManager.CreatePreviewCamera(gameObject, previewCameraTransform, cameraModel);
     }
     // Notify the preview window that this object will send images for preview.
     PreviewWindow.AddCaller(this, _previewCallerName);
     // Update the camera model and display the rendered preview.
     UpdateCameraModel();
 }
        /// <summary>
        /// Initializes the blending material.
        /// </summary>
        private void InitializeMaterial()
        {
            // Create the blending material from the corresponding shader.
            blendingMaterial = new Material(GeneralToolkit.shaderRenderingDiskBlendedPerViewMeshes);
            // Store the color data.
            blendingMaterial.SetTexture(ColorTextureArray.shaderNameColorData, PMColorTextureArray.colorData);
            // Create two sets of textures: the target textures (rendered to every frame) and the stored textures (read from every frame).
            Vector2Int displayResolution = GeneralToolkit.GetCurrentDisplayResolution();

            GeneralToolkit.CreateRenderTexture(ref _targetColorTexture, displayResolution, 0, RenderTextureFormat.DefaultHDR, false, FilterMode.Point, TextureWrapMode.Clamp);
            GeneralToolkit.CreateRenderTexture(ref _targetDepthTexture, displayResolution, 24, RenderTextureFormat.Depth, true, FilterMode.Point, TextureWrapMode.Clamp);
            GeneralToolkit.CreateRenderTexture(ref _storedColorTexture, displayResolution, 0, RenderTextureFormat.DefaultHDR, false, FilterMode.Point, TextureWrapMode.Clamp);
            GeneralToolkit.CreateRenderTexture(ref _storedDepthTexture, displayResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
            blendingMaterial.SetTexture(_shaderNameStoredColorTexture, _storedColorTexture);
            blendingMaterial.SetTexture(_shaderNameStoredDepthTexture, _storedDepthTexture);
        }
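The split above into target textures (written every frame) and stored textures (read every frame) suggests a per-frame ping-pong between the two sets. A minimal sketch of such a swap, assuming the blending material re-reads the stored set each frame; the class, method, and literal shader property names are assumptions, not taken from the toolkit:

using UnityEngine;

public class StoredTextureSwapSketch : MonoBehaviour
{
    // Counterparts of the textures created in InitializeMaterial (assumed to be exposed here).
    public RenderTexture targetColorTexture;
    public RenderTexture targetDepthTexture;
    public RenderTexture storedColorTexture;
    public RenderTexture storedDepthTexture;
    public Material blendingMaterial;

    // Hypothetical per-frame step: swap the freshly rendered textures with the stored ones,
    // then point the blending material at the new "stored" set before the next render.
    private void SwapTargetAndStored()
    {
        (targetColorTexture, storedColorTexture) = (storedColorTexture, targetColorTexture);
        (targetDepthTexture, storedDepthTexture) = (storedDepthTexture, targetDepthTexture);
        blendingMaterial.SetTexture("_StoredColorTexture", storedColorTexture); // property name is an assumption
        blendingMaterial.SetTexture("_StoredDepthTexture", storedDepthTexture); // property name is an assumption
    }
}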
        /// <summary>
        /// Computes and stores a texture map for the current submesh.
        /// </summary>
        private void ComputeAndStoreTextureMap()
        {
            // Check if the asset has already been processed.
            string assetName              = textureMapAssetPrefix + GeneralToolkit.ToString(_submeshIndex);
            string bundledAssetName       = dataHandler.GetBundledAssetName(this, assetName);
            string textureMapRelativePath = Path.Combine(GeneralToolkit.tempDirectoryRelativePath, bundledAssetName + ".asset");

            if (dataHandler.IsAssetAlreadyProcessed(textureMapRelativePath))
            {
                return;
            }
            // Render with the preview camera a first time to initialize all buffers correctly.
            _previewCameraModel.pixelResolution = _textureMapResolution;
            _previewCameraManager.UpdateCameraModel(_previewCameraModel, true);
            _previewCameraManager.RenderPreviewToTarget(ref _previewCameraManager.targetTexture, false);
            // Render the mesh to the preview camera's target texture.
            Camera.onPreCull += DrawSubmeshAsTextureMapWithCamera;
            _previewCameraManager.RenderPreviewToTarget(ref _previewCameraManager.targetTexture, false);
            Camera.onPreCull -= DrawSubmeshAsTextureMapWithCamera;
            // Normalize the RGB channels by the alpha channel.
            RenderTexture tempTex = new RenderTexture(1, 1, 0);

            GeneralToolkit.CreateRenderTexture(ref tempTex, _textureMapResolution, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
            Graphics.Blit(_previewCameraManager.targetTexture, tempTex, _normalizeByAlphaMat);
            // Apply a morphological dilation to better handle seams in the texture map.
            GeneralToolkit.RenderTextureApplyMorphologicalDilation(ref tempTex, _textureMapResolution.x / 200, ImageProcessingKernelType.Box, false);
            // Copy the render texture to an output texture.
            Texture2D outTex = new Texture2D(1, 1);

            GeneralToolkit.CreateTexture2D(ref outTex, _textureMapResolution, TextureFormat.RGB24, false, FilterMode.Bilinear, TextureWrapMode.Clamp, true);
            GeneralToolkit.CopyRenderTextureToTexture2D(tempTex, ref outTex);
            outTex.filterMode = FilterMode.Bilinear;
            outTex.anisoLevel = 3;
            // Destroy created objects.
            DestroyImmediate(tempTex);
            // Save a copy as a png file for visualization.
            string copyName = DataHandler.GetBundledAssetPrefixFromType(this.GetType()) + assetName + ".png";

            GeneralToolkit.SaveTexture2DToPNG(outTex, Path.Combine(GeneralToolkit.tempDirectoryAbsolutePath, copyName));
            // Create an asset from the created texture map.
            AssetDatabase.CreateAsset(outTex, textureMapRelativePath);
            AssetDatabase.Refresh();
            // Set the created texture in the final array.
            Texture2D texAsset = AssetDatabase.LoadAssetAtPath <Texture2D>(textureMapRelativePath);

            textureMaps[_submeshIndex] = (Texture2D)Instantiate(texAsset);
        }
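Regarding the normalize-by-alpha pass above: it presumably divides the accumulated RGB values by the accumulated weight stored in the alpha channel, so texels that received several blended contributions end up with their weighted-average color. The toolkit does this on the GPU via shaderNormalizeByAlpha; the CPU-side sketch below only illustrates the idea and is not that shader's implementation:

using UnityEngine;

public static class NormalizeByAlphaSketch
{
    // Conceptual equivalent of a normalize-by-alpha pass: divide the accumulated color
    // by the accumulated weight stored in the alpha channel. The texture must be readable.
    public static void NormalizeByAlpha(Texture2D accumulated)
    {
        Color[] pixels = accumulated.GetPixels();
        for (int i = 0; i < pixels.Length; i++)
        {
            float weight = pixels[i].a;
            if (weight > 0f)
            {
                pixels[i] = new Color(pixels[i].r / weight, pixels[i].g / weight, pixels[i].b / weight, 1f);
            }
        }
        accumulated.SetPixels(pixels);
        accumulated.Apply();
    }
}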
 /// <summary>
 /// Coroutine that renders per-view meshes from the given depth texture array.
 /// </summary>
 /// <returns> An IEnumerator, to be run as a coroutine. </returns>
 private IEnumerator StorePerViewMeshesCoroutine()
 {
     // Reset the progress bar.
     GeneralToolkit.ResetCancelableProgressBar(true, false);
     // Initialize the compute shader's properties.
     PMPerViewMeshesQSTR.InitializePerCall();
     // Create a mesh for each source depth map.
     for (int sourceCamIndex = 0; sourceCamIndex < cameraSetup.cameraModels.Length; sourceCamIndex++)
     {
         // Check if the asset has already been processed.
         string bundledAssetName = dataHandler.GetBundledAssetName(PMPerViewMeshesQSTR, PerViewMeshesQSTR.perViewMeshAssetPrefix + sourceCamIndex);
         string meshRelativePath = Path.Combine(GeneralToolkit.tempDirectoryRelativePath, bundledAssetName + ".asset");
         if (dataHandler.IsAssetAlreadyProcessed(meshRelativePath))
         {
             continue;
         }
         // Update the progress bar, and enable the user to cancel the process.
         PMPerViewMeshesQSTR.DisplayAndUpdateCancelableProgressBar();
         if (GeneralToolkit.progressBarCanceled)
         {
             processingCaller.processingCanceled = true;
             break;
         }
         // Update the camera model.
         PMPerViewMeshesQSTR.cameraModel = cameraSetup.cameraModels[sourceCamIndex];
         // Initialize the distance map texture, and load the depth data into it.
         PMPerViewMeshesQSTR.InitializeDistanceMap();
         Vector2Int    distanceMapResolution  = new Vector2Int(PMPerViewMeshesQSTR.distanceMap.width, PMPerViewMeshesQSTR.distanceMap.height);
         RenderTexture depthTextureArraySlice = new RenderTexture(1, 1, 0);
         GeneralToolkit.CreateRenderTexture(ref depthTextureArraySlice, distanceMapResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point);
         Graphics.Blit(PMDepthTextureArray.depthData, depthTextureArraySlice, sourceCamIndex, 0);
         GeneralToolkit.CopyRenderTextureToTexture2D(depthTextureArraySlice, ref PMPerViewMeshesQSTR.distanceMap);
         DestroyImmediate(depthTextureArraySlice);
         // Compute a mesh from the distance map.
         Mesh meshAsset;
         PMPerViewMeshesQSTR.ComputeMesh(out meshAsset);
         // Save this mesh as an asset.
         GeneralToolkit.CreateAndUnloadAsset(meshAsset, meshRelativePath);
          yield return null;
     }
     // Reset the progress bar.
     GeneralToolkit.ResetCancelableProgressBar(true, false);
 }
        /// <summary>
        /// Creates a mesh based on the camera's Z-buffer. Overwrites previously created mesh.
        /// </summary>
        public virtual void CreateMeshFromZBuffer()
        {
            // Destroy the previous mesh.
            DestroyMesh();
            // Initialize the quadtree mesh processing method with the camera parameters.
            CameraModel cameraModel = GetCameraModel();

            _geometryProcessingMethod.InitializePerCall();
            _geometryProcessingMethod.cameraModel = cameraModel;
            _geometryProcessingMethod.InitializeDistanceMap();
            // Initialize the visualization texture.
            GeneralToolkit.CreateRenderTexture(ref _visualizationTexture, cameraModel.pixelResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
            // Provide the depth texture to use as input to the geometry processing method.
            ProvideDepthTextureToGeometryProcessingMethod();
            // Initialize the material used to display the mesh in the Scene view.
            _displayMaterial = new Material(GeneralToolkit.shaderUnlitTexture);
            _displayMaterial.SetTexture("_MainTex", _visualizationTexture);
            // Use the processing method to generate a mesh from the provided depth texture.
            GenerateDepthMapMesh();
        }
        /// <summary>
        /// Loads the selected source image as preview.
        /// </summary>
        public void LoadSourceImageAsPreview()
        {
            if (!Application.isPlaying || _previewSourceImagesLoader == null || _previewSourceImagesLoader.colorData == null ||
                cameraSetup.previewIndex == _lastLoadedPreviewIndex || cameraSetup.cameraModels == null || cameraSetup.cameraModels.Length < 1)
            {
                return;
            }
            _lastLoadedPreviewIndex = cameraSetup.previewIndex;
            if (previewSourceTexture != null)
            {
                DestroyImmediate(previewSourceTexture);
            }
            Vector2Int previewResolution = PreviewWindow.GetPreviewResolution(cameraSetup.cameraModels[_lastLoadedPreviewIndex].pixelResolution);

            GeneralToolkit.CreateRenderTexture(ref previewSourceTexture, previewResolution, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point);
            Graphics.Blit(_previewSourceImagesLoader.colorData, previewSourceTexture, _lastLoadedPreviewIndex, 0);
            int previewMaxIndex = cameraSetup.cameraModels.Length - 1;

            PreviewWindow.DisplayImage(_sourceCallerName, previewSourceTexture, previewMaxIndex);
        }
        /// <summary>
        /// Converts the camera's Z-buffer to a color texture.
        /// </summary>
        private void ConvertZBufferToColorTexture()
        {
            // Initialize a RFloat texture to store the camera's depth information.
            RenderTexture depthTexture = new RenderTexture(1, 1, 0);

            GeneralToolkit.CreateRenderTexture(ref depthTexture, cameraModel.pixelResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
            // Render the preview camera's depth to this texture.
            _previewCameraManager.RenderPreviewToTarget(ref depthTexture, true);
            // Initialize the output RGB color texture.
            GeneralToolkit.CreateRenderTexture(ref _distanceAsColorTexture, cameraModel.pixelResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
            // Convert the depth information into color and store it in this texture.
            Material distanceToColorMat = new Material(GeneralToolkit.shaderAcquisitionConvert01ToColor);

            Graphics.Blit(depthTexture, _distanceAsColorTexture, distanceToColorMat);
            // Create a visualization texture, showing the depth information with a visual color map.
            distanceToColorMat.SetInt(Acquisition.Acquisition.shaderNameIsPrecise, 0);
            Graphics.Blit(depthTexture, _visualizationTexture, distanceToColorMat);
            // Destroy the created temporary objects.
            DestroyImmediate(depthTexture);
            DestroyImmediate(distanceToColorMat);
        }
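For reference, the depth rendered by RenderPreviewToTarget (shown further below) appears to be a camera-to-fragment distance normalized into [0,1] over the camera model's distance range, passed to the replacement shader as _DistanceRange. A minimal CPU-side sketch of that presumed mapping and its inverse; the convention is an assumption, not the shader's actual code:

using UnityEngine;

public static class DistanceEncodingSketch
{
    // Presumed convention of the distance-rendering replacement shader: a world-space
    // distance is normalized into [0,1] over [distanceRange.x, distanceRange.y].
    public static float EncodeDistance01(float distance, Vector2 distanceRange)
    {
        return Mathf.Clamp01((distance - distanceRange.x) / (distanceRange.y - distanceRange.x));
    }

    // Inverse mapping: recover a metric distance from an encoded [0,1] value.
    public static float DecodeDistance01(float encoded01, Vector2 distanceRange)
    {
        return Mathf.Lerp(distanceRange.x, distanceRange.y, encoded01);
    }
}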
 /// <summary>
 /// Creates a preview camera from the camera prefab.
 /// </summary>
 public void CreatePreviewCameraFromPrefab()
 {
     // If it is not already done, add a preview camera to the preview camera manager, initialize its target texture, and update its camera model.
     if (_previewCameraManager.previewCamera == null)
     {
         Transform previewCameraTransform;
         if (_cameraPrefab != null)
         {
             previewCameraTransform = GameObject.Instantiate(_cameraPrefab).transform;
         }
         else
         {
             previewCameraTransform = new GameObject("PreviewCamera").transform;
             Camera previewCamera = previewCameraTransform.gameObject.AddComponent <Camera>();
             previewCamera.stereoTargetEye = StereoTargetEyeMask.None;
         }
         previewCameraTransform.name = "Preview_" + previewCameraTransform.name;
         GeneralToolkit.CreateRenderTexture(ref _previewCameraManager.targetTexture, Vector2Int.one, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
         CameraModel previewCameraModel = cameraSetup.cameraModels[cameraSetup.previewIndex];
         _previewCameraManager.CreatePreviewCamera(gameObject, previewCameraTransform, previewCameraModel);
         UpdatePreviewCameraModel(false);
     }
 }
        /// <summary>
        /// Coroutine that renders depth maps for each view using a given global mesh, and stores this information as a depth texture array.
        /// </summary>
        /// <returns> An IEnumerator, to be run as a coroutine. </returns>
        private IEnumerator StoreDepthMapTextureArrayCoroutine()
        {
            // Get the processed asset's name and path in the bundle.
            string bundledAssetName      = GetBundledAssetName(depthMapsAssetName);
            string depthDataPathRelative = GetAssetPathRelative(bundledAssetName);

            // Check if the asset has already been processed.
            if (!dataHandler.IsAssetAlreadyProcessed(depthDataPathRelative))
            {
                // Reset the progress bar.
                GeneralToolkit.ResetCancelableProgressBar(true, false);
                // Create and initialize a temporary preview camera manager aimed at storing depth data.
                PreviewCameraManager previewCameraManager   = new GameObject("TempPreviewCameraManager").AddComponent <PreviewCameraManager>();
                Transform            previewCameraTransform = new GameObject("TempPreviewCamera").transform;
                GeneralToolkit.CreateRenderTexture(ref previewCameraManager.targetTexture, Vector2Int.one, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
                previewCameraManager.CreatePreviewCamera(previewCameraManager.gameObject, previewCameraTransform, cameraSetup.cameraModels[0]);
                // Instantiate the mesh as a set of submeshes, provided with the default material.
                Material     defaultMat = new Material(GeneralToolkit.shaderStandard);
                GameObject[] submeshGOs = new GameObject[PMGlobalMeshEF.globalMesh.subMeshCount];
                for (int i = 0; i < submeshGOs.Length; i++)
                {
                    submeshGOs[i] = new GameObject("TempMesh_" + i);
                    submeshGOs[i].transform.parent = previewCameraManager.transform;
                    submeshGOs[i].AddComponent <MeshFilter>().sharedMesh = PMGlobalMeshEF.globalMesh;
                    Material[] materials = new Material[submeshGOs.Length];
                    materials[i] = defaultMat;
                    submeshGOs[i].AddComponent <MeshRenderer>().materials = materials;
                }
                // Create an empty texture array in which to store the depth data.
                Vector2Int arrayResolution; int arrayDepth;
                ColorTextureArray.GetCorrectedPowerOfTwoForImages(cameraSetup.cameraModels, out arrayResolution, out arrayDepth);
                depthData = new Texture2DArray(1, 1, 1, TextureFormat.RGB24, false);
                GeneralToolkit.CreateTexture2DArray(ref depthData, arrayResolution, arrayDepth, TextureFormat.RGB24, false, FilterMode.Point, TextureWrapMode.Clamp, false);
                // Create a render texture in which to store RFloat depth data, with the array's resolution.
                RenderTexture arraySliceRFloatRenderTex = new RenderTexture(1, 1, 0);
                GeneralToolkit.CreateRenderTexture(ref arraySliceRFloatRenderTex, arrayResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
                // Create a material and render texture to encode the RFloat distance as an RGB color.
                Material      distanceToColorMat     = new Material(GeneralToolkit.shaderAcquisitionConvert01ToColor);
                RenderTexture distanceAsColorTexture = new RenderTexture(1, 1, 0);
                GeneralToolkit.CreateRenderTexture(ref distanceAsColorTexture, arrayResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
                // Create a texture in which to store the RGB-encoded distance.
                Texture2D arraySliceRGBTex = new Texture2D(1, 1);
                GeneralToolkit.CreateTexture2D(ref arraySliceRGBTex, arrayResolution, TextureFormat.RGB24, true, FilterMode.Point, TextureWrapMode.Clamp, false);
                // Create a depth map in each layer of the texture array, corresponding to each source camera.
                for (int i = 0; i < arrayDepth; i++)
                {
                    // Update the progress bar, and enable the user to cancel the process.
                    DisplayAndUpdateCancelableProgressBar();
                    if (GeneralToolkit.progressBarCanceled)
                    {
                        processingCaller.processingCanceled = true;
                        break;
                    }
                    // Set the preview camera manager's camera model to the current source camera.
                    previewCameraManager.UpdateCameraModel(cameraSetup.cameraModels[i]);
                    // Render the depth data seen by this camera as an RFloat texture.
                    previewCameraManager.RenderPreviewToTarget(ref previewCameraManager.targetTexture, true);
                    // Resize the rendered texture to the output array's resolution.
                    Graphics.Blit(previewCameraManager.targetTexture, arraySliceRFloatRenderTex);
                    // Convert the resized RFloat texture to an RGB encoding.
                    Graphics.Blit(arraySliceRFloatRenderTex, distanceAsColorTexture, distanceToColorMat);
                    // Store the RGB color texture into the texture array.
                    GeneralToolkit.CopyRenderTextureToTexture2D(distanceAsColorTexture, ref arraySliceRGBTex);
                    depthData.SetPixels(arraySliceRGBTex.GetPixels(), i);
                    depthData.Apply();
                    yield return null;
                }
                // If the user has not canceled the process, continue.
                if (!GeneralToolkit.progressBarCanceled)
                {
                    // Create an asset from this texture array.
                    AssetDatabase.CreateAsset(depthData, depthDataPathRelative);
                    AssetDatabase.Refresh();
                }
                // Destroy the created textures and conversion material.
                DestroyImmediate(arraySliceRGBTex);
                DestroyImmediate(distanceAsColorTexture);
                DestroyImmediate(distanceToColorMat);
                DestroyImmediate(arraySliceRFloatRenderTex);
                // Destroy the created meshes and default material.
                DestroyImmediate(defaultMat);
                foreach (GameObject submeshGO in submeshGOs)
                {
                    DestroyImmediate(submeshGO);
                }
                // Destroy the preview camera manager.
                previewCameraManager.DestroyPreviewCamera();
                DestroyImmediate(previewCameraManager.gameObject);
                // Reset the progress bar.
                GeneralToolkit.ResetCancelableProgressBar(true, false);
            }
            Texture2DArray depthDataAsset = AssetDatabase.LoadAssetAtPath <Texture2DArray>(depthDataPathRelative);

            depthData = (Texture2DArray)Instantiate(depthDataAsset);
        }
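A note on the RFloat-to-RGB conversion used above: storing depth in RGB24 array slices only preserves precision if the [0,1] distance is spread over the three 8-bit channels, i.e. packed as 24-bit fixed point. Whether shaderAcquisitionConvert01ToColor uses exactly this scheme is an assumption; the sketch below illustrates one common packing of that kind:

using UnityEngine;

public static class Distance01PackingSketch
{
    // Pack a [0,1] value into three 8-bit channels (24-bit fixed point) so it can be
    // stored in an RGB24 texture. This is one common scheme, not necessarily the toolkit's.
    public static Color Pack01ToRGB(float value01)
    {
        uint fixed24 = (uint)Mathf.Round(Mathf.Clamp01(value01) * 16777215f); // 2^24 - 1
        float r = ((fixed24 >> 16) & 0xFF) / 255f;
        float g = ((fixed24 >> 8) & 0xFF) / 255f;
        float b = (fixed24 & 0xFF) / 255f;
        return new Color(r, g, b, 1f);
    }

    // Inverse operation: rebuild the [0,1] value from the three channels.
    public static float UnpackRGBTo01(Color rgb)
    {
        uint fixed24 = ((uint)Mathf.Round(rgb.r * 255f) << 16)
                     | ((uint)Mathf.Round(rgb.g * 255f) << 8)
                     | (uint)Mathf.Round(rgb.b * 255f);
        return fixed24 / 16777215f;
    }
}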
        /// <summary>
        /// Renders the preview camera (with associated camera model) to a specified target texture.
        /// </summary>
        /// <param name="destTexture"> The destination texture in which to render the preview camera. </param>
        /// <param name="isDepth"> True if the preview camera should render depth instead of color, false otherwise. </param>
        public void RenderPreviewToTarget(ref RenderTexture destTexture, bool isDepth)
        {
            // Only render the novel view if there is a preview camera.
            if (previewCamera == null)
            {
                return;
            }
            // Activate the preview camera.
            previewCamera.enabled = true;
            CameraClearFlags clearFlags = previewCamera.clearFlags;
            // If the capture is for a depth map, not a color image, set up the corresponding process.
            List <MonoBehaviour> deactivatedComponents = new List <MonoBehaviour>();

            if (isDepth)
            {
                // To do so, deactivate components that are not needed and that may interfere with the computation of depth, such as image effects.
                MonoBehaviour[] components = previewCamera.GetComponents <MonoBehaviour>();
                foreach (MonoBehaviour component in components)
                {
                    if (component != previewCamera && component.isActiveAndEnabled)
                    {
                        deactivatedComponents.Add(component);
                        component.enabled = false;
                    }
                }
                // Set a replacement shader that renders distance from the camera.
                Vector3 xyz = previewCamera.transform.position;
                Shader.SetGlobalVector("_CameraWorldXYZ", new Vector4(xyz.x, xyz.y, xyz.z, 0f));
                Shader.SetGlobalVector("_DistanceRange", new Vector4(_cameraModel.distanceRange.x, _cameraModel.distanceRange.y, 0f, 0f));
                previewCamera.clearFlags      = CameraClearFlags.Color;
                previewCamera.backgroundColor = Color.white;
                previewCamera.SetReplacementShader(GeneralToolkit.shaderAcquisitionRenderDistance, string.Empty);
            }
            // If the capture is omnidirectional, set up the corresponding process.
            if (_cameraModel.isOmnidirectional)
            {
                // Render the camera to a cubemap.
                RenderTexture cubemap   = new RenderTexture(1, 1, 0);
                int           cubeWidth = destTexture.width / 4;
                GeneralToolkit.CreateRenderTexture(ref cubemap, new Vector2Int(cubeWidth, cubeWidth), destTexture.depth, destTexture.format, destTexture.sRGB == false, destTexture.filterMode, destTexture.wrapMode);
                cubemap.dimension = UnityEngine.Rendering.TextureDimension.Cube;
                previewCamera.stereoSeparation = 0f;
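                // Face mask 63 (binary 111111) renders all six cubemap faces; with stereoSeparation at 0, the left eye gives a monoscopic render.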
                previewCamera.RenderToCubemap(cubemap, 63, Camera.MonoOrStereoscopicEye.Left);
                // Convert the cubemap to a single equirectangular texture.
                cubemap.ConvertToEquirect(destTexture);
                RenderTexture.DestroyImmediate(cubemap);
            }
            // Otherwise, simply render the perspective camera.
            else
            {
                previewCamera.targetTexture = destTexture;
                previewCamera.Render();
            }
            // Reset the preview camera, and any deactivated components.
            previewCamera.targetTexture = targetTexture;
            previewCamera.clearFlags    = clearFlags;
            previewCamera.ResetReplacementShader();
            foreach (MonoBehaviour component in deactivatedComponents)
            {
                component.enabled = true;
            }
            RenderTexture.active = null;
            // Deactivate the preview camera.
            previewCamera.enabled = false;
        }
        /// <summary>
        /// In play mode, loads the rendered view and the evaluation metric for preview.
        /// </summary>
        public void LoadRenderedViewAsPreview()
        {
            RenderTexture.active = null;
            if (_previewEvalTexture != null)
            {
                DestroyImmediate(_previewEvalTexture);
            }
            if (_previewRenderTexture != null)
            {
                DestroyImmediate(_previewRenderTexture);
            }
            // Check that there are camera models, that the application is playing, and that the object is active.
            if (processing.cameraSetup.cameraModels == null || !Application.isPlaying || !gameObject.activeInHierarchy || !_launched)
            {
                return;
            }
            int previewMaxIndex = processing.cameraSetup.cameraModels.Length - 1;
            // Get the camera model for this index.
            CameraModel tempCameraModel = CameraModel.CreateCameraModel();

            tempCameraModel.ParametersFromCameraModel(processing.cameraSetup.cameraModels[processing.cameraSetup.previewIndex]);
            tempCameraModel.pixelResolution = PreviewWindow.GetPreviewResolution(tempCameraModel.pixelResolution);
            // Display a preview of the rendered view.
            if (selectedBlendingMethod != null && selectedEvaluationMethod != null)
            {
                // Inform the blending method to exclude the source view if desired.
                if (selectedEvaluationMethod.excludeSourceView)
                {
                    selectedBlendingMethod.ExcludeSourceView(processing.cameraSetup.previewIndex);
                }
                // Create a preview camera manager and initialize it with the camera model's pose and parameters.
                PreviewCameraManager previewCameraManager   = new GameObject("Preview Camera Manager").AddComponent <PreviewCameraManager>();
                Transform            previewCameraTransform = new GameObject("Preview Camera").transform;
                GeneralToolkit.CreateRenderTexture(ref previewCameraManager.targetTexture, Vector2Int.one, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
                previewCameraManager.CreatePreviewCamera(previewCameraManager.gameObject, previewCameraTransform, tempCameraModel);
                previewCameraManager.previewCamera.clearFlags      = CameraClearFlags.Color;
                previewCameraManager.previewCamera.backgroundColor = Color.black;
                // Render the preview camera to a target texture, and display it in the preview window.
                previewCameraManager.RenderPreviewToTarget(ref previewCameraManager.targetTexture, false);
                GeneralToolkit.CreateRenderTexture(ref _previewRenderTexture, tempCameraModel.pixelResolution, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
                Graphics.Blit(previewCameraManager.targetTexture, _previewRenderTexture);
                PreviewWindow.DisplayImage(_renderCallerName, _previewRenderTexture, previewMaxIndex);
                // Destroy the preview camera manager.
                previewCameraManager.DestroyPreviewCamera();
                DestroyImmediate(previewCameraManager.gameObject);
                // Inform the blending method that it should no longer exclude the source view.
                selectedBlendingMethod.ExcludeSourceView(-1);
            }
            DestroyImmediate(tempCameraModel.gameObject);
            // Display the evaluation metric as an RGB color texture.
            if (selectedEvaluationMethod != null && selectedEvaluationMethod.evaluationMaterial != null)
            {
                // Use a shader to compute the evaluation metric for each pixel and display it as a color value.
                GeneralToolkit.CreateRenderTexture(ref _previewEvalTexture, tempCameraModel.pixelResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
                selectedEvaluationMethod.evaluationMaterial.SetTexture(EvaluationMethod.shaderNameTextureOne, processing.previewSourceTexture);
                selectedEvaluationMethod.evaluationMaterial.SetTexture(EvaluationMethod.shaderNameTextureTwo, _previewRenderTexture);
                Graphics.Blit(null, _previewEvalTexture, selectedEvaluationMethod.evaluationMaterial);
                // Display the created texture in the preview window.
                PreviewWindow.DisplayImage(_evalCallerName, _previewEvalTexture, previewMaxIndex);
            }
        }
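The evaluation pass above compares the loaded source image with the re-rendered view per pixel; the exact metric depends on the selected evaluation method's material. Purely as an illustration of that kind of comparison, here is a CPU-side sketch computing a per-pixel absolute color difference (both textures must be readable and of equal size); it is not the toolkit's actual evaluation shader:

using UnityEngine;

public static class EvaluationMetricSketch
{
    // Illustrative per-pixel comparison between a source image and a rendered view:
    // returns a grayscale error image where brighter means a larger absolute difference.
    public static Texture2D AbsoluteDifference(Texture2D source, Texture2D rendered)
    {
        Texture2D error = new Texture2D(source.width, source.height, TextureFormat.RGB24, false);
        for (int y = 0; y < source.height; y++)
        {
            for (int x = 0; x < source.width; x++)
            {
                Color a = source.GetPixel(x, y);
                Color b = rendered.GetPixel(x, y);
                float diff = (Mathf.Abs(a.r - b.r) + Mathf.Abs(a.g - b.g) + Mathf.Abs(a.b - b.b)) / 3f;
                error.SetPixel(x, y, new Color(diff, diff, diff));
            }
        }
        error.Apply();
        return error;
    }
}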