Example 1
 /// <inheritdoc/>
 protected override IEnumerator ExecuteMethodCoroutine()
 {
     // Reset the progress bar.
     GeneralToolkit.ResetCancelableProgressBar(true, false);
     // Initialize the compute shader's properties.
     InitializePerCall();
     // Create a mesh for each source depth map.
     perViewMeshes = new Mesh[cameraSetup.cameraModels.Length];
     for (int sourceIndex = 0; sourceIndex < perViewMeshes.Length; sourceIndex++)
     {
         // Update the progress bar, and enable the user to cancel the process.
         DisplayAndUpdateCancelableProgressBar();
         if (GeneralToolkit.progressBarCanceled)
         {
             processingCaller.processingCanceled = true;
             break;
         }
         // Process the depth map.
         ProcessDepthImage(sourceIndex);
         yield return(null);
     }
     // Release the compute shader.
     ReleasePerCall();
     // Reset the progress bar.
     GeneralToolkit.ResetCancelableProgressBar(true, false);
 }
        /// <summary>
        /// Coroutine that merges the color data into a single texture map per submesh, and stores this information as a texture asset.
        /// </summary>
        /// <returns></returns>
        private IEnumerator StoreGlobalTextureMapCoroutine()
        {
            // Reset the progress bar.
            GeneralToolkit.ResetCancelableProgressBar(true, false);
            // Per call, fetch the assets and initialize a camera and materials.
            InitializePerCall();
            yield return(null);

            // Create and save a texture map for each submesh of the global mesh.
            int textureMapCount = PMGlobalMeshEF.globalMesh.subMeshCount;

            textureMaps = new Texture2D[textureMapCount];
            for (_submeshIndex = 0; _submeshIndex < textureMapCount; _submeshIndex++)
            {
                // Check if the progress bar has been canceled.
                if (GeneralToolkit.progressBarCanceled)
                {
                    processingCaller.processingCanceled = true;
                    break;
                }
                // Update the message on the progress bar.
                DisplayAndUpdateCancelableProgressBar();
                // Per submesh, determine the texture map resolution, compute the texture map, and save it as an asset.
                ComputeTextureMapResolution();
                ComputeAndStoreTextureMap();
                yield return(null);
            }
            // Destroy all objects created per call.
            ClearPerCall();
            // Reset the progress bar.
            GeneralToolkit.ResetCancelableProgressBar(true, false);
        }
Example 3
        /// <summary>
        /// Coroutine that performs automatic retopology on a given mesh using the Instant Meshes implementation.
        /// </summary>
        /// <param name="caller">The object calling this method.</param>
        /// <param name="workspace">The workspace from which to launch the command.</param>
        /// <param name="inputFilePath">The full path to the input .PLY or .OBJ file.</param>
        /// <param name="outputFilePath">The full path to the output .PLY or .OBJ file.</param>
        /// <param name="blenderHelper">The helper component for Blender.</param>
        /// <returns></returns>
        public static IEnumerator RunInstantMeshesCoroutine(MonoBehaviour caller, string workspace, string inputFilePath, string outputFilePath, BlenderHelper blenderHelper)
        {
            // Indicate to the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            // Initialize the command parameters.
            bool displayProgressBar = true;
            bool stopOnError        = true;

            string[] progressBarParams = new string[3];
            progressBarParams[0] = "2";
            progressBarParams[1] = "Automatic retopology";
            progressBarParams[2] = "Processing canceled by user.";
            // Prepare the command.
            string formattedExePath = InstantMeshesSettings.formattedInstantMeshesExePath;
            string command          = "CALL " + formattedExePath;

            command += " --output " + GeneralToolkit.FormatPathForCommand(outputFilePath);
            command += " --deterministic --boundaries --rosy 6 --posy 6";
            // If there is a Blender helper, use the determined mesh face count to define the desired face count.
            if (blenderHelper != null && blenderHelper.meshFaceCount != -1)
            {
                command += " --faces " + GeneralToolkit.ToString(blenderHelper.meshFaceCount);
            }
            // Launch the command.
            command += " " + GeneralToolkit.FormatPathForCommand(inputFilePath);
            yield return(caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(InstantMeshesConnector), command, workspace, displayProgressBar, null, null, stopOnError, progressBarParams)));

            // If there is a Blender helper, update the mesh's face count.
            if (blenderHelper != null)
            {
                blenderHelper.CheckOBJMeshInfo(workspace);
            }
            // Indicate to the user that the process has ended.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
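
For reference, a caller might drive this coroutine roughly as follows. The RetopologyRunner component, its path fields, and the assumption that the method lives on an InstantMeshesConnector class (the type passed to the command runner above) are illustrative, not part of the toolkit.

        using System.Collections;
        using UnityEngine;

        // Hypothetical caller: launches Instant Meshes retopology on a mesh file at runtime.
        public class RetopologyRunner : MonoBehaviour
        {
            public string workspace;             // Directory from which to run the command.
            public string inputFilePath;         // Full path to the input .PLY or .OBJ file.
            public string outputFilePath;        // Full path to the output .PLY or .OBJ file.
            public BlenderHelper blenderHelper;  // Optional; may be left null.

            private IEnumerator Start()
            {
                // The connector only needs a MonoBehaviour on which to run the nested command coroutine.
                yield return StartCoroutine(InstantMeshesConnector.RunInstantMeshesCoroutine(this, workspace, inputFilePath, outputFilePath, blenderHelper));
            }
        }
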
Example 4
 /// <summary>
 /// Initializes the parameters of a coroutine that launches a Blender python command.
 /// </summary>
 /// <param name="clearConsole">True if the console should be cleared, false otherwise.</param>
 /// <param name="displayProgressBar">Outputs whether to display a progress bar.</param>
 /// <param name="stopOnError">Outputs whether to stop on error.</param>
 /// <param name="progressBarParams">Outputs the parameters for the progress bar.</param>
 private static void InitBlenderCoroutineParams(bool clearConsole, out bool displayProgressBar, out bool stopOnError, out string[] progressBarParams)
 {
     // Indicate to the user that the process has started.
     GeneralToolkit.ResetCancelableProgressBar(true, clearConsole);
     // Initialize the command parameters.
     displayProgressBar   = true;
     stopOnError          = true;
     progressBarParams    = new string[3];
     progressBarParams[0] = "2";
     progressBarParams[2] = "Processing canceled by user.";
 }
        /// <summary>
        /// Coroutine that runs the dense reconstruction process.
        /// </summary>
        /// <param name="caller">The object calling this method.</param>
        /// <param name="workspace">The workspace from which to perform this method.</param>
        /// <returns></returns>
        public static IEnumerator RunDenseReconstructionCoroutine(MonoBehaviour caller, string workspace)
        {
            // Indicate to the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            // Initialize the command parameters.
            bool displayProgressBar = true;
            bool stopOnError        = true;

            string[] progressBarParams = new string[3];
            int      maxStep           = 3;

            progressBarParams[0] = GeneralToolkit.ToString(maxStep);
            progressBarParams[2] = "Processing canceled by user.";
            // Launch the different steps of the reconstruction process.
            for (int step = 1; step <= maxStep; step++)
            {
                // Step one: launch stereo.
                if (step == 1)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Stereo", false, step, maxStep);
                    yield return(caller.StartCoroutine(RunStereoCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams)));
                }
                // Step two: launch fusion.
                else if (step == 2)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Fusion", false, step, maxStep);
                    yield return(caller.StartCoroutine(RunFusionCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams)));
                }
                // Step three: launch Delaunay meshing, which exports the mesh as a .PLY file.
                else if (step == 3)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Delaunay meshing", false, step, maxStep);
                    yield return(caller.StartCoroutine(RunDelaunayMeshingCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams)));
                }
                // For each step, continue only if the user does not cancel the process.
                if (GeneralToolkit.progressBarCanceled)
                {
                    break;
                }
            }
            // If the process completed without being canceled, inform the user.
            if (!GeneralToolkit.progressBarCanceled)
            {
                Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(COLMAPConnector), "Dense reconstruction was a success."));
            }
            // Indicate to the user that the process has ended.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
        /// <summary>
        /// Coroutine that applies the Smart UV Project algorithm to the given .OBJ file.
        /// </summary>
        /// <param name="caller">The Blender helper calling this method.</param>
        /// <param name="inputFilePath">The full path to the input .OBJ file.</param>
        /// <param name="outputFilePath">The full path to the output .OBJ file.</param>
        /// <returns></returns>
        public static IEnumerator RunSmartUVProjectOBJCoroutine(BlenderHelper caller, string inputFilePath, string outputFilePath)
        {
            // Indicate to the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            // Initialize the coroutine parameters.
            bool displayProgressBar; bool stopOnError; string[] progressBarParams;

            InitBlenderCoroutineParams(true, out displayProgressBar, out stopOnError, out progressBarParams);
            progressBarParams[1] = "Smart UV Project on .OBJ";
            // Launch the command.
            string command = FormatBlenderCommand(_smartUVProjectOBJFileName, inputFilePath, outputFilePath);

            yield return(caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(BlenderConnector), command, _externalPythonScriptsDir, displayProgressBar, null, _harmlessWarnings, stopOnError, progressBarParams)));

            // Display the uv-mapped mesh in the Scene view.
            caller.DisplayMeshInSceneView(outputFilePath);
            // Indicate to the user that the process has ended.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
        // /// <summary>
        // /// Coroutine that checks an .OBJ's mesh information.
        // /// </summary>
        // /// <param name="caller"></param> The object calling this method.
        // /// <param name="inputFilePath"></param> The full path to the input .OBJ file.
        // /// <param name="storeFaceCount"></param> Action that stores the mesh's face count.
        // /// <returns></returns>
        // public static IEnumerator RunCheckOBJMeshInfoCoroutine(MonoBehaviour caller, string inputFilePath, System.Diagnostics.DataReceivedEventHandler storeFaceCount)
        // {
        //     // Indicate to the user that the process has started.
        //     GeneralToolkit.ResetCancelableProgressBar(true, false);
        //     // Initialize the coroutine parameters.
        //     bool displayProgressBar; bool stopOnError; string[] progressBarParams;
        //     InitBlenderCoroutineParams(false, out displayProgressBar, out stopOnError, out progressBarParams);
        //     progressBarParams[1] = "Check .OBJ mesh information";
        //     // Launch the command.
        //     string command = FormatBlenderCommand(_checkOBJMeshInfoFileName, inputFilePath);
        //     yield return caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(BlenderConnector), command, _externalPythonScriptsDir, displayProgressBar, storeFaceCount, _harmlessWarnings, stopOnError, progressBarParams));
        //     // Indicate to the user that the process has ended.
        //     GeneralToolkit.ResetCancelableProgressBar(false, false);
        // }

        /// <summary>
        /// Coroutine that simplifies the mesh in a .OBJ file using the decimation modifier.
        /// </summary>
        /// <param name="caller">The Blender helper calling this method.</param>
        /// <param name="inputFilePath">The full path to the input .OBJ file.</param>
        /// <param name="outputFilePath">The full path to the output .OBJ file.</param>
        /// <param name="storeFaceCount">Action that stores the mesh's face count.</param>
        /// <returns></returns>
        public static IEnumerator RunSimplifyOBJCoroutine(BlenderHelper caller, string inputFilePath, string outputFilePath, System.Diagnostics.DataReceivedEventHandler storeFaceCount)
        {
            // Indicate to the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            // Initialize the coroutine parameters.
            bool displayProgressBar; bool stopOnError; string[] progressBarParams;

            InitBlenderCoroutineParams(true, out displayProgressBar, out stopOnError, out progressBarParams);
            progressBarParams[1] = "Simplify .OBJ";
            // Launch the command.
            string command = FormatBlenderCommand(_simplifyOBJFileName, inputFilePath, outputFilePath);

            yield return(caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(BlenderConnector), command, _externalPythonScriptsDir, displayProgressBar, storeFaceCount, _harmlessWarnings, stopOnError, progressBarParams)));

            // Display the simplified mesh in the Scene view.
            caller.DisplayMeshInSceneView(outputFilePath);
            // Indicate to the user that the process has ended.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
Example 8
 /// <summary>
 /// Coroutine that renders per-view meshes from the given depth texture array.
 /// </summary>
 /// <returns></returns>
 private IEnumerator StorePerViewMeshesCoroutine()
 {
     // Reset the progress bar.
     GeneralToolkit.ResetCancelableProgressBar(true, false);
     // Initialize the compute shader's properties.
     PMPerViewMeshesQSTR.InitializePerCall();
     // Create a mesh for each source depth map.
     for (int sourceCamIndex = 0; sourceCamIndex < cameraSetup.cameraModels.Length; sourceCamIndex++)
     {
         // Check if the asset has already been processed.
         string bundledAssetName = dataHandler.GetBundledAssetName(PMPerViewMeshesQSTR, PerViewMeshesQSTR.perViewMeshAssetPrefix + sourceCamIndex);
         string meshRelativePath = Path.Combine(GeneralToolkit.tempDirectoryRelativePath, bundledAssetName + ".asset");
         if (dataHandler.IsAssetAlreadyProcessed(meshRelativePath))
         {
             continue;
         }
         // Update the progress bar, and enable the user to cancel the process.
         PMPerViewMeshesQSTR.DisplayAndUpdateCancelableProgressBar();
         if (GeneralToolkit.progressBarCanceled)
         {
             processingCaller.processingCanceled = true;
             break;
         }
         // Update the camera model.
         PMPerViewMeshesQSTR.cameraModel = cameraSetup.cameraModels[sourceCamIndex];
         // Initialize the distance map texture, and load the depth data into it.
         PMPerViewMeshesQSTR.InitializeDistanceMap();
         Vector2Int    distanceMapResolution  = new Vector2Int(PMPerViewMeshesQSTR.distanceMap.width, PMPerViewMeshesQSTR.distanceMap.height);
         RenderTexture depthTextureArraySlice = new RenderTexture(1, 1, 0);
         GeneralToolkit.CreateRenderTexture(ref depthTextureArraySlice, distanceMapResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point);
         Graphics.Blit(PMDepthTextureArray.depthData, depthTextureArraySlice, sourceCamIndex, 0);
         GeneralToolkit.CopyRenderTextureToTexture2D(depthTextureArraySlice, ref PMPerViewMeshesQSTR.distanceMap);
         DestroyImmediate(depthTextureArraySlice);
         // Compute a mesh from the distance map.
         Mesh meshAsset;
         PMPerViewMeshesQSTR.ComputeMesh(out meshAsset);
         // Save this mesh as an asset.
         GeneralToolkit.CreateAndUnloadAsset(meshAsset, meshRelativePath);
         yield return(null);
     }
     // Reset the progress bar.
     GeneralToolkit.ResetCancelableProgressBar(true, false);
 }
Example 9
        /// <summary>
        /// Coroutine that performs automatic retopology on a given mesh using the Instant Meshes implementation.
        /// </summary>
        /// <param name="caller">The Instant Meshes helper calling this method.</param>
        /// <param name="workspace">The workspace from which to launch the command.</param>
        /// <param name="inputFilePath">The full path to the input .PLY or .OBJ file.</param>
        /// <param name="outputFilePath">The full path to the output .PLY or .OBJ file.</param>
        /// <param name="reduceVertexCount">Whether to reduce the vertex count to the recommended value.</param>
        /// <returns></returns>
        public static IEnumerator RunInstantMeshesCoroutine(InstantMeshesHelper caller, string workspace, string inputFilePath, string outputFilePath, bool reduceVertexCount)
        {
            // Indicate to the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            // If the initial face count is needed, display the input mesh to get it.
            if (!reduceVertexCount)
            {
                caller.DisplayMeshInSceneView(inputFilePath);
            }
            // Initialize the command parameters.
            bool displayProgressBar = true;
            bool stopOnError        = true;

            string[] progressBarParams = new string[3];
            progressBarParams[0] = "2";
            progressBarParams[1] = "Automatic retopology";
            progressBarParams[2] = "Processing canceled by user.";
            // Prepare the command.
            string formattedExePath = InstantMeshesSettings.formattedInstantMeshesExePath;
            string command          = "CALL " + formattedExePath;

            command += " --output " + GeneralToolkit.FormatPathForCommand(outputFilePath);
            command += " --deterministic --boundaries --rosy 6 --posy 6";
            // If the vertex count should not be reduced, use the loaded mesh's face count as the target.
            if (!reduceVertexCount)
            {
                command += " --faces " + GeneralToolkit.ToString(caller.loadedMeshFaceCount);
            }
            // Launch the command.
            command += " " + GeneralToolkit.FormatPathForCommand(inputFilePath);
            yield return(caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(InstantMeshesConnector), command, workspace, displayProgressBar, null, null, stopOnError, progressBarParams)));

            // Display the transformed mesh in the Scene view.
            caller.DisplayMeshInSceneView(outputFilePath);
            // Indicate to the user that the process has ended.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
        /// <summary>
        /// Coroutine that renders depth maps for each view using a given global mesh, and stores this information as a depth texture array.
        /// </summary>
        /// <returns></returns>
        private IEnumerator StoreDepthMapTextureArrayCoroutine()
        {
            // Get the processed asset's name and path in the bundle.
            string bundledAssetName      = GetBundledAssetName(depthMapsAssetName);
            string depthDataPathRelative = GetAssetPathRelative(bundledAssetName);

            // Check if the asset has already been processed.
            if (!dataHandler.IsAssetAlreadyProcessed(depthDataPathRelative))
            {
                // Reset the progress bar.
                GeneralToolkit.ResetCancelableProgressBar(true, false);
                // Create and initialize a temporary preview camera manager aimed at storing depth data.
                PreviewCameraManager previewCameraManager   = new GameObject("TempPreviewCameraManager").AddComponent <PreviewCameraManager>();
                Transform            previewCameraTransform = new GameObject("TempPreviewCamera").transform;
                GeneralToolkit.CreateRenderTexture(ref previewCameraManager.targetTexture, Vector2Int.one, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
                previewCameraManager.CreatePreviewCamera(previewCameraManager.gameObject, previewCameraTransform, cameraSetup.cameraModels[0]);
                // Instantiate the mesh as a set of submeshes, provided with the default material.
                Material     defaultMat = new Material(GeneralToolkit.shaderStandard);
                GameObject[] submeshGOs = new GameObject[PMGlobalMeshEF.globalMesh.subMeshCount];
                for (int i = 0; i < submeshGOs.Length; i++)
                {
                    submeshGOs[i] = new GameObject("TempMesh_" + i);
                    submeshGOs[i].transform.parent = previewCameraManager.transform;
                    submeshGOs[i].AddComponent <MeshFilter>().sharedMesh = PMGlobalMeshEF.globalMesh;
                    Material[] materials = new Material[submeshGOs.Length];
                    materials[i] = defaultMat;
                    submeshGOs[i].AddComponent <MeshRenderer>().materials = materials;
                }
                // Create an empty texture array in which to store the depth data.
                Vector2Int arrayResolution; int arrayDepth;
                ColorTextureArray.GetCorrectedPowerOfTwoForImages(cameraSetup.cameraModels, out arrayResolution, out arrayDepth);
                depthData = new Texture2DArray(1, 1, 1, TextureFormat.RGB24, false);
                GeneralToolkit.CreateTexture2DArray(ref depthData, arrayResolution, arrayDepth, TextureFormat.RGB24, false, FilterMode.Point, TextureWrapMode.Clamp, false);
                // Create a render texture in which to store RFloat depth data, with the array's resolution.
                RenderTexture arraySliceRFloatRenderTex = new RenderTexture(1, 1, 0);
                GeneralToolkit.CreateRenderTexture(ref arraySliceRFloatRenderTex, arrayResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
                // Create a material and render texture to encode the RFloat distance as a RGB color.
                Material      distanceToColorMat     = new Material(GeneralToolkit.shaderAcquisitionConvert01ToColor);
                RenderTexture distanceAsColorTexture = new RenderTexture(1, 1, 0);
                GeneralToolkit.CreateRenderTexture(ref distanceAsColorTexture, arrayResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
                // Create a texture in which to store the RGB-encoded distance.
                Texture2D arraySliceRGBTex = new Texture2D(1, 1);
                GeneralToolkit.CreateTexture2D(ref arraySliceRGBTex, arrayResolution, TextureFormat.RGB24, true, FilterMode.Point, TextureWrapMode.Clamp, false);
                // Create a depth map in each layer of the texture array, corresponding to each source camera.
                for (int i = 0; i < arrayDepth; i++)
                {
                    // Update the progress bar, and enable the user to cancel the process.
                    DisplayAndUpdateCancelableProgressBar();
                    if (GeneralToolkit.progressBarCanceled)
                    {
                        processingCaller.processingCanceled = true;
                        break;
                    }
                    // Set the preview camera manager's camera model to the current source camera.
                    previewCameraManager.UpdateCameraModel(cameraSetup.cameraModels[i]);
                    // Render the depth data seen by this camera as an RFloat texture.
                    previewCameraManager.RenderPreviewToTarget(ref previewCameraManager.targetTexture, true);
                    // Resize the rendered texture to the output array's resolution.
                    Graphics.Blit(previewCameraManager.targetTexture, arraySliceRFloatRenderTex);
                    // Convert the resized RFloat texture to an RGB encoding.
                    Graphics.Blit(arraySliceRFloatRenderTex, distanceAsColorTexture, distanceToColorMat);
                    // Store the RGB color texture into the texture array.
                    GeneralToolkit.CopyRenderTextureToTexture2D(distanceAsColorTexture, ref arraySliceRGBTex);
                    depthData.SetPixels(arraySliceRGBTex.GetPixels(), i);
                    depthData.Apply();
                    yield return(null);
                }
                // If the user has not canceled the process, continue.
                if (!GeneralToolkit.progressBarCanceled)
                {
                    // Create an asset from this texture array.
                    AssetDatabase.CreateAsset(depthData, depthDataPathRelative);
                    AssetDatabase.Refresh();
                }
                // Destroy the created textures and conversion material.
                DestroyImmediate(arraySliceRGBTex);
                DestroyImmediate(distanceAsColorTexture);
                DestroyImmediate(distanceToColorMat);
                DestroyImmediate(arraySliceRFloatRenderTex);
                // Destroy the created meshes and default material.
                DestroyImmediate(defaultMat);
                foreach (GameObject submeshGO in submeshGOs)
                {
                    DestroyImmediate(submeshGO);
                }
                // Destroy the preview camera manager.
                previewCameraManager.DestroyPreviewCamera();
                DestroyImmediate(previewCameraManager.gameObject);
                // Reset the progress bar.
                GeneralToolkit.ResetCancelableProgressBar(true, false);
            }
            Texture2DArray depthDataAsset = AssetDatabase.LoadAssetAtPath <Texture2DArray>(depthDataPathRelative);

            depthData = (Texture2DArray)Instantiate(depthDataAsset);
        }
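
The conversion material above packs an RFloat distance in [0,1] into the 24 bits of an RGB24 texel. The toolkit's shader is not shown here, so its exact packing is unknown; the standalone sketch below only illustrates the idea with an integer-based encoding.

        using UnityEngine;

        public static class DistanceColorCodec
        {
            // Pack a normalized distance in [0,1] into 24 bits spread over the R, G and B channels.
            public static Color32 Encode(float distance01)
            {
                uint quantized = (uint)Mathf.Round(Mathf.Clamp01(distance01) * 16777215f); // 2^24 - 1.
                return new Color32((byte)(quantized >> 16), (byte)((quantized >> 8) & 0xFF), (byte)(quantized & 0xFF), 255);
            }

            // Recover the normalized distance from the three 8-bit channels.
            public static float Decode(Color32 encoded)
            {
                uint quantized = ((uint)encoded.r << 16) | ((uint)encoded.g << 8) | encoded.b;
                return quantized / 16777215f;
            }
        }
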
Example 11
        /// <summary>
        /// Processes color and/or depth data.
        /// </summary>
        /// <returns></returns>
        private IEnumerator ProcessDataCoroutine()
        {
            // Inform the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(Processing), "Started processing data."));
            processingCanceled = false;
            // Save the current position, rotation, and scale for later.
            Vector3    position = transform.position;
            Quaternion rotation = transform.rotation;
            Vector3    scale    = transform.localScale;

            // Reset the transform, and wait for the camera models to be updated accordingly.
            GeneralToolkit.SetTransformValues(transform, false, Vector3.zero, Quaternion.identity, Vector3.one);
            yield return(null);

            // Create the processed data directory and initialize the processing information file.
            dataHandler.CreateProcessingInfoFile();
            // Move the processed data directory to the temporary directory.
            GeneralToolkit.Move(PathType.Directory, dataHandler.processedDataDirectory, GeneralToolkit.tempDirectoryAbsolutePath, false);
            AssetDatabase.Refresh();
            if (!Directory.Exists(GeneralToolkit.tempDirectoryAbsolutePath))
            {
                processingCanceled = true;
            }
            // Execute the processing methods.
            for (int iter = 0; iter < processingMethods.Length; iter++)
            {
                ProcessingMethod processingMethod = processingMethods[iter];
                if (!processingMethod.IsGUINested() && processingMethod.shouldExecute)
                {
                    yield return(StartCoroutine(processingMethod.ExecuteAndDisplayLog()));
                }
                if (processingCanceled)
                {
                    break;
                }
            }
            // Unload loaded assets.
            Resources.UnloadUnusedAssets();
            EditorUtility.UnloadUnusedAssetsImmediate();
            yield return(null);

            // Move the processed data directory back to its original position.
            GeneralToolkit.Move(PathType.Directory, GeneralToolkit.tempDirectoryAbsolutePath, dataHandler.processedDataDirectory, false);
            AssetDatabase.Refresh();
            // Update the processed asset information.
            dataHandler.UpdateProcessedAssets();
            // Inform the user of the end of the process.
            if (!processingCanceled)
            {
                Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(Processing), "Finished processing data."));
            }
            else
            {
                Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(Processing), "Processing was canceled."));
            }
            // Check whether data was processed.
            dataHandler.CheckStatusOfDataProcessingAndBundling(out isDataReadyForBundling, out isDataBundled, out processedDataInfo);
            // Return the transform's values to their previous ones.
            GeneralToolkit.SetTransformValues(transform, false, position, rotation, scale);
            // Reset the progress bar.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
Example 12
        /// <inheritdoc/>
        protected override IEnumerator ExecuteMethodCoroutine()
        {
            // Get the processed asset's name and path in the bundle.
            string bundledAssetName      = GetBundledAssetName(colorDataAssetName);
            string colorDataPathRelative = GetAssetPathRelative(bundledAssetName);

            // Check if the asset has already been processed.
            if (!dataHandler.IsAssetAlreadyProcessed(colorDataPathRelative))
            {
                // Reset the progress bar.
                GeneralToolkit.ResetCancelableProgressBar(true, false);
                // Determine the resolution and depth that should be given to the texture array.
                Vector2Int arrayResolution; int arrayDepth;
                GetCorrectedPowerOfTwoForImages(cameraSetup.cameraModels, out arrayResolution, out arrayDepth);
                // Create an empty texture array.
                colorData = new Texture2DArray(1, 1, 1, TextureFormat.RGBA32, useMipMaps);
                GeneralToolkit.CreateTexture2DArray(ref colorData, arrayResolution, arrayDepth, TextureFormat.RGB24, false, FilterMode.Point, TextureWrapMode.Clamp, useMipMaps);
                // Create an empty texture, with the array's resolution.
                Texture2D arraySlice = new Texture2D(1, 1);
                GeneralToolkit.CreateTexture2D(ref arraySlice, arrayResolution, TextureFormat.RGB24, false, FilterMode.Point, TextureWrapMode.Clamp, useMipMaps);
                // Create an empty texture, in which we will load the set of source images one-by-one.
                Texture2D loadTex = new Texture2D(1, 1);
                // Process as many images as possible from the set of source images.
                for (int i = 0; i < arrayDepth; i++)
                {
                    // Update the progress bar, and enable the user to cancel the process.
                    DisplayAndUpdateCancelableProgressBar();
                    if (GeneralToolkit.progressBarCanceled)
                    {
                        processingCaller.processingCanceled = true;
                        break;
                    }
                    // Load the camera model.
                    CameraModel cameraModel = cameraSetup.cameraModels[i];
                    // Load the image into a texture object.
                    string imagePath = Path.Combine(dataHandler.colorDirectory, cameraModel.imageName);
                    GeneralToolkit.CreateTexture2D(ref loadTex, cameraModel.pixelResolution, TextureFormat.RGB24, false, FilterMode.Point, TextureWrapMode.Clamp, useMipMaps);
                    GeneralToolkit.LoadTexture(imagePath, ref loadTex);
                    // Resize the texture so that it fits the array's resolution.
                    GeneralToolkit.ResizeTexture2D(loadTex, ref arraySlice);
                    // Add the texture to the texture array.
                    colorData.SetPixels(arraySlice.GetPixels(), i);
                    colorData.Apply();
                    yield return(null);
                }
                // If the user has not canceled the process, continue.
                if (!GeneralToolkit.progressBarCanceled)
                {
                    // Create an asset from this texture array.
                    AssetDatabase.CreateAsset(colorData, colorDataPathRelative);
                    AssetDatabase.Refresh();
                }
                // Destroy created objects.
                DestroyImmediate(loadTex);
                DestroyImmediate(arraySlice);
                // Reset the progress bar.
                GeneralToolkit.ResetCancelableProgressBar(true, false);
            }
            Texture2DArray colorDataAsset = AssetDatabase.LoadAssetAtPath <Texture2DArray>(colorDataPathRelative);

            colorData = (Texture2DArray)Instantiate(colorDataAsset);
        }
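
Stripped of the toolkit's helper calls, the core of this step is the standard Unity pattern for filling a Texture2DArray slice by slice and saving it as an asset. A minimal editor-side sketch (class name, format and path are illustrative) follows.

        using UnityEditor;
        using UnityEngine;

        public static class TextureArrayBuilder
        {
            // Builds a Texture2DArray from same-sized, readable source textures and saves it as an asset.
            public static void CreateArrayAsset(Texture2D[] sources, string assetPathRelative)
            {
                Texture2DArray array = new Texture2DArray(sources[0].width, sources[0].height, sources.Length, TextureFormat.RGB24, false);
                for (int i = 0; i < sources.Length; i++)
                {
                    // Every source must already match the array's resolution (resize beforehand if needed).
                    array.SetPixels(sources[i].GetPixels(), i);
                }
                array.Apply();
                // Example path: "Assets/ColorData.asset".
                AssetDatabase.CreateAsset(array, assetPathRelative);
                AssetDatabase.Refresh();
            }
        }

        /// <summary>
        /// Coroutine that executes the given coroutine, then waits until allocated and reserved memory return close to their initial values.
        /// </summary>
        /// <param name="executeCoroutine">The coroutine to execute before waiting for memory release.</param>
        /// <returns></returns>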
        public IEnumerator ExecuteAndWaitForMemoryRelease(IEnumerator executeCoroutine)
        {
            // Reset the progress bar.
            GeneralToolkit.ResetCancelableProgressBar(true, false);
            // Display the progress bar.
            string progressBarTitle = "COLIBRI VR - Wait for memory cleanup";
            string progressBarInfo  = "Waiting for memory cleanup...";
            string exitMessage      = "Skipping wait for memory cleanup.";

            GeneralToolkit.UpdateCancelableProgressBar(typeof(ProcessingMethod), true, false, false, 2, progressBarTitle, progressBarInfo, exitMessage);
            // Get the initial memory.
            float BtoGB = Mathf.Pow(10, -9);
            float initialAllocatedMemoryGB = UnityEngine.Profiling.Profiler.GetTotalAllocatedMemoryLong() * BtoGB;
            float initialReservedMemoryGB  = UnityEngine.Profiling.Profiler.GetTotalReservedMemoryLong() * BtoGB;

            // Execute the process.
            yield return(StartCoroutine(executeCoroutine));

            // Prepare waiting for memory release.
            int   averageOverNFrames           = 20;
            int   currentFrameIter             = 0;
            int   thresholdWaitTimeMs          = 10000;
            float thresholdMemoryDiffGB        = 0.001f;
            float currentAllocatedMemoryDiffGB = 2 * thresholdMemoryDiffGB;
            float currentReservedMemoryDiffGB  = currentAllocatedMemoryDiffGB;
            float iterAllocatedMemoryGB        = 0;
            float iterReservedMemoryGB         = 0;

            // Start the stopwatch.
            System.Diagnostics.Stopwatch stopwatch = new System.Diagnostics.Stopwatch();
            stopwatch.Start();
            // Continue while the memory hasn't been released.
            while ((currentAllocatedMemoryDiffGB > thresholdMemoryDiffGB || currentReservedMemoryDiffGB > thresholdMemoryDiffGB) && !GeneralToolkit.progressBarCanceled)
            {
                // Get an average over several frames of the current memory.
                float frameAllocatedMemoryGB = UnityEngine.Profiling.Profiler.GetTotalAllocatedMemoryLong() * BtoGB;
                iterAllocatedMemoryGB += frameAllocatedMemoryGB;
                float frameReservedMemoryGB = UnityEngine.Profiling.Profiler.GetTotalReservedMemoryLong() * BtoGB;
                iterReservedMemoryGB += frameReservedMemoryGB;
                currentFrameIter++;
                if (currentFrameIter >= averageOverNFrames)
                {
                    float currentAllocatedMemoryGB = iterAllocatedMemoryGB / currentFrameIter;
                    currentAllocatedMemoryDiffGB = Mathf.Max(0, currentAllocatedMemoryGB - initialAllocatedMemoryGB);
                    float currentReservedMemoryGB = iterReservedMemoryGB / currentFrameIter;
                    currentReservedMemoryDiffGB = Mathf.Max(0, currentReservedMemoryGB - initialReservedMemoryGB);
                    iterAllocatedMemoryGB       = 0;
                    iterReservedMemoryGB        = 0;
                    currentFrameIter            = 0;
                    // If too long a time has elapsed, exit with a warning.
                    if (stopwatch.ElapsedMilliseconds > thresholdWaitTimeMs)
                    {
                        string warningMessage = "This object may be leaking memory during processing. ";
                        warningMessage += "Allocated went from " + initialAllocatedMemoryGB + "GB to " + currentAllocatedMemoryGB + "GB. ";
                        warningMessage += "Reserved went from " + initialReservedMemoryGB + "GB to " + currentReservedMemoryGB + "GB.";
                        Debug.LogWarning(GeneralToolkit.FormatScriptMessage(this.GetType(), warningMessage));
                        break;
                    }
                }
                yield return(null);
            }
            // Stop the stopwatch.
            stopwatch.Stop();
            // Reset the progress bar.
            GeneralToolkit.ResetCancelableProgressBar(true, false);
        }
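
A processing step would typically be wrapped by this helper from within the same class; the call site below is a hypothetical illustration, not taken from the toolkit.

            // Hypothetical call site: run the step, then wait until allocated and reserved memory settle.
            yield return StartCoroutine(ExecuteAndWaitForMemoryRelease(ExecuteMethodCoroutine()));
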
        /// <summary>
        /// Moves the acquisition camera into the desired poses, and acquires the corresponding color and depth data.
        /// </summary>
        /// <returns></returns>
        private IEnumerator CaptureSceneCoroutine()
        {
            // Inform the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(Acquisition), "Started acquiring scene data."));
            // Find the camera models if needed.
            if (cameraSetup.cameraModels == null)
            {
                cameraSetup.FindCameraModels();
            }
            if (cameraSetup.cameraModels == null)
            {
                yield break;
            }
            // Store the initial preview index.
            int initialPreviewIndex = cameraSetup.previewIndex;

            // Store the pose data and camera parameters into a file.
            SaveAcquisitionInformation(dataHandler, cameraSetup);
            // If desired, save the scene's meshes as a global asset.
            if (_copyGlobalMesh)
            {
                SaveGlobalMesh();
            }
            // Acquire data for each source pose.
            for (int i = 0; i < cameraSetup.cameraModels.Length; i++)
            {
                // Display and update the progress bar.
                DisplayAndUpdateCancelableProgressBar();
                if (GeneralToolkit.progressBarCanceled)
                {
                    break;
                }
                // Change the preview index to the current camera.
                cameraSetup.previewIndex = i;
                string imageName = cameraSetup.cameraModels[i].imageName;
                // Update the camera model for the preview camera, thereby rendering to the target color and depth textures.
                UpdatePreviewCameraModel(true);
                // Save the color texture as a file.
                GeneralToolkit.SaveRenderTextureToPNG(_previewCameraManager.targetTexture, Path.Combine(dataHandler.colorDirectory, imageName));
                // If depth data is to be acquired, save the scene's depth (encoded as a 3-channel RGB texture) as a file.
                if (_acquireDepthData)
                {
                    _distanceToColorMat.SetInt(shaderNameIsPrecise, 1);
                    Graphics.Blit(_targetDepthTexture, _distanceAsColorTexture, _distanceToColorMat);
                    GeneralToolkit.SaveRenderTextureToPNG(_distanceAsColorTexture, Path.Combine(dataHandler.depthDirectory, imageName));
                }
                yield return(null);
            }
            // If the process completes without being canceled, inform the user.
            if (!GeneralToolkit.progressBarCanceled)
            {
                Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(Acquisition), "Successfully acquired scene data."));
                Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(Acquisition), "Data can be found in directory: " + dataHandler.dataDirectory + "."));
            }
            // Reset displayed information.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
            // Reset the preview camera's pose.
            cameraSetup.previewIndex = initialPreviewIndex;
            UpdatePreviewCameraModel(false);
            UnityEditorInternal.InternalEditorUtility.RepaintAllViews();
        }
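
GeneralToolkit.SaveRenderTextureToPNG is a toolkit helper whose implementation is not shown above; saving a RenderTexture to a PNG in Unity generally follows the CPU readback pattern sketched below (the helper's actual code may differ).

        using System.IO;
        using UnityEngine;

        public static class RenderTextureIO
        {
            // Reads a RenderTexture back to the CPU and writes it to disk as a PNG file.
            public static void SaveToPNG(RenderTexture source, string filePath)
            {
                RenderTexture previousActive = RenderTexture.active;
                RenderTexture.active = source;
                Texture2D readback = new Texture2D(source.width, source.height, TextureFormat.RGB24, false);
                readback.ReadPixels(new Rect(0, 0, source.width, source.height), 0, 0);
                readback.Apply();
                RenderTexture.active = previousActive;
                File.WriteAllBytes(filePath, readback.EncodeToPNG());
                Object.DestroyImmediate(readback);
            }
        }
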
        /// <summary>
        /// Coroutine that runs the sparse reconstruction process.
        /// </summary>
        /// <param name="caller">The processing object calling this method.</param>
        /// <param name="workspace">The workspace from which to perform this method.</param>
        /// <param name="COLMAPCameraIndex">The index of the type of source camera (in the list of COLMAP cameras).</param>
        /// <param name="isSingleCamera">True if the source images were acquired by the same camera, false otherwise.</param>
        /// <param name="maxImageSize">The maximum image size for the undistortion step.</param>
        /// <returns></returns>
        public static IEnumerator RunSparseReconstructionCoroutine(Processing.Processing caller, string workspace, int COLMAPCameraIndex, bool isSingleCamera, int maxImageSize)
        {
            // Indicate to the user that the process has started.
            GeneralToolkit.ResetCancelableProgressBar(true, true);
            // Create or clear the folders needed for the reconstruction.
            GeneralToolkit.Delete(GetDatabaseFile(workspace));
            GeneralToolkit.CreateOrClear(PathType.Directory, GetSparseDir(workspace));
            GeneralToolkit.CreateOrClear(PathType.Directory, GetSparse0Dir(workspace));
            GeneralToolkit.CreateOrClear(PathType.Directory, GetDenseDir(workspace));
            GeneralToolkit.CreateOrClear(PathType.Directory, GetDense0Dir(workspace));
            // Initialize the command parameters.
            bool displayProgressBar = true;
            bool stopOnError        = true;

            string[] progressBarParams = new string[3];
            int      maxStep           = 6;

            progressBarParams[0] = GeneralToolkit.ToString(maxStep);
            progressBarParams[2] = "Processing canceled by user.";
            // Launch the different steps of the sparse reconstruction process.
            float focalLengthFactor = 0;

            for (int step = 1; step <= maxStep; step++)
            {
                // Step one: launch feature extraction.
                if (step == 1)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Feature extraction", true, step, maxStep);
                    CameraModel[] cameraModels = caller.cameraSetup.cameraModels;
                    if (cameraModels != null && cameraModels.Length > 0)
                    {
                        CameraModel cameraParams = cameraModels[0];
                        float       focalLength  = Camera.FieldOfViewToFocalLength(cameraParams.fieldOfView.x, cameraParams.pixelResolution.x);
                        focalLengthFactor = focalLength / Mathf.Max(cameraParams.pixelResolution.x, cameraParams.pixelResolution.y);
                    }
                    yield return(caller.StartCoroutine(RunFeatureExtractionCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams, COLMAPCameraIndex, isSingleCamera, focalLengthFactor)));
                }
                // Step two: launch feature matching.
                else if (step == 2)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Feature matching", true, step, maxStep);
                    yield return(caller.StartCoroutine(RunFeatureMatchingCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams)));
                }
                // Step three: launch mapping.
                else if (step == 3)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Mapping", true, step, maxStep);
                    yield return(caller.StartCoroutine(RunMappingCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams, (focalLengthFactor > 0))));
                }
                // Step four: launch exporting original camera setup as text.
                else if (step == 4)
                {
                    progressBarParams[1] = GetProgressBarParamsOne("Exporting camera setup (original) as text", true, step, maxStep);
                    yield return(caller.StartCoroutine(RunExportModelAsTextCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams)));
                }
                // Step five: launch image undistortion.
                else if (step == 5)
                {
                    // Launch undistortion.
                    progressBarParams[1] = GetProgressBarParamsOne("Undistortion", true, step, maxStep);
                    yield return(caller.StartCoroutine(RunUndistortionCommand(caller, workspace, displayProgressBar, stopOnError, progressBarParams, maxImageSize)));
                    // Change the workspace and the data directory to the one created in the dense folder.
                    // workspace = GetDense0Dir(workspace);
                    // caller.dataHandler.ChangeDataDirectory(caller, workspace);
                    // Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(COLMAPConnector), "Changed data directory to: " + workspace));
                }
                // Step six: launch exporting undistorted camera setup as text.
                else if (step == 6)
                {
                    // Launch export process.
                    progressBarParams[1] = GetProgressBarParamsOne("Exporting camera setup (undistorted) as text", true, step, maxStep);
                    yield return(caller.StartCoroutine(RunExportModelAsTextCommand(caller, GetDense0Dir(workspace), displayProgressBar, stopOnError, progressBarParams)));

                    // Display the parsed camera setup in the Scene view.
                    caller.Deselected();
                    caller.Selected();
                    yield return(null);
                }
                // For each step, continue only if the user does not cancel the process.
                if (GeneralToolkit.progressBarCanceled)
                {
                    break;
                }
            }
            // If the process completed without being canceled, inform the user.
            if (!GeneralToolkit.progressBarCanceled)
            {
                Debug.Log(GeneralToolkit.FormatScriptMessage(typeof(COLMAPConnector), "Sparse reconstruction was a success."));
            }
            // Indicate to the user that the process has ended.
            GeneralToolkit.ResetCancelableProgressBar(false, false);
        }
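
For completeness, a Processing component might start the sparse reconstruction roughly as follows; the local variable values and the COLMAPConnector class name (the type passed to FormatScriptMessage above) are assumptions.

            // Hypothetical call site inside a Processing coroutine.
            int  colmapCameraIndex = 0;     // Index of the source camera type in COLMAP's camera list.
            bool isSingleCamera    = true;  // All images were acquired by the same physical camera.
            int  maxImageSize      = 2000;  // Maximum image size for the undistortion step.
            yield return StartCoroutine(COLMAPConnector.RunSparseReconstructionCoroutine(this, dataHandler.dataDirectory, colmapCameraIndex, isSingleCamera, maxImageSize));
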
Example 16
 /// <summary>
 /// Clears a coroutine that launched a Blender python command.
 /// </summary>
 private static void ClearBlenderCoroutine()
 {
     // Indicate to the user that the process has ended.
     GeneralToolkit.ResetCancelableProgressBar(false, false);
 }
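
Together with InitBlenderCoroutineParams, this helper brackets every Blender command, so a new connector coroutine would follow the same pattern as the ones above. In the sketch below, the Python script name constant and the progress bar label are placeholders.

 // Hypothetical connector coroutine following the init / run / clear pattern shown above.
 public static IEnumerator RunSomeBlenderStepCoroutine(BlenderHelper caller, string inputFilePath, string outputFilePath)
 {
     // Initialize the coroutine parameters.
     bool displayProgressBar; bool stopOnError; string[] progressBarParams;
     InitBlenderCoroutineParams(true, out displayProgressBar, out stopOnError, out progressBarParams);
     progressBarParams[1] = "Some Blender step";  // Placeholder progress bar label.
     // Prepare and launch the command (the script file name constant is a placeholder).
     string command = FormatBlenderCommand(_someBlenderStepFileName, inputFilePath, outputFilePath);
     yield return caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(BlenderConnector), command, _externalPythonScriptsDir, displayProgressBar, null, _harmlessWarnings, stopOnError, progressBarParams));
     // Indicate to the user that the process has ended.
     ClearBlenderCoroutine();
 }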