/// <summary>
/// Creates or resets a data handler object as child of the given transform.
/// </summary>
/// <param name="parentTransform"></param> The parent transform.
/// <returns></returns> The data handler object.
public static DataHandler CreateOrResetDataHandler(Transform parentTransform = null)
{
    // Fetch the child component (creating it if needed), then restore its default state.
    DataHandler dataHandler = GeneralToolkit.GetOrCreateChildComponent<DataHandler>(parentTransform);
    dataHandler.Reset();
    return dataHandler;
}
/// <summary>
/// On destroy, saves an update indicator if exiting play mode after reconstruction.
/// </summary>
void OnDestroy()
{
    // Only persist the indicator prefab when a sparse reconstruction was performed
    // and the editor is currently in play mode and about to start a new scene.
    bool shouldSaveIndicator = hasPerformedSparseReconstruction
                               && EditorApplication.isPlaying
                               && GeneralToolkit.IsStartingNewScene();
    if (shouldSaveIndicator)
    {
        string indicatorPath = Path.Combine("Assets", _updateDirectoryIndicatorPathEnd);
        PrefabUtility.SaveAsPrefabAsset(gameObject, indicatorPath);
    }
}
/// <summary>
/// Initializes the distance map as a Texture2D.
/// </summary>
public void InitializeDistanceMap()
{
    // Start from a 1x1 placeholder; CreateTexture2D presumably reconfigures it to the target size.
    distanceMap = new Texture2D(1, 1);
    // Clamp the camera's pixel resolution so the texture fits in memory — TODO confirm CorrectForMemorySize semantics.
    _correctedPixelResolution = cameraModel.pixelResolution;
    CorrectForMemorySize(ref _correctedPixelResolution);
    GeneralToolkit.CreateTexture2D(ref distanceMap, _correctedPixelResolution, TextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp, false);
}
/// <summary>
/// On any camera being rendered to, add the camera to the list of cameras to which transfer data by way of a helper class.
/// </summary>
void OnRenderObject()
{
    Camera currentCamera = Camera.current;
    // Lazily create the list of cameras already set up with a helper.
    if (_helperULRCameras == null)
    {
        _helperULRCameras = new List <Camera>();
    }
    // Register the current camera once: attach a helper component and tell it about this object.
    if (!_helperULRCameras.Contains(currentCamera))
    {
        // Only register once the global mesh is available, since its vertex count is needed.
        if (PMGlobalMeshEF.globalMesh != null)
        {
            Helper_ULRCamera helperULRCamera = GeneralToolkit.GetOrAddComponent <Helper_ULRCamera>(currentCamera.gameObject);
            int totalVertexCount = PMGlobalMeshEF.globalMesh.vertexCount;
            helperULRCamera.AddRenderedObject(this, totalVertexCount);
            _helperULRCameras.Add(currentCamera);
        }
    }
    if (initialized)
    {
        // Update the buffers with the parameters of the source cameras.
        FillULRArrays();
        UpdateULRBuffers();
        // Update the blending material with the buffers.
        UpdateBlendingMaterialParameters(ref currentBlendingMaterial);
#if UNITY_EDITOR
        // Editor-only: color-by-index debug visualization.
        cameraSetup.SetColorIsIndices(ref currentBlendingMaterial);
#endif //UNITY_EDITOR
    }
}
/// <summary>
/// Determines whether the given vehicle, inflated by the given convolution polygon, can remain on one
/// side of the closest lane partition while any of its points lie outside the given polygon.
/// </summary>
/// <param name="al"></param> The lane whose closest partition is used as the dividing segment.
/// <param name="va"></param> The vehicle agent to test.
/// <param name="p"></param> The polygon the inflated vehicle is tested against.
/// <param name="ourState"></param> Our vehicle's current state, used to compute the absolute polygon.
/// <param name="circ"></param> The polygon used to inflate the vehicle (Minkowski convolution).
/// <returns></returns> False if two outside points straddle the partition segment, true otherwise.
public bool VehiclePassableInPolygon(ArbiterLane al, VehicleAgent va, Polygon p, VehicleState ourState, Polygon circ)
{
    // Inflate the vehicle's absolute polygon by the convolution polygon.
    Polygon vehiclePoly = va.GetAbsolutePolygon(ourState);
    vehiclePoly = Polygon.ConvexMinkowskiConvolution(circ, vehiclePoly);
    ArbiterLanePartition alp = al.GetClosestPartition(va.ClosestPosition);
    // The original implementation collected all points outside p, then tested every ordered pair (m, n)
    // for TriangleArea(m) * TriangleArea(n) < 0 — i.e. two outside points on opposite sides of the
    // partition's initial->final segment. Identical points always yield a non-negative product, so the
    // !m.Equals(n) guard was redundant, and the pair loop recomputed each signed area O(n) times.
    // Tracking whether any outside point lies strictly on each side is exactly equivalent and single-pass.
    bool hasPositiveSide = false;
    bool hasNegativeSide = false;
    foreach (Coordinates c in vehiclePoly)
    {
        if (!p.IsInside(c))
        {
            var signedArea = GeneralToolkit.TriangleArea(alp.Initial.Position, c, alp.Final.Position);
            if (signedArea > 0)
            {
                hasPositiveSide = true;
            }
            else if (signedArea < 0)
            {
                hasNegativeSide = true;
            }
            // Outside points on both sides of the segment: the vehicle is not passable.
            if (hasPositiveSide && hasNegativeSide)
            {
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Coroutine that merges the color data into a single texture map per submesh, and stores this information as a texture asset.
/// </summary>
/// <returns></returns> IEnumerator for Unity's coroutine scheduler.
private IEnumerator StoreGlobalTextureMapCoroutine()
{
    // Reset the progress bar.
    GeneralToolkit.ResetCancelableProgressBar(true, false);
    // Per call, fetch the assets and initialize a camera and materials.
    InitializePerCall();
    yield return(null);
    // Create and save a texture map for each submesh of the global mesh.
    int textureMapCount = PMGlobalMeshEF.globalMesh.subMeshCount;
    textureMaps = new Texture2D[textureMapCount];
    for (_submeshIndex = 0; _submeshIndex < textureMapCount; _submeshIndex++)
    {
        // Check if the progress bar has been canceled; if so, notify the caller and stop early.
        if (GeneralToolkit.progressBarCanceled)
        {
            processingCaller.processingCanceled = true;
            break;
        }
        // Update the message on the progress bar.
        DisplayAndUpdateCancelableProgressBar();
        // Per submesh, determine the texture map resolution, compute the texture map, and save it as an asset.
        ComputeTextureMapResolution();
        ComputeAndStoreTextureMap();
        // Yield a frame between submeshes to keep the editor responsive.
        yield return(null);
    }
    // Destroy all objects created per call.
    ClearPerCall();
    // Reset the progress bar.
    GeneralToolkit.ResetCancelableProgressBar(true, false);
}
/// <summary>
/// Destroys all objects created per call.
/// </summary>
private void ClearPerCall()
{
    // Reactivate deactivated renderers.
    GeneralToolkit.ReactivateOtherActiveComponents(_deactivatedRendererGOs);
    // Destroy created objects. Each is guarded, since a canceled run may leave some of them null.
    if (_previewCameraModel != null)
    {
        DestroyImmediate(_previewCameraModel.gameObject);
    }
    if (_previewCameraManager != null)
    {
        // Let the manager tear down its own camera before destroying its game object.
        _previewCameraManager.DestroyPreviewCamera();
        DestroyImmediate(_previewCameraManager.gameObject);
    }
    if (_renderToTextureMapMat != null)
    {
        DestroyImmediate(_renderToTextureMapMat);
    }
    if (_normalizeByAlphaMat != null)
    {
        DestroyImmediate(_normalizeByAlphaMat);
    }
    if (_helperULR != null)
    {
        // Release the helper's buffers before destroying the component.
        _helperULR.ClearAll();
        DestroyImmediate(_helperULR);
    }
}
/// <summary>
/// Writes the parameters from the given camera models into a text file.
/// The format is that of COLMAP's "cameras.txt" file, and can be read directly.
/// </summary>
/// <param name="cameraModels"></param> The camera models to be written to file.
/// <param name="workspace"></param> The workspace from which to work.
public static void SaveCamerasInformation(CameraModel[] cameraModels, string workspace)
{
    StringBuilder builder = new StringBuilder();
    // Write the COLMAP "cameras.txt" header.
    builder.AppendLine("# Camera list with one line of data per camera:");
    builder.AppendLine("# CAMERA_ID, MODEL, WIDTH, HEIGHT, PARAMS[]");
    builder.AppendLine("# Number of cameras: " + GeneralToolkit.ToString(cameraModels.Length));
    // Write one line per camera: id, model name, resolution, then focal length and principal point (image center).
    foreach (CameraModel cameraModel in cameraModels)
    {
        string focalLength = GeneralToolkit.ToString(Camera.FieldOfViewToFocalLength(cameraModel.fieldOfView.x, cameraModel.pixelResolution.x));
        string[] fields = new string[]
        {
            GeneralToolkit.ToString(cameraModel.cameraReferenceIndex),
            cameraModel.modelName,
            GeneralToolkit.ToString(cameraModel.pixelResolution.x),
            GeneralToolkit.ToString(cameraModel.pixelResolution.y),
            focalLength,
            GeneralToolkit.ToString(cameraModel.pixelResolution.x / 2),
            GeneralToolkit.ToString(cameraModel.pixelResolution.y / 2)
        };
        builder.AppendLine(string.Join(" ", fields));
    }
    // Write the header and parameters into a .txt file.
    File.WriteAllText(GetCamerasFile(workspace), builder.ToString());
}
/// <summary>
/// Writes the parameters from the given array of camera models into a text file.
/// The format is that of COLMAP's "images.txt" file, and can be read directly.
/// </summary>
/// <param name="cameraModels"></param> The camera models to parse to obtain image information.
/// <param name="workspace"></param> The workspace from which to work.
public static void SaveImagesInformation(CameraModel[] cameraModels, string workspace)
{
    StringBuilder builder = new StringBuilder();
    // Write the COLMAP "images.txt" header.
    builder.AppendLine("# Image list with two lines of data per image:");
    builder.AppendLine("# IMAGE_ID, QW, QX, QY, QZ, TX, TY, TZ, CAMERA_ID, NAME");
    builder.AppendLine("# POINTS2D[] as (X, Y, POINT3D_ID)");
    builder.AppendLine("# Number of images: " + cameraModels.Length + ", mean observations per image: 0");
    for (int imageIndex = 0; imageIndex < cameraModels.Length; imageIndex++)
    {
        CameraModel cameraModel = cameraModels[imageIndex];
        // Convert position and rotation to COLMAP's coordinate system.
        Quaternion rotation = cameraModel.transform.rotation;
        Vector3 position = cameraModel.transform.position;
        ConvertCoordinatesUnityToCOLMAP(ref position, ref rotation);
        // Assemble the image line. This is only used for acquisition, where there is a single camera,
        // hence CAMERA_ID is always "1".
        string[] fields = new string[]
        {
            GeneralToolkit.ToString(imageIndex + 1),
            GeneralToolkit.ToString(rotation.w),
            GeneralToolkit.ToString(rotation.x),
            GeneralToolkit.ToString(rotation.y),
            GeneralToolkit.ToString(rotation.z),
            GeneralToolkit.ToString(position.x),
            GeneralToolkit.ToString(position.y),
            GeneralToolkit.ToString(position.z),
            "1",
            cameraModel.imageName
        };
        // For each image, append one line with the parameters and one empty POINTS2D line.
        builder.AppendLine(string.Join(" ", fields));
        builder.AppendLine(string.Empty);
    }
    // Write the header and parameters into a .txt file.
    File.WriteAllText(GetImagesFile(workspace), builder.ToString());
}
/// <summary>
/// Coroutine that runs the feature matching command.
/// </summary>
/// <param name="caller"></param> The object calling this method.
/// <param name="workspace"></param> The workspace from which to perform this step.
/// <param name="displayProgressBar"></param> True if the progress bar should be displayed, false otherwise.
/// <param name="stopOnError"></param> True if the process should stop on error, false otherwise.
/// <param name="progressBarParams"></param> Parameters for the progress bar.
/// <returns></returns> IEnumerator for Unity's coroutine scheduler.
public static IEnumerator RunFeatureMatchingCommand(MonoBehaviour caller, string workspace, bool displayProgressBar, bool stopOnError, string[] progressBarParams)
{
    // Assemble the COLMAP exhaustive matching command, pointing at the workspace-local database.
    string command = string.Concat("CALL ", COLMAPSettings.formattedCOLMAPExePath, " exhaustive_matcher", " --database_path ./", databaseFileName);
    // Delegate execution to the shared command-running coroutine.
    yield return caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(COLMAPConnector), command, workspace, displayProgressBar, null, null, stopOnError, progressBarParams));
}
/// <summary>
/// Coroutine that runs the stereo command.
/// </summary>
/// <param name="caller"></param> The object calling this method.
/// <param name="workspace"></param> The workspace from which to perform this step.
/// <param name="displayProgressBar"></param> True if the progress bar should be displayed, false otherwise.
/// <param name="stopOnError"></param> True if the process should stop on error, false otherwise.
/// <param name="progressBarParams"></param> Parameters for the progress bar.
/// <returns></returns> IEnumerator for Unity's coroutine scheduler.
public static IEnumerator RunStereoCommand(MonoBehaviour caller, string workspace, bool displayProgressBar, bool stopOnError, string[] progressBarParams)
{
    // Assemble the COLMAP patch-match stereo command, operating on the current workspace.
    string command = string.Concat("CALL ", COLMAPSettings.formattedCOLMAPExePath, " patch_match_stereo", " --workspace_path ./");
    // Delegate execution to the shared command-running coroutine.
    yield return caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(COLMAPConnector), command, workspace, displayProgressBar, null, null, stopOnError, progressBarParams));
}
/// <inheritdoc/>
public override void Reset()
{
    base.Reset();
    // Initialize the helper methods: attach the focal-surface helper and restore its defaults.
    _helperFocalSurfaces = GeneralToolkit.GetOrAddComponent <Helper_FocalSurfaces>(gameObject);
    _helperFocalSurfaces.Reset();
}
/// <inheritdoc/>
public override void SectionAdditionalParameters()
{
    SerializedObject serializedObject = new SerializedObject(this);
    serializedObject.Update();
    SerializedProperty propertyGlobalMeshPath = serializedObject.FindProperty(_propertyNameGlobalMeshPathAbsolute);
    // If no mesh path is set yet, default to the first mesh file found in the data directory.
    if (string.IsNullOrEmpty(_globalMeshPathAbsolute))
    {
        string[] extensionsArray = new string[] { ".asset", ".obj", ".fbx" };
        FileInfo[] files = GeneralToolkit.GetFilesByExtension(dataHandler.dataDirectory, extensionsArray);
        if (files.Length > 0)
        {
            _globalMeshPathAbsolute = files[0].FullName;
        }
    }
    // Enable the user to select another mesh by modifying the path.
    string searchTitle = "Select global mesh";
    string tooltip = "Select the global mesh to use for rendering.";
    string extensions = "FBX,fbx,OBJ,obj,ASSET,asset";
    string outPath;
    bool clicked;
    GeneralToolkit.EditorPathSearch(out clicked, out outPath, PathType.File, _globalMeshPathAbsolute, searchTitle, tooltip, Color.grey, true, extensions);
    // Store the (possibly unchanged) path back into the serialized property and apply.
    propertyGlobalMeshPath.stringValue = outPath;
    serializedObject.ApplyModifiedProperties();
}
/// <summary>
/// Resets the object's properties.
/// </summary>
void Reset()
{
    // Set the default values.
    UpdateRenderingObjectsList();
    launchOrderIndex = renderingObjectCount;
    readyToBeLaunched = false;
    _launchOnAwake = true;
    _launched = false;
    _previewRenderTexture = null;
    _previewEvalTexture = null;
    // Get the processing component. If this was not called on AddComponent, reset it as well.
    // (A non-null renderingCaller indicates the component already existed and was previously linked.)
    _processing = GeneralToolkit.GetOrAddComponent <Processing.Processing>(gameObject);
    if (_processing.renderingCaller != null)
    {
        _processing.Reset();
    }
    else
    {
        _processing.renderingCaller = this;
    }
    // Initialize the rendering methods and helpers. This has to be done after the processing component has been reset.
    _renderingMethods = RenderingMethod.CreateOrResetRenderingMethods(transform);
    Method[] methods = GetComponentsInChildren <Method>();
    for (int iter = 0; iter < methods.Length; iter++)
    {
        methods[iter].InitializeLinks();
    }
    // Initialize the evaluation methods.
    _evaluationMethods = EvaluationMethod.GetOrCreateEvaluationMethods(transform);
#if UNITY_EDITOR
    // Collapse the processing component in the inspector to reduce visual clutter.
    UnityEditorInternal.InternalEditorUtility.SetIsInspectorExpanded(processing, false);
#endif //UNITY_EDITOR
}
/// <summary>
/// Displays and updates a cancelable progress bar informing on the processing process.
/// </summary>
/// <param name="progressBarTitle"></param> The title to be displayed by the progress bar.
/// <param name="progressBarInfo"></param> The information to be displayed by the progress bar.
public void DisplayAndUpdateCancelableProgressBar(string progressBarTitle, string progressBarInfo)
{
    // One progress-bar step per source camera.
    int maxIterations = cameraSetup.cameraModels.Length;
    const string exitMessage = "Processing canceled by user.";
    GeneralToolkit.UpdateCancelableProgressBar(typeof(Processing), true, true, true, maxIterations, progressBarTitle, progressBarInfo, exitMessage);
}
/// <summary>
/// Displays a GUI enabling the user to modify various rendering parameters.
/// </summary>
public override void OnInspectorGUI()
{
    // Start the GUI.
    GeneralToolkit.EditorStart(serializedObject, _targetObject);
    // Enable the user to select the source data directory.
    GeneralToolkit.EditorNewSection("Source data");
    SectionDataDirectory();
    // Enable the user to specify launch options.
    GeneralToolkit.EditorNewSection("Launch options");
    SectionLaunchOptions();
    // Start a change check: only the blending and evaluation sections trigger a preview refresh.
    EditorGUI.BeginChangeCheck();
    // Enable the user to select a blending method.
    GeneralToolkit.EditorNewSection("Blending method");
    SectionBlendingMethod();
    // Enable the user to select an evaluation method.
    GeneralToolkit.EditorNewSection("Evaluation method");
    SectionEvaluationMethod();
    // End the change check.
    bool shouldPreviewChange = EditorGUI.EndChangeCheck();
    // End the GUI.
    GeneralToolkit.EditorEnd(serializedObject);
    // If the preview should change, inform the target object to update the preview images.
    if (shouldPreviewChange)
    {
        _targetObject.processing.cameraSetup.onPreviewIndexChangeEvent.Invoke();
    }
}
/// <summary>
/// Displays a GUI enabling the user to create a mesh based on the camera's Z-buffer.
/// </summary>
public override void OnInspectorGUI()
{
    // Start the GUI.
    GeneralToolkit.EditorStart(serializedObject, _targetObject);
    // Start a change check: only changes to the camera model trigger a preview update.
    EditorGUI.BeginChangeCheck();
    // Enable the user to choose the camera model.
    GeneralToolkit.EditorNewSection("Camera model");
    CameraModelEditor.SectionCamera(_objectCameraModel);
    // End the change check.
    bool shouldUpdatePreview = EditorGUI.EndChangeCheck();
    // Enable the user to choose parameters for depth processing.
    GeneralToolkit.EditorNewSection("Depth processing parameters");
    SectionDepthProcessing(_targetObject.GetCameraModel().isOmnidirectional);
    // Enable the user to generate the mesh.
    GeneralToolkit.EditorNewSection("Generate");
    SectionGenerateButton();
    // End the GUI.
    GeneralToolkit.EditorEnd(serializedObject);
    // If the preview window should be updated, notify the target object.
    if (shouldUpdatePreview)
    {
        ((PerViewMeshesQSTRDB)_targetObject).UpdateCameraModel();
    }
}
/// <summary>
/// Enables the user to perform automatic retopology on the .OBJ mesh via Instant Meshes.
/// </summary>
/// <param name="serializedObject"></param> The serialized object to modify.
public void SubsectionRunInstantMeshesOBJ(SerializedObject serializedObject)
{
    EditorGUILayout.Space();
    string workspace = dataHandler.dataDirectory;
    // Instant Meshes re-meshes the converted .OBJ in place (output path equals input path).
    string inputFilePath = Path.Combine(workspace, BlenderConnector.convertPLYtoOBJOutputFileName);
    string outputFilePath = inputFilePath;
    string label = "Perform automatic retopology.";
    string tooltip = "This will re-mesh the .OBJ file at \"" + GeneralToolkit.FormatPathForCommand(inputFilePath) + "\".";
    // Check if this option is available: requires the input .OBJ to exist and the editor to be in play mode.
    bool isGUIEnabled = GUI.enabled;
    GUI.enabled = isGUIEnabled && File.Exists(inputFilePath) && Application.isPlaying;
    GeneralToolkit.EditorRequirePlayMode(ref tooltip);
    // Display a button to launch the helper method.
    bool hasPressed = GeneralToolkit.EditorWordWrapLeftButton(new GUIContent("Run", tooltip), new GUIContent(label, tooltip));
    // If the button is pressed, launch the method.
    if (hasPressed)
    {
        StartCoroutine(InstantMeshesConnector.RunInstantMeshesCoroutine(this, workspace, inputFilePath, outputFilePath, _reduceVertexCountToRecommended));
    }
    // Provide the option to reduce the face count to the value recommended by Instant Meshes, or to use the current vertex count.
    SerializedProperty propertyReduceVertexCountToRecommended = serializedObject.FindProperty(_propertyNameReduceVertexCountToRecommended);
    label = "Reduce vertex count:";
    tooltip = "If true, reduces the vertex count to the value recommended by Instant Meshes. Otherwise, aims to keep the same vertex count.";
    tooltip += " For scenes in which the region of interest is small compared to the bounds of the mesh, it is recommended to turn this off.";
    propertyReduceVertexCountToRecommended.boolValue = EditorGUILayout.Toggle(new GUIContent(label, tooltip), propertyReduceVertexCountToRecommended.boolValue);
    // Reset the GUI.
    GUI.enabled = isGUIEnabled;
    EditorGUILayout.Space();
}
/// <summary>
/// Fetches the global mesh and initializes the camera and materials.
/// </summary>
private void InitializePerCall()
{
    // Deactivate any other renderer in the scene.
    _deactivatedRendererGOs = GeneralToolkit.DeactivateOtherActiveComponents <Renderer>(gameObject);
    // Create a preview camera manager and initialize it with the camera model's pose and parameters.
    _previewCameraModel = CameraModel.CreateCameraModel();
    _previewCameraModel.transform.position = Vector3.zero;
    _previewCameraModel.transform.rotation = Quaternion.identity;
    _previewCameraModel.fieldOfView = 60f * Vector2.one;
    // Focal length for a unit-size sensor; fed to the texture-map material below.
    float focalLength = Camera.FieldOfViewToFocalLength(_previewCameraModel.fieldOfView.x, 1f);
    _previewCameraManager = new GameObject("Preview Camera Manager").AddComponent <PreviewCameraManager>();
    Transform previewCameraTransform = new GameObject("Preview Camera").transform;
    GeneralToolkit.CreateRenderTexture(ref _previewCameraManager.targetTexture, Vector2Int.one, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
    _previewCameraManager.CreatePreviewCamera(_previewCameraManager.gameObject, previewCameraTransform, _previewCameraModel);
    // Clear to transparent so uncovered texels can later be normalized by alpha.
    _previewCameraManager.previewCamera.clearFlags = CameraClearFlags.Color;
    _previewCameraManager.previewCamera.backgroundColor = Color.clear;
    // Create the materials.
    _renderToTextureMapMat = new Material(GeneralToolkit.shaderProcessingGlobalTextureMap);
    _renderToTextureMapMat.SetFloat(_shaderNameFocalLength, focalLength);
    _normalizeByAlphaMat = new Material(GeneralToolkit.shaderNormalizeByAlpha);
    // Initialize the helper object for ULR.
    // NOTE(review): ordering matters here — links and buffers are created before the blending
    // material is initialized and assigned; confirm before reordering.
    _helperULR = gameObject.AddComponent <Rendering.Helper_ULR>();
    _helperULR.Reset();
    _helperULR.InitializeLinks();
    _helperULR.blendCamCount = Rendering.Helper_ULR.maxBlendCamCount;
    _helperULR.numberOfSourceCameras = PMColorTextureArray.colorData.depth;
    _helperULR.CreateULRBuffersAndArrays();
    _helperULR.InitializeBlendingMaterialParameters(ref _renderToTextureMapMat);
    _helperULR.currentBlendingMaterial = _renderToTextureMapMat;
    _helperULR.initialized = true;
}
/// <summary>
/// Coroutine that performs automatic retopology on a given mesh using the Instant Meshes implementation.
/// </summary>
/// <param name="caller"></param> The object calling this method.
/// <param name="workspace"></param> The workspace from which to launch the command.
/// <param name="inputFilePath"></param> The full path to the input .PLY or .OBJ file.
/// <param name="outputFilePath"></param> The full path to the output .PLY or .OBJ file.
/// <param name="blenderHelper"></param> The helper component for Blender.
/// <returns></returns> IEnumerator for Unity's coroutine scheduler.
public static IEnumerator RunInstantMeshesCoroutine(MonoBehaviour caller, string workspace, string inputFilePath, string outputFilePath, BlenderHelper blenderHelper)
{
    // Indicate to the user that the process has started.
    GeneralToolkit.ResetCancelableProgressBar(true, true);
    // Initialize the command parameters.
    bool displayProgressBar = true;
    bool stopOnError = true;
    string[] progressBarParams = new string[3];
    progressBarParams[0] = "2";
    progressBarParams[1] = "Automatic retopology";
    progressBarParams[2] = "Processing canceled by user.";
    // Prepare the command: output path, then fixed quality flags, then (optionally) face count, then input path.
    string formattedExePath = InstantMeshesSettings.formattedInstantMeshesExePath;
    string command = "CALL " + formattedExePath;
    command += " --output " + GeneralToolkit.FormatPathForCommand(outputFilePath);
    command += " --deterministic --boundaries --rosy 6 --posy 6";
    // If there is a Blender helper, use the determined mesh face count to define the desired face count.
    if (blenderHelper != null && blenderHelper.meshFaceCount != -1)
    {
        command += " --faces " + GeneralToolkit.ToString(blenderHelper.meshFaceCount);
    }
    // Launch the command. The input file path is the final (positional) argument.
    command += " " + GeneralToolkit.FormatPathForCommand(inputFilePath);
    yield return(caller.StartCoroutine(GeneralToolkit.RunCommandCoroutine(typeof(InstantMeshesConnector), command, workspace, displayProgressBar, null, null, stopOnError, progressBarParams)));
    // If there is a Blender helper, update the mesh's face count.
    if (blenderHelper != null)
    {
        blenderHelper.CheckOBJMeshInfo(workspace);
    }
    // Indicate to the user that the process has ended.
    GeneralToolkit.ResetCancelableProgressBar(false, false);
}
/// <summary>
/// Enables the user to launch dense 3D reconstruction and meshing via COLMAP.
/// </summary>
private void SubsectionDenseReconstruction()
{
    EditorGUILayout.Space();
    string workspace = dataHandler.dataDirectory;
    string label = "Reconstruct 3D mesh (.PLY) from sparse camera setup.";
    string tooltip = "Processed geometry will be stored at: \"" + COLMAPConnector.GetDelaunayFile(workspace) + "\".";
    // Check if this option is available: requires camera models, image point correspondences, and play mode.
    bool isGUIEnabled = GUI.enabled;
    GUI.enabled = isGUIEnabled && (cameraSetup != null && cameraSetup.cameraModels != null && cameraSetup.cameraModels.Length > 0) && (dataHandler != null && dataHandler.imagePointCorrespondencesExist) && Application.isPlaying;
    GeneralToolkit.EditorRequirePlayMode(ref tooltip);
    // Display a button to launch the helper method.
    bool hasPressed = GeneralToolkit.EditorWordWrapLeftButton(new GUIContent("Run", tooltip), new GUIContent(label, tooltip));
    // If the button is pressed, display a dialog to confirm.
    if (hasPressed)
    {
        label = "Existing data will be erased. Are you ready to proceed?";
        tooltip = "WARNING: Before launching this step, please check that you are using a CUDA-capable GPU. Your current GPU is a " + SystemInfo.graphicsDeviceName + ".";
        tooltip += "\n\nLaunching this process will erase data in the folder: \"" + workspace + "\". Are you ready to proceed?";
        // If the user confirms, launch the method; the third button opens NVIDIA's CUDA GPU list.
        int chosenButton = EditorUtility.DisplayDialogComplex(label, tooltip, "Yes", "No", "Check whether my GPU is CUDA-capable");
        if (chosenButton == 0)
        {
            StartCoroutine(COLMAPConnector.RunDenseReconstructionCoroutine(this, workspace, cameraSetup.cameraModels.Length));
        }
        else if (chosenButton == 2)
        {
            Application.OpenURL("https://developer.nvidia.com/cuda-gpus");
        }
    }
    // Reset the GUI.
    GUI.enabled = isGUIEnabled;
    EditorGUILayout.Space();
}
/// <summary>
/// On destroy, destroys the created objects.
/// </summary>
void OnDestroy()
{
    // When a new scene is starting, skip cleanup and leave teardown to Unity.
    if (GeneralToolkit.IsStartingNewScene())
    {
        return;
    }
    GeneralToolkit.RemoveChildComponents(transform, typeof(CameraSetup), typeof(DataHandler), typeof(PreviewCameraManager));
}
/// <summary>
/// Gets or creates the package settings, stored as an asset in the project folder.
/// </summary>
/// <returns></returns> The package settings.
private static COLIBRIVRSettings GetOrCreateSettings()
{
    // Ensure the settings folders exist before touching the asset database.
    if (!Directory.Exists(settingsFolderAbsolutePath))
    {
        GeneralToolkit.CreateOrClear(PathType.Directory, settingsFolderAbsolutePath);
    }
    if (!Directory.Exists(settingsResourcesAbsolutePath))
    {
        GeneralToolkit.CreateOrClear(PathType.Directory, settingsResourcesAbsolutePath);
    }
    string settingsAssetPath = Path.Combine(GeneralToolkit.ToRelativePath(COLIBRIVRSettings.settingsFolderAbsolutePath), "COLIBRIVRSettings.asset");
    COLIBRIVRSettings settings = AssetDatabase.LoadAssetAtPath <COLIBRIVRSettings>(settingsAssetPath);
    // If no settings asset exists yet, create one with default sub-settings and save it.
    if (settings == null)
    {
        settings = ScriptableObject.CreateInstance <COLIBRIVRSettings>();
        settings.COLMAPSettings = (COLMAPSettings)ScriptableObject.CreateInstance <COLMAPSettings>().Initialize();
        settings.BlenderSettings = (BlenderSettings)ScriptableObject.CreateInstance <BlenderSettings>().Initialize();
        settings.InstantMeshesSettings = (InstantMeshesSettings)ScriptableObject.CreateInstance <InstantMeshesSettings>().Initialize();
        settings.previewMaxResolution = 256;
        // The main asset must be created before the sub-settings can be attached to it.
        AssetDatabase.CreateAsset(settings, settingsAssetPath);
        AssetDatabase.AddObjectToAsset(settings.COLMAPSettings, settingsAssetPath);
        AssetDatabase.AddObjectToAsset(settings.BlenderSettings, settingsAssetPath);
        AssetDatabase.AddObjectToAsset(settings.InstantMeshesSettings, settingsAssetPath);
        AssetDatabase.SaveAssets();
    }
    return(settings);
}
/// <summary>
/// Updates the preview camera with the camera model, and displays the rendered view in the preview window.
/// </summary>
/// <param name="useFullResolution"></param> Whether to use the full resolution (capture) or one limited for preview (preview window).
public void UpdatePreviewCameraModel(bool useFullResolution)
{
    // The preview camera manager, and its camera, need to have been initialized in a previous step.
    if (_previewCameraManager != null && _previewCameraManager.previewCamera != null)
    {
        // Update the preview camera's camera model, and render the preview image.
        CameraModel cameraParams = cameraSetup.cameraModels[cameraSetup.previewIndex];
        _previewCameraManager.UpdateCameraModel(cameraParams, useFullResolution);
        _previewCameraManager.RenderPreviewToTarget(ref _previewCameraManager.targetTexture, false);
        int previewMaxIndex = cameraSetup.cameraModels.Length - 1;
        PreviewWindow.DisplayImage(_colorCallerName, _previewCameraManager.targetTexture, previewMaxIndex);
        // If depth data, or mesh data, is to be acquired, display a depth preview.
        if (_acquireDepthData || _copyGlobalMesh)
        {
            // Render actual depth into a precise depth texture.
            GeneralToolkit.CreateRenderTexture(ref _targetDepthTexture, cameraParams.pixelResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
            _previewCameraManager.RenderPreviewToTarget(ref _targetDepthTexture, true);
            // Encode the depth texture into a color texture, using a colormap suited for visualization.
            // The conversion material is created lazily on first use.
            if (_distanceToColorMat == null)
            {
                _distanceToColorMat = new Material(GeneralToolkit.shaderAcquisitionConvert01ToColor);
            }
            _distanceToColorMat.SetInt(shaderNameIsPrecise, 0);
            GeneralToolkit.CreateRenderTexture(ref _distanceAsColorTexture, cameraParams.pixelResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
            Graphics.Blit(_targetDepthTexture, _distanceAsColorTexture, _distanceToColorMat);
            // Display the texture in the preview window.
            PreviewWindow.DisplayImage(_depthCallerName, _distanceAsColorTexture, previewMaxIndex);
            // Reset the active render texture.
            RenderTexture.active = null;
        }
    }
}
/// <summary>
/// Enables the user to launch dense 3D reconstruction and meshing via COLMAP.
/// </summary>
private void SubsectionDenseReconstruction()
{
    EditorGUILayout.Space();
    string workspace = dataHandler.dataDirectory;
    string label = "Reconstruct 3D mesh (.PLY) from sparse camera setup.";
    string tooltip = "Processed geometry will be stored at: \"" + COLMAPConnector.GetDelaunayFile(workspace) + "\".";
    // Check if this option is available: requires camera models, a workspace inside COLMAP's dense
    // directory, and the editor to be in play mode.
    bool isGUIEnabled = GUI.enabled;
    GUI.enabled = isGUIEnabled && (cameraSetup != null && cameraSetup.cameraModels != null) && workspace.Contains(COLMAPConnector.dense0DirName) && Application.isPlaying;
    GeneralToolkit.EditorRequirePlayMode(ref tooltip);
    // Display a button to launch the helper method.
    bool hasPressed = GeneralToolkit.EditorWordWrapLeftButton(new GUIContent("Run", tooltip), new GUIContent(label, tooltip));
    // If the button is pressed, display a dialog to confirm.
    if (hasPressed)
    {
        label = "Existing data will be erased. Are you ready to proceed?";
        tooltip = "Launching this process will erase data in the folder: \"" + workspace + "\". Are you ready to proceed?";
        // If the user confirms, launch the reconstruction coroutine.
        if (EditorUtility.DisplayDialog(label, tooltip, "Yes", "No"))
        {
            StartCoroutine(COLMAPConnector.RunDenseReconstructionCoroutine(this, workspace));
        }
    }
    // Reset the GUI.
    GUI.enabled = isGUIEnabled;
    EditorGUILayout.Space();
}
/// <summary>
/// Remove camera models on destroy.
/// </summary>
void OnDestroy()
{
    // When a new scene is starting, skip cleanup and leave teardown to Unity.
    if (GeneralToolkit.IsStartingNewScene())
    {
        return;
    }
    ResetCameraModels();
}
/// <inheritdoc/>
protected override IEnumerator ExecuteMethodCoroutine()
{
    // Reset the progress bar.
    GeneralToolkit.ResetCancelableProgressBar(true, false);
    // Initialize the compute shader's properties.
    InitializePerCall();
    // Create a mesh for each source depth map.
    perViewMeshes = new Mesh[cameraSetup.cameraModels.Length];
    for (int sourceIndex = 0; sourceIndex < perViewMeshes.Length; sourceIndex++)
    {
        // Update the progress bar, and enable the user to cancel the process.
        DisplayAndUpdateCancelableProgressBar();
        if (GeneralToolkit.progressBarCanceled)
        {
            processingCaller.processingCanceled = true;
            break;
        }
        // Process the depth map, then yield a frame to keep the editor responsive.
        ProcessDepthImage(sourceIndex);
        yield return(null);
    }
    // Releases the compute shader.
    ReleasePerCall();
    // Reset the progress bar.
    GeneralToolkit.ResetCancelableProgressBar(true, false);
}
/// <summary>
/// Creates or resets a camera setup object as a child of the given transform.
/// </summary>
/// <param name="parentTransform"></param> The parent transform.
/// <returns></returns> The camera setup object.
public static CameraSetup CreateOrResetCameraSetup(Transform parentTransform = null)
{
    // Fetch the child component (creating it if needed), then restore its default state.
    CameraSetup cameraSetup = GeneralToolkit.GetOrCreateChildComponent<CameraSetup>(parentTransform);
    cameraSetup.Reset();
    return cameraSetup;
}
/// <summary>
/// Creates mesh assets for the depth image specified by the given index.
/// </summary>
/// <param name="acquisitionIndex"></param> The index of the depth image.
private void ProcessDepthImage(int acquisitionIndex)
{
    // Check if the asset has already been processed; if so, skip it entirely.
    string bundledAssetName = dataHandler.GetBundledAssetName(this, perViewMeshAssetPrefix + GeneralToolkit.ToString(acquisitionIndex));
    string meshRelativePath = Path.Combine(GeneralToolkit.tempDirectoryRelativePath, bundledAssetName + ".asset");
    if (dataHandler.IsAssetAlreadyProcessed(meshRelativePath))
    {
        return;
    }
    // Update the camera model.
    cameraModel = cameraSetup.cameraModels[acquisitionIndex];
    // Initialize the distance map texture, and load the depth data into it.
    InitializeDistanceMap();
    string imageName = cameraModel.imageName;
    string imagePath = Path.Combine(dataHandler.depthDirectory, imageName);
    GeneralToolkit.LoadTexture(imagePath, ref distanceMap);
    // Compute a mesh from the distance map.
    Mesh outMesh;
    ComputeMesh(out outMesh);
    // Save this mesh as an asset.
    AssetDatabase.CreateAsset(outMesh, meshRelativePath);
    AssetDatabase.Refresh();
    // Store the per-view mesh into the final array.
    // An instantiated copy is stored rather than the asset itself — presumably to avoid
    // mutating the saved asset at runtime; confirm before changing.
    Mesh meshAsset = AssetDatabase.LoadAssetAtPath <Mesh>(meshRelativePath);
    perViewMeshes[acquisitionIndex] = (Mesh)Instantiate(meshAsset);
}
/// <summary>
/// Checks the source data directory for color images, depth maps, and meshes.
/// </summary>
public void CheckStatusOfSourceData()
{
    string[] extensions = new string[] { ".png", ".jpg" };
    // Check the color directory for color images.
    if (Directory.Exists(colorDirectory))
    {
        sourceColorCount = GeneralToolkit.GetFilesByExtension(colorDirectory, extensions).Length;
    }
    else
    {
        sourceColorCount = 0;
    }
    // Check the depth directory for depth maps.
    if (Directory.Exists(depthDirectory))
    {
        sourcePerViewCount = GeneralToolkit.GetFilesByExtension(depthDirectory, extensions).Length;
    }
    else
    {
        sourcePerViewCount = 0;
    }
    extensions = new string[] { ".asset", ".obj", ".fbx" };
    // Check the root directory for meshes. FIX: guard with Directory.Exists, consistent with the
    // color and depth checks above, so a missing data directory yields a count of zero instead of
    // relying on GetFilesByExtension to tolerate a nonexistent path.
    if (Directory.Exists(dataDirectory))
    {
        sourceGlobalCount = GeneralToolkit.GetFilesByExtension(dataDirectory, extensions).Length;
    }
    else
    {
        sourceGlobalCount = 0;
    }
    // Compile all of this information into an output string for the processing caller.
    if (_processingCaller != null)
    {
        string colorInfo = sourceColorCount + " color image" + ((sourceColorCount == 1) ? string.Empty : "s") + ", ";
        string depthInfo = sourcePerViewCount + " depth map" + ((sourcePerViewCount == 1) ? string.Empty : "s") + ", ";
        string meshInfo = sourceGlobalCount + " mesh" + ((sourceGlobalCount == 1) ? string.Empty : "es") + ".";
        _processingCaller.sourceDataInfo = "This directory contains: " + colorInfo + depthInfo + meshInfo;
    }
}