/// <summary>
/// Reads the mesh's face count from a dedicated debug log.
/// </summary>
/// <param name="sendingProcess">The process sending the log.</param>
/// <param name="outLine">The log's output line.</param>
private void ReadMeshFaceCount(object sendingProcess, System.Diagnostics.DataReceivedEventArgs outLine)
{
    string line = outLine.Data;
    if (!string.IsNullOrEmpty(line) && line.Contains("FACE_COUNT_OUTPUT:"))
    {
        meshFaceCount = GeneralToolkit.ParseInt(line.Split(':')[1]);
    }
}
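// Illustrative sketch (not part of the original class): one way the handler above might be
// wired to an external process whose standard output is read asynchronously. The method name
// and the tool path parameter are hypothetical placeholders.
private void RunExternalMeshToolExample(string hypotheticalToolPath)
{
    var startInfo = new System.Diagnostics.ProcessStartInfo(hypotheticalToolPath)
    {
        UseShellExecute = false,
        RedirectStandardOutput = true,
        CreateNoWindow = true
    };
    using (var process = new System.Diagnostics.Process { StartInfo = startInfo })
    {
        // Route each line of standard output to ReadMeshFaceCount, which looks for a
        // "FACE_COUNT_OUTPUT:<value>" line and stores the parsed value in meshFaceCount.
        process.OutputDataReceived += ReadMeshFaceCount;
        process.Start();
        process.BeginOutputReadLine();
        process.WaitForExit();
    }
}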
/// <summary>
/// Reads images information from a COLMAP "images.txt" file, and saves it into the referenced array.
/// Note that the array contains a camera element for each image. Array elements will be initialized here.
/// </summary>
/// <param name="cameraSetup">The camera setup to which to output the list of parsed camera models.</param>
/// <param name="workspace">The workspace from which to work.</param>
public static void ReadImagesInformation(CameraSetup cameraSetup, string workspace)
{
    List<Vector3> positionList = new List<Vector3>();
    List<Quaternion> rotationList = new List<Quaternion>();
    List<string> fileNameList = new List<string>();
    List<int> cameraIDList = new List<int>();
    // Read COLMAP's images file.
    using (StreamReader reader = File.OpenText(GetImagesFile(workspace)))
    {
        bool isOdd = false;
        string line;
        // Read the file line-by-line to the end.
        while ((line = reader.ReadLine()) != null)
        {
            // Skip the header lines, which start with #.
            if (!line.StartsWith("#"))
            {
                // If the line is odd, skip it, and indicate that the next line will be even.
                if (isOdd)
                {
                    isOdd = false;
                }
                // If the line is even, parse it, and indicate that the next line will be odd.
                else
                {
                    string[] split = line.Split(' ');
                    // COLMAP's image lines should have 10 parameters.
                    if (split.Length > 9)
                    {
                        // Parse position and rotation, and convert them to Unity's coordinate system.
                        Quaternion rotation = new Quaternion(GeneralToolkit.ParseFloat(split[2]), GeneralToolkit.ParseFloat(split[3]), GeneralToolkit.ParseFloat(split[4]), GeneralToolkit.ParseFloat(split[1]));
                        Vector3 position = new Vector3(GeneralToolkit.ParseFloat(split[5]), GeneralToolkit.ParseFloat(split[6]), GeneralToolkit.ParseFloat(split[7]));
                        ConvertCoordinatesCOLMAPToUnity(ref position, ref rotation);
                        // Add all the parameters to the dedicated lists.
                        positionList.Add(position);
                        rotationList.Add(rotation);
                        fileNameList.Add(split[9]);
                        cameraIDList.Add(GeneralToolkit.ParseInt(split[8]));
                        // Indicate that the next line will be odd.
                        isOdd = true;
                    }
                }
            }
        }
    }
    // Use these lists to create and fill the output array of camera models.
    cameraSetup.cameraModels = new CameraModel[positionList.Count];
    for (int iter = 0; iter < positionList.Count; iter++)
    {
        CameraModel cameraModel = cameraSetup.AddCameraModel(iter);
        cameraModel.SetCameraReferenceIndexAndImageName(cameraIDList[iter], fileNameList[iter]);
        cameraModel.transform.localPosition = positionList[iter];
        cameraModel.transform.localRotation = rotationList[iter];
    }
}
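// For reference, COLMAP's "images.txt" stores two lines of data per image after a "#" header:
//   IMAGE_ID QW QX QY QZ TX TY TZ CAMERA_ID NAME
//   POINTS2D[] as (X, Y, POINT3D_ID)
// The parser above reads the first line of each pair (rotation as a quaternion, translation,
// camera ID, file name) and uses the isOdd toggle to skip the second line of 2D point observations.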
/// <summary>
/// Parses the basic parameters of any COLMAP camera model.
/// </summary>
/// <param name="split">The split string from which to parse information.</param>
/// <param name="isOmnidirectional">True if the camera model is omnidirectional, false otherwise.</param>
/// <returns>A camera model containing the parsed parameters.</returns>
private static CameraModel BasicParse(string[] split, bool isOmnidirectional)
{
    CameraModel cameraModel = CameraModel.CreateCameraModel();
    // The camera's projection type is not handled by COLMAP, and has to be specified.
    cameraModel.isOmnidirectional = isOmnidirectional;
    // The camera's index is given by the first parameter in the .txt file.
    cameraModel.SetCameraReferenceIndexAndImageName(GeneralToolkit.ParseInt(split[0]), cameraModel.imageName);
    // The camera's pixel resolution is given by the third and fourth parameters in the .txt file.
    cameraModel.pixelResolution = new Vector2Int(GeneralToolkit.ParseInt(split[2]), GeneralToolkit.ParseInt(split[3]));
    // Return the camera model.
    return cameraModel;
}
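// For reference, each non-comment line of COLMAP's "cameras.txt" has the form:
//   CAMERA_ID MODEL WIDTH HEIGHT PARAMS[]
// which is why split[0] provides the camera index, split[1] (the model name, e.g. PINHOLE) is
// skipped here, and split[2] and split[3] provide the pixel resolution.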
/// <summary>
/// Loads the processed per-view meshes for play.
/// </summary>
/// <returns>An IEnumerator for use as a coroutine.</returns>
public IEnumerator LoadProcessedPerViewMeshesCoroutine()
{
    CameraModel[] cameraModels = cameraSetup.cameraModels;
    perViewMeshes = new Mesh[cameraModels.Length];
    perViewMeshTransforms = new Transform[cameraModels.Length];
    // Check that the application is playing.
    if (!Application.isPlaying)
    {
        yield break;
    }
    // Only continue if there are source images.
    if (cameraModels.Length > 0)
    {
        // Create a gameobject for the geometric data, and set it as a child of this transform.
        Transform geometricDataTransform = new GameObject("Geometric data").transform;
        geometricDataTransform.parent = dataHandler.transform;
        // Reset the geometric data's local position, rotation, and scale, to fit those of the parent object.
        GeneralToolkit.SetTransformValues(geometricDataTransform, true, Vector3.zero, Quaternion.identity, Vector3.one);
        // Check each bundled asset name for the prefix corresponding to this class.
        string bundledAssetsPrefix = DataHandler.GetBundledAssetPrefixFromType(typeof(PerViewMeshesQSTR));
        foreach (string bundledAssetName in dataHandler.bundledAssetsNames)
        {
            // If the correct asset name is found, load the per-view mesh.
            if (bundledAssetName.Contains(bundledAssetsPrefix))
            {
                Mesh processedPerViewMesh = new Mesh();
                yield return dataHandler.StartCoroutine(dataHandler.LoadAssetsFromBundleCoroutine<Mesh>((result => processedPerViewMesh = result[0]), bundledAssetName));
                // Create a gameobject for the mesh, and set it as a child of the geometric data.
                Transform meshTransform = new GameObject(processedPerViewMesh.name).transform;
                meshTransform.parent = geometricDataTransform;
                // Link the per-view mesh to the gameobject.
                meshTransform.gameObject.AddComponent<MeshFilter>().sharedMesh = processedPerViewMesh;
                // Determine the source camera index from the asset name.
                string assetName = bundledAssetName.Replace(bundledAssetsPrefix, string.Empty);
                string sourceCamIndexString = assetName.Replace(perViewMeshAssetPrefix, string.Empty);
                int sourceCamIndex = GeneralToolkit.ParseInt(sourceCamIndexString);
                // Determine the mesh's transform values from the camera model.
                CameraModel cameraModel = cameraModels[sourceCamIndex];
                Vector3 meshPosition = cameraModel.transform.position;
                Quaternion meshRotation = cameraModel.transform.rotation;
                Vector3 meshScale = Vector3.one;
                // Set the mesh's transform to the computed values.
                GeneralToolkit.SetTransformValues(meshTransform, false, meshPosition, meshRotation, meshScale);
                // Assign to the output arrays.
                perViewMeshes[sourceCamIndex] = processedPerViewMesh;
                perViewMeshTransforms[sourceCamIndex] = meshTransform;
            }
        }
    }
}
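// Note: the index extraction above assumes bundled asset names of the form
// <bundledAssetsPrefix><perViewMeshAssetPrefix><sourceCamIndex>; for example, a hypothetical
// name "PerViewMeshesQSTR_PerViewMesh_3" would yield source camera index 3.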
/// <summary>
/// Reads the stored additional information.
/// </summary>
/// <param name="cameraSetup">The camera setup to modify with the parsed information.</param>
public void ReadCOLIBRIVRAdditionalInformation(CameraSetup cameraSetup)
{
    string[] lines = File.ReadAllLines(additionalInfoFile);
    for (int i = 0; i < lines.Length; ++i)
    {
        // Parse the initial viewing position from the line following its header.
        if (lines[i].Contains("INITIAL_VIEWING_POSITION") && i + 1 < lines.Length)
        {
            string[] split = lines[++i].Split(' ');
            if (split.Length == 3)
            {
                Vector3 newInitialViewingPosition = new Vector3(GeneralToolkit.ParseFloat(split[0]), GeneralToolkit.ParseFloat(split[1]), GeneralToolkit.ParseFloat(split[2]));
                cameraSetup.SetAdditionalParameters(newInitialViewingPosition);
            }
            continue;
        }
        // Parse the per-camera distance ranges from the lines following their header.
        if (lines[i].Contains("DISTANCE_RANGE"))
        {
            while (i + 1 < lines.Length && !lines[i + 1].StartsWith("#"))
            {
                string[] split = lines[++i].Split(' ');
                if (split.Length == 3)
                {
                    int cameraReferenceIndex = GeneralToolkit.ParseInt(split[0]);
                    Vector2 newDistanceRange = new Vector2(GeneralToolkit.ParseFloat(split[1]), GeneralToolkit.ParseFloat(split[2]));
                    // Find the referenced camera and set its distance range.
                    foreach (CameraModel camera in cameraSetup.cameraModels)
                    {
                        if (camera.cameraReferenceIndex == cameraReferenceIndex)
                        {
                            camera.distanceRange = newDistanceRange;
                        }
                    }
                }
            }
        }
    }
}
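// Illustrative layout of the additional information file, inferred from the parsing logic above
// (header wording and values are placeholders, not the file's guaranteed format):
//   # INITIAL_VIEWING_POSITION
//   0.0 1.5 -2.0
//   # DISTANCE_RANGE
//   0 0.1 100.0
//   1 0.1 150.0
// Each DISTANCE_RANGE row lists: cameraReferenceIndex minDistance maxDistance.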
/// <summary>
/// Loads the processed texture maps for play.
/// </summary>
/// <returns>An IEnumerator for use as a coroutine.</returns>
public IEnumerator LoadProcessedTextureMapsCoroutine()
{
    // Check that the application is playing.
    if (!Application.isPlaying)
    {
        yield break;
    }
    // Count the number of texture maps to load by checking each bundled asset name for the prefix corresponding to this class.
    string bundledAssetsPrefix = DataHandler.GetBundledAssetPrefixFromType(typeof(GlobalTextureMap));
    int textureMapCount = 0;
    foreach (string bundledAssetName in dataHandler.bundledAssetsNames)
    {
        if (bundledAssetName.Contains(bundledAssetsPrefix))
        {
            textureMapCount++;
        }
    }
    // If there are texture maps to load, load them.
    if (textureMapCount > 0)
    {
        textureMaps = new Texture2D[textureMapCount];
        foreach (string bundledAssetName in dataHandler.bundledAssetsNames)
        {
            if (bundledAssetName.Contains(bundledAssetsPrefix))
            {
                // Determine the texture map index from the asset name.
                string assetName = bundledAssetName.Replace(bundledAssetsPrefix, string.Empty);
                string textureMapIndexString = assetName.Replace(textureMapAssetPrefix, string.Empty);
                int textureMapIndex = GeneralToolkit.ParseInt(textureMapIndexString);
                // Load the texture map.
                yield return dataHandler.StartCoroutine(dataHandler.LoadAssetsFromBundleCoroutine<Texture2D>((result => textureMaps[textureMapIndex] = result[0]), bundledAssetName));
            }
        }
    }
}