Example #1
        /// <summary>
        /// Adjusts the LAS header extents (used as quadtree parameters) according to the global scaling configuration.
        /// </summary>
        /// <param name="lasfile">The LAS file whose header extents are rescaled.</param>
        private LASFile adjustParameters(LASFile lasfile)
        {
            // Scale each axis of the header extent by the corresponding global factor;
            // the extent width on each axis scales by the same factor as its bounds.
            lasfile.header.MinX *= LasDataManager.GlobalPointsScaleFactor.x;
            lasfile.header.MaxX *= LasDataManager.GlobalPointsScaleFactor.x;

            lasfile.header.MinY *= LasDataManager.GlobalPointsScaleFactor.y;
            lasfile.header.MaxY *= LasDataManager.GlobalPointsScaleFactor.y;

            lasfile.header.MinZ *= LasDataManager.GlobalPointsScaleFactor.z;
            lasfile.header.MaxZ *= LasDataManager.GlobalPointsScaleFactor.z;

            return lasfile;
        }
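A minimal usage sketch for the method above, assuming a global scale factor of 0.5 on every axis (all values here are hypothetical):

        // assumed: LasDataManager.GlobalPointsScaleFactor = (x: 0.5, y: 0.5, z: 0.5)
        // lasfile.header.MinX = 100.0; lasfile.header.MaxX = 300.0;
        // lasfile = adjustParameters(lasfile);
        // // header.MinX == 50.0, header.MaxX == 150.0,
        // // so the X extent shrinks from 200.0 to 100.0 by the same factor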
Example #2
        public static void Stage(FileInfo file, LASFile las)
        {
            var dir = Path.Combine(
                Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
                "las-index");

            // CreateDirectory is a no-op when the directory already exists.
            Directory.CreateDirectory(dir);

            var index = new Indexer
            {
                FullName         = file.FullName,
                LastWriteTimeUtc = file.LastWriteTimeUtc,
                las              = las
            };

            File.WriteAllText(
                Path.Combine(dir, $"{Guid.NewGuid()}.index.json"),
                JsonConvert.SerializeObject(index));
        }
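For reference, a minimal sketch of reading the staged entries back; it assumes the same Indexer shape and Newtonsoft.Json (JsonConvert) already used above, and the LoadStaged name is mine:

        public static List<Indexer> LoadStaged()
        {
            var dir = Path.Combine(
                Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
                "las-index");

            var entries = new List<Indexer>();
            if (!Directory.Exists(dir))
            {
                return entries;
            }

            // each staged file holds one serialized Indexer
            foreach (var path in Directory.EnumerateFiles(dir, "*.index.json"))
            {
                entries.Add(JsonConvert.DeserializeObject<Indexer>(File.ReadAllText(path)));
            }

            return entries;
        }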
Example #3
        public PointCloudTileSource TilePointFileIndex(LASFile tiledFile, BufferInstance segmentBuffer, ProgressManager progressManager)
        {
            var stopwatch = new Stopwatch();

            stopwatch.Start();

            var analysis        = AnalyzePointFile(segmentBuffer.Length, progressManager);
            var quantizedExtent = m_source.QuantizedExtent;
            var tileCounts      = analysis.Density.GetTileCountsForInitialization();

            var fileSize = tiledFile.PointDataOffset + (m_source.PointSizeBytes * m_source.Count);

            AttemptFastAllocate(tiledFile.FilePath, fileSize);

            var lowResPointCountMax = PROPERTY_MAX_LOWRES_POINTS.Value;
            var lowResBuffer        = BufferManager.AcquireBuffer(m_id, lowResPointCountMax * m_source.PointSizeBytes);
            var lowResWrapper       = new PointBufferWrapper(lowResBuffer, m_source.PointSizeBytes, lowResPointCountMax);

            var validTiles          = analysis.GridIndex.Sum(r => r.GridRange.ValidCells);
            var lowResPointsPerTile = lowResPointCountMax / validTiles;
            var lowResTileSize      = (ushort)Math.Sqrt(lowResPointsPerTile);

            var lowResGrid   = Grid<int>.Create(lowResTileSize, lowResTileSize, true, -1);
            var lowResCounts = tileCounts.Copy<int>();

            using (var outputStream = StreamManager.OpenWriteStream(tiledFile.FilePath, fileSize, tiledFile.PointDataOffset))
            {
                var i = 0;
                foreach (var segment in analysis.GridIndex)
                {
                    progressManager.Log("~ Processing Index Segment {0}/{1}", ++i, analysis.GridIndex.Count);

                    var sparseSegment        = m_source.CreateSparseSegment(segment);
                    var sparseSegmentWrapper = new PointBufferWrapper(segmentBuffer, sparseSegment);

                    var tileRegionFilter = new TileRegionFilter(tileCounts, quantizedExtent, segment.GridRange);

                    // this call will fill the buffer with points, add the counts, and sort
                    QuantTilePointsIndexed(sparseSegment, sparseSegmentWrapper, tileRegionFilter, tileCounts, lowResWrapper, lowResGrid, lowResCounts, progressManager);
                    var segmentFilteredPointCount = tileRegionFilter.GetCellOrdering().Sum(t => tileCounts.Data[t.Row, t.Col]);
                    var segmentFilteredBytes      = segmentFilteredPointCount * sparseSegmentWrapper.PointSizeBytes;

                    // write out the buffer
                    using (var process = progressManager.StartProcess("WriteIndexSegment"))
                    {
                        var segmentBufferIndex = 0;
                        foreach (var tile in segment.GridRange.GetCellOrdering())
                        {
                            var tileCount = tileCounts.Data[tile.Row, tile.Col];
                            if (tileCount > 0)
                            {
                                var tileSize = (tileCount - lowResCounts.Data[tile.Row, tile.Col]) * sparseSegmentWrapper.PointSizeBytes;
                                outputStream.Write(sparseSegmentWrapper.Data, segmentBufferIndex, tileSize);
                                segmentBufferIndex += tileSize;

                                if (!process.Update((float)segmentBufferIndex / segmentFilteredBytes))
                                {
                                    break;
                                }
                            }
                        }
                    }

                    if (progressManager.IsCanceled())
                    {
                        break;
                    }
                }

                // write low-res
                var lowResActualPointCount = lowResCounts.Data.Cast<int>().Sum();
                outputStream.Write(lowResWrapper.Data, 0, lowResActualPointCount * lowResWrapper.PointSizeBytes);
            }

            var actualDensity = new PointCloudTileDensity(tileCounts, m_source.Quantization);
            var tileSet       = new PointCloudTileSet(m_source, actualDensity, tileCounts, lowResCounts);
            var tileSource    = new PointCloudTileSource(tiledFile, tileSet, analysis.Statistics);

            if (!progressManager.IsCanceled())
            {
                tileSource.IsDirty = false;
            }

            tileSource.WriteHeader();

            return tileSource;
        }
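To make the low-res budget arithmetic in TilePointFileIndex concrete, a worked sketch with assumed numbers (both inputs are hypothetical):

        // assumed: lowResPointCountMax = 1000000, validTiles = 2500
        // lowResPointsPerTile = 1000000 / 2500 = 400
        // lowResTileSize      = (ushort)Math.Sqrt(400) = 20
        // so each valid tile keeps at most a 20 x 20 selection of low-resolution
        // points, and lowResGrid is created as Grid<int>.Create(20, 20, true, -1)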
Example #4
        public PointCloudTileSource Process(ProgressManager progressManager)
        {
            progressManager.Log("<= {0}", m_inputHandler.FilePath);

            PerformanceManager.Start(m_inputHandler.FilePath);

            // check for existing tile source
            LoadFromCache(progressManager);

            if (m_tileSource == null)
            {
                using (var process = progressManager.StartProcess("ProcessSet"))
                {
                    m_binarySource = m_inputHandler.GenerateBinarySource(progressManager);
                    m_tiledHandler = LASFile.Create(m_tiledHandler.FilePath, m_binarySource);

                    using (var segmentBuffer = BufferManager.AcquireBuffer(m_id, (int)PROPERTY_SEGMENT_SIZE.Value, true))
                    {
                        var tileManager = new PointCloudTileManager(m_binarySource);
                        m_tileSource = tileManager.TilePointFileIndex(m_tiledHandler, segmentBuffer, progressManager);
                    }

#warning this was for xyz, but I have not yet re-implemented that anyway
                    //if (m_binarySource.FilePath != m_inputHandler.FilePath)
                    //    File.Delete(m_binarySource.FilePath);

                    if (m_tileSource.IsDirty)
                    {
                        m_tileSource.Close();
                        File.Delete(m_tileSource.FilePath);
                        m_tileSource = null;

                        process.LogTime("=> Processing Cancelled");
                    }
                    else
                    {
                        process.LogTime("=> Processing Completed");
                    }
                }

                GC.Collect();
            }

            TransferRate averageReadSpeed  = PerformanceManager.GetReadSpeed();
            TransferRate averageWriteSpeed = PerformanceManager.GetWriteSpeed();

            Context.WriteLine("IO Read Speed: {0}", averageReadSpeed);
            Context.WriteLine("IO Write Speed: {0}", averageWriteSpeed);

            //{
            //    // test
            //    Stopwatch stopwatch = new Stopwatch();
            //    stopwatch.Start();

            //    PointCloudTile tempTile = m_tileSource.TileSet[0, 0];
            //    Grid<float> grid = new Grid<float>(tempTile.Extent, 540, (float)m_tileSource.Extent.MinZ - 1.0f, true);
            //    Grid<uint> quantizedGrid = new Grid<uint>(grid.SizeX, grid.SizeY, m_tileSource.Extent, true);

            //    using (GridTileSource<float> gridSource = new GridTileSource<float>(m_tiledPath + ".grid", grid.SizeX, grid.SizeY, m_tileSource.TileSet.Cols, m_tileSource.TileSet.Rows))
            //    {
            //        int tempBufferSize = (int)(m_tileSource.TileSet.Max(t => t.PointCount));
            //        byte[] tempBuffer = new byte[tempBufferSize * m_tileSource.PointSizeBytes];

            //        foreach (PointCloudTile tile in m_tileSource)
            //        {
            //            m_tileSource.LoadTileGrid(tile, tempBuffer, grid, quantizedGrid);
            //            gridSource.WriteTile(tile.Col, tile.Row, grid.Data);

            //            if (!progressManager.Update((float)tile.Index / m_tileSource.TileSet.TileCount))
            //                break;
            //        }

            //        //gridSource.ReadTile(tempTile.Col, tempTile.Row, grid.Data);
            //    }
            //    m_tileSource.Close();

            //    progressManager.Log(stopwatch, "Generated GRID");
            //}

            return m_tileSource;
        }
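A minimal caller sketch; it relies only on the behaviour shown above, where a cancelled run closes the dirty tile source, deletes its file, and leaves the return value null (the surrounding processingSet object is assumed):

        // var tileSource = processingSet.Process(progressManager);   // processingSet is hypothetical
        // if (tileSource == null)
        // {
        //     // the run was cancelled and the partial tile file was already deleted
        // }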
Example #5
        internal ModelImportStatus BatchImportLasFiles(string[] filePaths, Guid currentProjectID)
        {
            ModelImportStatus finalStatus = new ModelImportStatus();

            LASBatchImportTools ll       = new LASBatchImportTools();
            List<string>        messages = new List<string>();
            int    importCount           = 0;
            int    failCount             = 0;
            string report = "";
            Dictionary<string, ModelImportStatus> mosList = new Dictionary<string, ModelImportStatus>();
            bool reportStatus = false;

            if (this.backgroundWorker != null)
            {
                reportStatus = true;
            }


            int fileCount   = filePaths.Length;
            int thisFileNum = 0;

            var dataDict = new Dictionary<string, List<object>>();

            foreach (string file in filePaths)
            {
                var    data = new List <object>();
                double pct  = ((double)thisFileNum / (double)fileCount) * 100.0;
                thisFileNum++;


                if (reportStatus)
                {
                    backgroundWorker.ReportProgress((int)pct, "Processing las file " + thisFileNum + " of " + fileCount + ", " + file);
                }

                ModelImportStatus mis = new ModelImportStatus();

                NKD.Import.Client.Processing.LASImport li = new NKD.Import.Client.Processing.LASImport();
                LASFile lf = li.GetLASFile(file, mis);
                if (lf == null)
                {
                    mis.errorMessages.Add("Failed to load LAS file " + file);
                    mosList.Add(file, mis);
                    continue;
                }

                data = ll.ProcessLASFile(lf, file, mis, currentProjectID, this.backgroundWorker);

                // count the file as failed when its import status reports an error
                if (mis.finalErrorCode != ModelImportStatus.OK)
                {
                    string msg = "Failed to process LAS file " + file;
                    messages.Add(msg);
                    report += msg + "\n";
                    failCount++;
                }
                else
                {
                    importCount++;
                }

                mosList.Add(file, mis);
                dataDict.Add(file, data);

                //if (thisFileNum % 2 == 0 || (filePaths.Length-thisFileNum) < 1) //FIXME magic number, should look at used memory and make a choice on that
                //{
                //insert into DB to avoid memory issues
                //var subdict = dataDict;
                PushToDB(dataDict);
                dataDict = new Dictionary<string, List<object>>();

                //PushToDB(subdict);
                //subdict = null;

                GC.Collect();
                GC.WaitForPendingFinalizers();
                //}
            }

            string finalReport = "Immport status:\nFiles imported:" + importCount + "\nFailed files:" + failCount + "\n\nMessages:\n";

            finalReport += report;
            int totRecordsAddedCount = 0;
            int totLinesReadCount    = 0;

            foreach (KeyValuePair<string, ModelImportStatus> kvp in mosList)
            {
                string            lfName = kvp.Key;
                ModelImportStatus ms     = kvp.Value;
                totRecordsAddedCount += ms.recordsAdded;
                totLinesReadCount    += ms.linesReadFromSource;
                if (ms.finalErrorCode != ModelImportStatus.OK)
                {
                    finalStatus.finalErrorCode = ModelImportStatus.GENERAL_LOAD_ERROR;
                }
                foreach (string m in ms.warningMessages)
                {
                    finalStatus.warningMessages.Add(m);
                }
                foreach (string m in ms.errorMessages)
                {
                    finalStatus.errorMessages.Add(m);
                }
            }
            finalStatus.linesReadFromSource = totLinesReadCount;
            finalStatus.recordsAdded        = totRecordsAddedCount;

            return finalStatus;
        }
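A possible shape for the memory-based batching that the FIXME comment in the loop asks for, sketched with an assumed flush threshold; GC.GetTotalMemory is the only API relied on here:

        // in place of the unconditional PushToDB(dataDict) inside the loop:
        // const long flushThresholdBytes = 512L * 1024 * 1024;   // assumed threshold
        // if (thisFileNum == fileCount || GC.GetTotalMemory(false) > flushThresholdBytes)
        // {
        //     PushToDB(dataDict);
        //     dataDict = new Dictionary<string, List<object>>();
        //     GC.Collect();
        //     GC.WaitForPendingFinalizers();
        // }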