/// <summary>
/// This creates a window from this raster. The window will still save to the same
/// source file, but only has access to a small window of data, so it can be loaded like a buffer.
/// The georeferenced extents will be for the new window, not the original raster. startRow and endRow
/// will exist in the new raster, however, so that it knows how to copy values back to the original raster.
/// </summary>
/// <param name="startRow">The 0 based integer index of the top row to get from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to get from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to get from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to get from this raster. The largest allowed value is NumColumns - 1</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster restricted to the requested window.</returns>
public new IRaster GetWindow(int startRow, int endRow, int startColumn, int endColumn, bool inRam)
{
    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;
    var result = new BinaryRaster<T>();
    result.Filename = Filename;
    result.Projection = Projection;
    result.DataType = typeof(T); // BUGFIX: was typeof(int); the window stores values of the generic type T.
    result.NumRows = numRows;
    result.NumColumns = numCols;
    result.NumRowsInFile = NumRowsInFile;
    result.NumColumnsInFile = NumColumnsInFile;
    result.NoDataValue = NoDataValue;

    // Window offsets are expressed relative to this raster, which may itself be a
    // window, so the file-relative position adds this raster's own start offsets.
    result.StartColumn = startColumn + StartColumn;
    result.StartRow = startRow + StartRow;
    result.EndColumn = endColumn + StartColumn;
    result.EndRow = endRow + StartRow; // BUGFIX: was EndRow + StartRow, which ignored the requested endRow.

    // Reposition the "raster" so that it matches the window, not the whole raster:
    // X = [0] + [1] * column + [2] * row;
    // Y = [3] + [4] * column + [5] * row;
    result.Bounds = new RasterBounds(result.NumRows, result.NumColumns, new double[6]);
    result.Bounds.AffineCoefficients[0] = Bounds.AffineCoefficients[0] + Bounds.AffineCoefficients[1] * startColumn + Bounds.AffineCoefficients[2] * startRow;
    result.Bounds.AffineCoefficients[1] = Bounds.AffineCoefficients[1];
    result.Bounds.AffineCoefficients[2] = Bounds.AffineCoefficients[2];
    result.Bounds.AffineCoefficients[3] = Bounds.AffineCoefficients[3] + Bounds.AffineCoefficients[4] * startColumn + Bounds.AffineCoefficients[5] * startRow;
    result.Bounds.AffineCoefficients[4] = Bounds.AffineCoefficients[4];
    result.Bounds.AffineCoefficients[5] = Bounds.AffineCoefficients[5];

    // Now we can copy any values currently in memory; otherwise defer to windowed file access.
    if (IsInRam)
    {
        result.Data = new T[numRows][];
        ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, endRow);
        pm.StartValue = startRow;

        // copy values directly using both data structures
        for (int row = 0; row < numRows; row++)
        {
            result.Data[row] = new T[numCols];
            for (int col = 0; col < numCols; col++)
            {
                result.Data[row][col] = Data[startRow + row][startColumn + col];
            }
            pm.CurrentValue = row;
        }
        pm.Reset();
    }
    else
    {
        result.OpenWindow(Filename, startRow, endRow, startColumn, endColumn, inRam);
    }
    result.Value = new ValueGrid<T>(result);
    return result;
}
// ------------------------------------------FROM AND TO IN RAM ONLY -----------------

/// <summary>
/// This creates a completely new raster from the windowed domain on the original raster. This new raster
/// will not have a source file, and values like NumRowsInFile will correspond to the in memory version.
/// All the values will be copied to the new source file. InRam must be true at this level.
/// </summary>
/// <param name="fileName">The string path for the new raster. NOTE(review): not referenced in the visible body; presumably consumed by a later save step - confirm against callers.</param>
/// <param name="startRow">The 0 based integer index of the top row to copy from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to copy from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to copy from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to copy from this raster. The largest allowed value is NumColumns - 1</param>
/// <param name="copyValues">If this is true, the values are saved to the file. If this is false and the data can be loaded into Ram, no file handling is done. Otherwise, a file of NoData values is created. NOTE(review): not referenced in the visible body - confirm.</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster</returns>
/// <exception cref="ArgumentException">Thrown when inRam is false, when the requested window exceeds 64 million cells, or when this raster's values are not currently in memory.</exception>
public IRaster CopyWindow(string fileName, int startRow, int endRow, int startColumn, int endColumn, bool copyValues, bool inRam)
{
    // This overload only supports fully in-memory copies; anything larger (or not in RAM)
    // requires a file-backed raster implementation, hence the "requires cast" message.
    if (inRam == false || (endColumn - startColumn + 1) * (endRow - startRow + 1) > 64000000)
        throw new ArgumentException(DataStrings.RasterRequiresCast);
    if (IsInRam == false)
        throw new ArgumentException(DataStrings.RasterRequiresCast);

    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;

    var result = new Raster<T>(numRows, numCols);
    result.Projection = Projection;

    // The affine coefficients defining the world file are the same except that they are translated over. Only the position of the
    // upper left corner changes. Everything else is the same as the previous raster.
    result.Bounds.AffineCoefficients = new AffineTransform(Bounds.AffineCoefficients).TransfromToCorner(startColumn, startRow);

    ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, numRows);

    // copy values directly using both data structures
    for (int row = 0; row < numRows; row++)
    {
        for (int col = 0; col < numCols; col++)
        {
            result.Data[row][col] = Data[startRow + row][startColumn + col];
        }
        pm.CurrentValue = row;
    }
    pm.Reset();

    result.Value = new ValueGrid<T>(result);
    return result;
}
/// <summary>
/// Fires the FinishedReading event after flagging completion and releasing the read buffer.
/// </summary>
protected virtual void OnFinishedReading()
{
    // Mark completion and drop the working buffer before notifying subscribers.
    IsFinishedReading = true;
    ProgressMeter.Reset();
    _buffer = null;

    // Snapshot the delegate so the invocation reads the event field exactly once.
    var handler = FinishedReading;
    handler?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// This writes a window of byte values (ARGB order) to the file. This assumes that the headers already exist.
/// If the headers have not been created or the bounds extend beyond the header numRows and numColumns for the
/// specified scale, this will throw an exception.
/// </summary>
/// <param name="bytes">The byte array</param>
/// <param name="startRow">The integer start row</param>
/// <param name="startColumn">The integer start column</param>
/// <param name="numRows">The integer number of rows in the window</param>
/// <param name="numColumns">The integer number of columns in the window</param>
/// <param name="scale">The integer scale. 0 is the original image.</param>
/// <exception cref="PyramidUndefinedHeaderException">Occurs when attempting to write data before the headers are defined</exception>
/// <exception cref="PyramidOutOfBoundsException">Occurs if the range specified is outside the bounds for the specified image scale</exception>
public void WriteWindow(byte[] bytes, int startRow, int startColumn, int numRows, int numColumns, int scale)
{
    // Delegate to the overload that takes an explicit meter, tracking one step per row written.
    var progress = new ProgressMeter(ProgressHandler, "Saving Pyramid Values", numRows);
    WriteWindow(bytes, startRow, startColumn, numRows, numColumns, scale, progress);
    progress.Reset();
}
/// <summary>
/// Populates the given streams for the shp and shx file when not in IndexMode.
/// Writes one point-type record per feature; empty geometries are written as null shapes.
/// </summary>
/// <param name="shpStream">Stream that is used to write the shp file.</param>
/// <param name="shxStream">Stream that is used to write the shx file.</param>
/// <returns>The lengths of the streams in bytes. NOTE(review): ShpLength appears to be in 16-bit words, like the running offset - confirm callers' expectation.</returns>
private StreamLengthPair PopulateShpAndShxStreamsNotIndexed(Stream shpStream, Stream shxStream)
{
    var progressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    int fid = 0;
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words

    foreach (IFeature f in Features)
    {
        bool isNullShape = false;
        int contentLength;
        if (f.Geometry.IsEmpty)
        {
            // A null shape record carries only the 4-byte shape type, i.e. 2 words of content.
            contentLength = 2;
            isNullShape = true;
        }
        else
        {
            // Fixed content length per record for this shape type.
            contentLength = GetContentLength(Header.ShapeType);
        }

        // .shx index record: big-endian offset (in words) and content length.
        shxStream.WriteBe(offset);
        shxStream.WriteBe(contentLength);

        // .shp record header: big-endian 1-based record number and content length.
        shpStream.WriteBe(fid + 1);
        shpStream.WriteBe(contentLength);

        if (isNullShape)
        {
            shpStream.WriteLe((int)ShapeType.NullShape); // Byte 8 Shape Type 0 Integer 1 Little
        }
        else
        {
            shpStream.WriteLe((int)Header.ShapeType); // Byte 8 Shape Type Integer 1 Little
            // Only the first coordinate is written: this path handles point-type shapefiles.
            Coordinate c = f.Geometry.Coordinates[0];
            shpStream.WriteLe(c.X);
            shpStream.WriteLe(c.Y);
            if (Header.ShapeType == ShapeType.PointZ)
            {
                shpStream.WriteLe(c.Z);
            }
            if (Header.ShapeType == ShapeType.PointM || Header.ShapeType == ShapeType.PointZ)
            {
                // M follows Z for PointZ, or X/Y for PointM.
                shpStream.WriteLe(c.M);
            }
        }

        progressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // header bytes: the 8-byte record header equals 4 words
        offset += contentLength; // adding the content length from each loop calculates the word offset
    }

    progressMeter.Reset();
    return(new StreamLengthPair { ShpLength = offset, ShxLength = 50 + fid * 4 });
}
/// <summary>
/// This creates an IN MEMORY ONLY window from the in-memory window of this raster. If, however, the requested range
/// is outside of what is contained in the in-memory portions of this raster, an appropriate cast
/// is required to ensure that you have the correct File handling, like a BinaryRaster etc.
/// </summary>
/// <param name="startRow">The 0 based integer index of the top row to get from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to get from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to get from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to get from this raster. The largest allowed value is NumColumns - 1</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster</returns>
/// <exception cref="ArgumentException">Thrown when this raster is not in RAM or the requested extents fall outside the in-memory window.</exception>
public IRaster GetWindow(int startRow, int endRow, int startColumn, int endColumn, bool inRam)
{
    if (IsInRam == false)
    {
        throw new ArgumentException(DataStrings.RasterRequiresCast);
    }

    // BUGFIX: the column comparisons were inverted (StartColumn < startColumn || EndColumn > endColumn),
    // which rejected valid in-memory requests and accepted out-of-range ones.
    if (startRow < StartRow || endRow > EndRow || startColumn < StartColumn || endColumn > EndColumn)
    {
        // the requested extents are outside of the extents that have been windowed into ram. File Handling is required.
        throw new ArgumentException(DataStrings.RasterRequiresCast);
    }

    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;

    Raster<T> result = new Raster<T>(numRows, numCols)
    {
        Filename = Filename,
        Projection = Projection,
        DataType = typeof(T), // BUGFIX: was typeof(int); the window stores values of the generic type T.
        NumRows = numRows,
        NumColumns = numCols,
        NumRowsInFile = NumRowsInFile,
        NumColumnsInFile = NumColumnsInFile,
        NoDataValue = NoDataValue,
        StartColumn = startColumn,
        StartRow = startRow,
        EndColumn = endColumn,
        EndRow = endRow, // BUGFIX: was EndRow, which copied this raster's old extent instead of the requested one.
        FileType = FileType,

        // Reposition the new "raster" so that it matches the specified window, not the whole raster.
        Bounds = { AffineCoefficients = new AffineTransform(Bounds.AffineCoefficients).TransfromToCorner(startColumn, startRow) }
    };

    // Now we can copy any values currently in memory.
    ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, endRow) { StartValue = startRow };

    // copy values directly using both data structures
    for (int row = 0; row < numRows; row++)
    {
        for (int col = 0; col < numCols; col++)
        {
            result.Data[row][col] = Data[startRow + row][startColumn + col];
        }
        pm.CurrentValue = row;
    }
    pm.Reset();

    result.Value = new ValueGrid<T>(result);
    return result;
}
/// <summary>
/// This assumes that the base image has been written to the file. This will now attempt to calculate
/// the down-sampled images. Each coarser scale is produced by combining three adjacent rows of the
/// finer scale with Blur, then halving the row with DownSample.
/// </summary>
public void CreatePyramids2()
{
    double count = Header.ImageHeaders[0].NumRows;
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", count);
    int prog = 0;

    // Each pass reads scale N and writes scale N + 1 at half the resolution.
    for (int scale = 0; scale < Header.ImageHeaders.Length - 1; scale++)
    {
        PyramidImageHeader ph = Header.ImageHeaders[scale];
        int rows = ph.NumRows;
        int cols = ph.NumColumns;

        // Horizontal Blur Pass
        // First output row: combine rows 0 and 1 (null stands in for the missing row above).
        byte[] r1 = ReadWindow(0, 0, 1, cols, scale);
        byte[] r2 = ReadWindow(1, 0, 1, cols, scale);
        byte[] vals = Blur(null, r1, r2);
        vals = DownSample(vals);
        WriteWindow(vals, 0, 0, 1, cols / 2, scale + 1);
        prog++;
        pm.CurrentValue = prog;

        // Second output row: combine rows 0-2.
        byte[] r3 = ReadWindow(2, 0, 1, cols, scale);
        vals = Blur(r1, r2, r3);
        vals = DownSample(vals);
        WriteWindow(vals, 1, 0, 1, cols / 2, scale + 1);
        prog++;
        pm.CurrentValue = prog;

        // Slide a three-row window down the image; only even source rows emit an output row,
        // so the output has half as many rows as the input.
        for (int row = 3; row < rows - 1; row++)
        {
            r1 = r2;
            r2 = r3;
            r3 = ReadWindow(row, 0, 1, cols, scale);
            prog++;
            pm.CurrentValue = prog;
            if (row % 2 == 1)
            {
                continue;
            }
            vals = Blur(r1, r2, r3);
            vals = DownSample(vals);
            WriteWindow(vals, (row / 2) - 1, 0, 1, cols / 2, scale + 1);
        }

        // Final output row when the last source row index is even: r2 is mirrored
        // in place of the missing row below the image.
        if ((rows - 1) % 2 == 0)
        {
            vals = Blur(r2, r3, r2);
            vals = DownSample(vals);
            WriteWindow(vals, (rows / 2) - 1, 0, 1, cols / 2, scale + 1);
        }
        prog++;
        pm.CurrentValue = prog;
    }
    pm.Reset();
}
/// <summary>
/// Fires the FinishedReading event after flagging completion and releasing the read buffer.
/// </summary>
protected virtual void OnFinishedReading()
{
    _isFinishedReading = true;
    _progressMeter.Reset();
    _buffer = null;

    // BUGFIX: the previous explicit null-check-then-invoke could race with a concurrent
    // unsubscribe between the check and the call; ?.Invoke reads the delegate field once.
    // This also matches the pattern used by the sibling OnFinishedReading implementation.
    FinishedReading?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// This creates a window from this raster. The window will still save to the same
/// source file, but only has access to a small window of data, so it can be loaded like a buffer.
/// The georeferenced extents will be for the new window, not the original raster. startRow and endRow
/// will exist in the new raster, however, so that it knows how to copy values back to the original raster.
/// </summary>
/// <param name="startRow">The 0 based integer index of the top row to get from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to get from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to get from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to get from this raster. The largest allowed value is NumColumns - 1</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster restricted to the requested window.</returns>
public new IRaster GetWindow(int startRow, int endRow, int startColumn, int endColumn, bool inRam)
{
    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;
    var result = new BinaryRaster<T>
    {
        Filename = Filename,
        Projection = Projection,
        DataType = typeof(T), // BUGFIX: was typeof(int); the window stores values of the generic type T.
        NumRows = numRows,
        NumColumns = numCols,
        NumRowsInFile = NumRowsInFile,
        NumColumnsInFile = NumColumnsInFile,
        NoDataValue = NoDataValue,

        // Offsets are relative to this raster, which may itself be a window,
        // so the file-relative position adds this raster's own start offsets.
        StartColumn = startColumn + StartColumn,
        StartRow = startRow + StartRow,
        EndColumn = endColumn + StartColumn,
        EndRow = endRow + StartRow // BUGFIX: was EndRow + StartRow, which ignored the requested endRow.
    };

    // Reposition the "raster" so that it matches the window, not the whole raster.
    var ac = new AffineTransform(Bounds.AffineCoefficients).TransfromToCorner(startColumn, startRow);
    result.Bounds = new RasterBounds(result.NumRows, result.NumColumns, ac);

    // Now we can copy any values currently in memory; otherwise defer to windowed file access.
    if (IsInRam)
    {
        result.Data = new T[numRows][];
        ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, endRow) { StartValue = startRow };

        // copy values directly using both data structures
        for (int row = 0; row < numRows; row++)
        {
            result.Data[row] = new T[numCols];
            for (int col = 0; col < numCols; col++)
            {
                result.Data[row][col] = Data[startRow + row][startColumn + col];
            }
            pm.CurrentValue = row;
        }
        pm.Reset();
    }
    else
    {
        result.OpenWindow(Filename, startRow, endRow, startColumn, endColumn, inRam);
    }
    result.Value = new ValueGrid<T>(result);
    return result;
}
/// <summary>
/// This tests each feature of the input
/// </summary>
/// <param name="self">This featureSet</param>
/// <param name="other">The featureSet to perform intersection with</param>
/// <param name="joinType">The attribute join type</param>
/// <param name="progHandler">A progress handler for status messages</param>
/// <returns>An IFeatureSet with the intersecting features, broken down based on the join Type. NOTE(review): returns null for unrecognized join types - confirm callers handle that.</returns>
public static IFeatureSet Intersection(this IFeatureSet self, IFeatureSet other, FieldJoinType joinType, IProgressHandler progHandler)
{
    IFeatureSet result = null;
    ProgressMeter pm = new ProgressMeter(progHandler, "Calculating Intersection", self.Features.Count);
    if (joinType == FieldJoinType.All)
    {
        // Keep attributes from both sides: intersect every feature of self with each
        // candidate from other whose extent overlaps it.
        result = CombinedFields(self, other);

        // Intersection is symmetric, so only consider I X J where J <= I
        if (!self.AttributesPopulated)
        {
            self.FillAttributes();
        }
        if (!other.AttributesPopulated)
        {
            other.FillAttributes();
        }

        for (int i = 0; i < self.Features.Count; i++)
        {
            IFeature selfFeature = self.Features[i];
            // Spatial pre-filter: only features whose extents overlap can intersect.
            List<IFeature> potentialOthers = other.Select(selfFeature.Geometry.EnvelopeInternal.ToExtent());
            foreach (IFeature otherFeature in potentialOthers)
            {
                selfFeature.Intersection(otherFeature, result, joinType);
            }
            pm.CurrentValue = i;
        }
        pm.Reset();
    }
    else if (joinType == FieldJoinType.LocalOnly)
    {
        // Keep only self's attributes: union all of other into one geometry, then clip
        // self's shapes against that union.
        if (!self.AttributesPopulated)
        {
            self.FillAttributes();
        }

        result = new FeatureSet();
        result.CopyTableSchema(self);
        result.FeatureType = self.FeatureType;
        if (other.Features != null && other.Features.Count > 0)
        {
            // NOTE(review): incremental Union in a loop is O(n^2); a cascaded union would
            // scale better - confirm before changing behavior.
            pm = new ProgressMeter(progHandler, "Calculating Union", other.Features.Count);
            IFeature union = other.Features[0];
            for (int i = 1; i < other.Features.Count; i++)
            {
                union = union.Union(other.Features[i].Geometry);
                pm.CurrentValue = i;
            }
            pm.Reset();

            pm = new ProgressMeter(progHandler, "Calculating Intersections", self.NumRows());
            Extent otherEnvelope = union.Geometry.EnvelopeInternal.ToExtent();
            for (int shp = 0; shp < self.ShapeIndices.Count; shp++)
            {
                // Skip shapes whose extents cannot overlap the union at all.
                if (!self.ShapeIndices[shp].Extent.Intersects(otherEnvelope))
                {
                    continue;
                }
                IFeature selfFeature = self.GetFeature(shp);
                selfFeature.Intersection(union, result, joinType);
                pm.CurrentValue = shp;
            }
            pm.Reset();
        }
    }
    else if (joinType == FieldJoinType.ForeignOnly)
    {
        // Mirror of LocalOnly: keep only other's attributes, clipping other against the union of self.
        if (!other.AttributesPopulated)
        {
            other.FillAttributes();
        }

        result = new FeatureSet();
        result.CopyTableSchema(other);
        result.FeatureType = other.FeatureType;
        if (self.Features != null && self.Features.Count > 0)
        {
            pm = new ProgressMeter(progHandler, "Calculating Union", self.Features.Count);
            IFeature union = self.Features[0];
            for (int i = 1; i < self.Features.Count; i++)
            {
                union = union.Union(self.Features[i].Geometry);
                pm.CurrentValue = i;
            }
            pm.Reset();

            if (other.Features != null)
            {
                pm = new ProgressMeter(progHandler, "Calculating Intersection", other.Features.Count);
                for (int i = 0; i < other.Features.Count; i++)
                {
                    other.Features[i].Intersection(union, result, FieldJoinType.LocalOnly);
                    pm.CurrentValue = i;
                }
            }
            pm.Reset();
        }
    }
    return(result);
}
/// <summary>
/// Gets the count of members that match the expression
/// </summary>
/// <param name="expressions">The string expression to test</param>
/// <param name="progressHandler">The progress handler that can also cancel the counting</param>
/// <param name="maxSampleSize">The integer maximum sample size from which to draw counts. If this is negative, it will not be used.</param>
/// <returns>The integer count of the members that match the expression.</returns>
public override int[] GetCounts(string[] expressions, ICancelProgressHandler progressHandler, int maxSampleSize)
{
    // When the attribute table is already in memory the base implementation can count directly.
    if (AttributesPopulated) return base.GetCounts(expressions, progressHandler, maxSampleSize);

    int[] counts = new int[expressions.Length];

    // The most common case would be no filter expression, in which case the count is simply the number of shapes.
    bool requiresRun = false;
    for (int iex = 0; iex < expressions.Length; iex++)
    {
        if (!string.IsNullOrEmpty(expressions[iex]))
        {
            requiresRun = true;
        }
        else
        {
            counts[iex] = NumRows();
        }
    }
    if (!requiresRun) return counts;

    // Page attribute rows off disk in batches of 5000 rather than loading the whole table.
    AttributePager ap = new AttributePager(this, 5000);
    ProgressMeter pm = new ProgressMeter(progressHandler, "Calculating Counts", ap.NumPages());

    // Don't bother to use a sampling approach if the number of rows is on the same order of magnitude as the number of samples.
    if (maxSampleSize > 0 && maxSampleSize < NumRows() / 2)
    {
        // Sampling path: build a small table of randomly chosen rows, spread evenly across pages.
        DataTable sample = new DataTable();
        sample.Columns.AddRange(GetColumns());
        Dictionary<int, int> usedRows = new Dictionary<int, int>();
        int samplesPerPage = maxSampleSize / ap.NumPages();
        Random rnd = new Random(DateTime.Now.Millisecond);
        for (int page = 0; page < ap.NumPages(); page++)
        {
            for (int i = 0; i < samplesPerPage; i++)
            {
                // Rejection-sample a row index not used before within the current page.
                // NOTE(review): if samplesPerPage approaches the page size (e.g. on a short final
                // page), this loop could spin for a very long time - confirm bounds.
                int row;
                do
                {
                    row = rnd.Next(ap.StartIndex, ap.StartIndex + ap.PageSize);
                } while (usedRows.ContainsKey(row));
                usedRows.Add(row, row);
                sample.Rows.Add(ap.Row(row).ItemArray);
            }
            ap.MoveNext();

            pm.CurrentValue = page;
            if (progressHandler.Cancel) break;
            //Application.DoEvents();
        }

        // Evaluate each expression against the sampled table; a malformed expression
        // is logged and simply contributes nothing for that entry.
        for (int i = 0; i < expressions.Length; i++)
        {
            try
            {
                DataRow[] dr = sample.Select(expressions[i]);
                counts[i] += dr.Length;
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex);
            }
        }
        pm.Reset();
        // NOTE(review): these are raw sample hit counts, not scaled up to the full row
        // count - confirm callers expect an estimate on this scale.
        return counts;
    }

    // Exact path: evaluate every expression against every page of the table.
    for (int page = 0; page < ap.NumPages(); page++)
    {
        for (int i = 0; i < expressions.Length; i++)
        {
            DataRow[] dr = ap[page].Select(expressions[i]);
            counts[i] += dr.Length;
        }
        pm.CurrentValue = page;
        if (progressHandler.Cancel) break;
        //Application.DoEvents();
    }
    pm.Reset();
    return counts;
}
/// <summary>
/// Obtains a typed list of ShapefilePoint structures with double values associated with the various coordinates.
/// Reads every point record from the .shp file, filling the vertex, M and Z arrays and the shape index list.
/// </summary>
/// <param name="fileName">A string fileName</param>
/// <param name="progressHandler">Progress handler</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }

    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);

    // Get the basic header information.
    var header = new ShapefileHeader(fileName);
    Extent = header.ToExtent();

    // Check to ensure that the fileName is the correct shape type
    if (header.ShapeType != ShapeType.Point && header.ShapeType != ShapeType.PointM && header.ShapeType != ShapeType.PointZ)
    {
        throw new ApplicationException(DataStrings.FileNotPoints_S.Replace("%S", fileName));
    }

    // A shapefile consisting only of its 100-byte header contains no records.
    if (new FileInfo(fileName).Length == 100)
    {
        // the file is empty so we are done reading
        return;
    }

    var numShapes = shapeHeaders.Count;
    double[] m = null;
    double[] z = null;
    var vert = new double[2 * numShapes]; // X,Y interleaved, one pair per point
    if (header.ShapeType == ShapeType.PointM || header.ShapeType == ShapeType.PointZ)
    {
        m = new double[numShapes];
    }
    if (header.ShapeType == ShapeType.PointZ)
    {
        z = new double[numShapes];
    }

    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName)) { StepPercent = 5 };

    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 100.0 / numShapes);

            // Seek via the index offsets because records need not be contiguous in the file.
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            var recordNumber = reader.ReadInt32(Endian.BigEndian);
            Debug.Assert(recordNumber == shp + 1);
            var contentLen = reader.ReadInt32(Endian.BigEndian);
            Debug.Assert(contentLen == shapeHeaders[shp].ContentLength);

            var shapeType = (ShapeType)reader.ReadInt32();
            if (shapeType == ShapeType.NullShape)
            {
                // Null shapes contribute no coordinates; mark M as no-data and
                // jump straight to the index bookkeeping.
                if (m != null)
                {
                    m[shp] = double.MinValue;
                }
                goto fin;
            }

            // Read X
            var ind = 4; // bytes of record content consumed so far (the 4-byte shape type)
            vert[shp * 2] = reader.ReadDouble();
            ind += 8;
            // Read Y
            vert[shp * 2 + 1] = reader.ReadDouble();
            ind += 8;
            // Read Z
            if (z != null)
            {
                z[shp] = reader.ReadDouble();
                ind += 8;
            }
            // Read M - optional: some writers omit it, so check the record length first.
            if (m != null)
            {
                if (shapeHeaders[shp].ByteLength <= ind)
                {
                    m[shp] = double.MinValue; // record too short to hold an M value; treat as no-data
                }
                else
                {
                    m[shp] = reader.ReadDouble();
                    ind += 8;
                }
            }

        fin:
            // Every record, including null shapes, gets a single-point shape range entry.
            var shape = new ShapeRange(FeatureType.Point)
            {
                RecordNumber = recordNumber,
                StartIndex = shp,
                ContentLength = shapeHeaders[shp].ContentLength,
                NumPoints = 1,
                NumParts = 1
            };
            ShapeIndices.Add(shape);
            var part = new PartRange(vert, shp, 0, FeatureType.Point) { NumVertices = 1 };
            shape.Parts.Add(part);
            // A point's extent is degenerate: min and max are the point itself.
            shape.Extent = new Extent(new[] { vert[shp * 2], vert[shp * 2 + 1], vert[shp * 2], vert[shp * 2 + 1] });
        }
    }

    Vertex = vert;
    M = m;
    Z = z;
    progressMeter.Reset();
}
/// <summary>
/// Obtains a typed list of ShapefilePoint structures with double values associated with the various coordinates.
/// Reads every point record from the .shp file, filling the vertex, M and Z arrays and the shape index list.
/// </summary>
/// <param name="fileName">A string fileName</param>
/// <param name="progressHandler">Progress handler</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }

    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);

    // Get the basic header information.
    var header = new ShapefileHeader(fileName);
    Extent = header.ToExtent();

    // Check to ensure that the fileName is the correct shape type
    if (header.ShapeType != ShapeType.Point && header.ShapeType != ShapeType.PointM && header.ShapeType != ShapeType.PointZ)
    {
        throw new ApplicationException(DataStrings.FileNotPoints_S.Replace("%S", fileName));
    }

    // A shapefile consisting only of its 100-byte header contains no records.
    if (new FileInfo(fileName).Length == 100)
    {
        // the file is empty so we are done reading
        return;
    }

    var numShapes = shapeHeaders.Count;
    double[] m = null;
    double[] z = null;
    var vert = new double[2 * numShapes]; // X,Y interleaved, one pair per point
    if (header.ShapeType == ShapeType.PointM || header.ShapeType == ShapeType.PointZ)
    {
        m = new double[numShapes];
    }
    if (header.ShapeType == ShapeType.PointZ)
    {
        z = new double[numShapes];
    }

    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName)) { StepPercent = 5 };

    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 100.0 / numShapes);

            // Seek via the index offsets because records need not be contiguous in the file.
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            var recordNumber = reader.ReadInt32(Endian.BigEndian);
            Debug.Assert(recordNumber == shp + 1);
            var contentLen = reader.ReadInt32(Endian.BigEndian);
            Debug.Assert(contentLen == shapeHeaders[shp].ContentLength);

            var shapeType = (ShapeType)reader.ReadInt32();
            if (shapeType == ShapeType.NullShape)
            {
                // Null shapes contribute no coordinates; mark M as no-data and
                // jump straight to the index bookkeeping.
                if (m != null)
                {
                    m[shp] = double.MinValue;
                }
                goto fin;
            }

            // Read X
            var ind = 4; // bytes of record content consumed so far (the 4-byte shape type)
            vert[shp * 2] = reader.ReadDouble();
            ind += 8;
            // Read Y
            vert[shp * 2 + 1] = reader.ReadDouble();
            ind += 8;
            // Read Z
            if (z != null)
            {
                z[shp] = reader.ReadDouble();
                ind += 8;
            }
            // Read M - optional: some writers omit it, so check the record length first.
            if (m != null)
            {
                if (shapeHeaders[shp].ByteLength <= ind)
                {
                    m[shp] = double.MinValue; // record too short to hold an M value; treat as no-data
                }
                else
                {
                    m[shp] = reader.ReadDouble();
                    ind += 8;
                }
            }

        fin:
            // Every record, including null shapes, gets a single-point shape range entry.
            var shape = new ShapeRange(FeatureType.Point)
            {
                RecordNumber = recordNumber,
                StartIndex = shp,
                ContentLength = shapeHeaders[shp].ContentLength,
                NumPoints = 1,
                NumParts = 1
            };
            ShapeIndices.Add(shape);
            var part = new PartRange(vert, shp, 0, FeatureType.Point) { NumVertices = 1 };
            shape.Parts.Add(part);
            // A point's extent is degenerate: min and max are the point itself.
            shape.Extent = new Extent(new[] { vert[shp * 2], vert[shp * 2 + 1], vert[shp * 2], vert[shp * 2 + 1] });
        }
    }

    Vertex = vert;
    M = m;
    Z = z;
    progressMeter.Reset();
}
/// <summary>
/// Attempts to remove a range of shapes by index. This is optimized to
/// work better for large numbers. For one or two, using RemoveShapeAt might
/// be faster.
/// </summary>
/// <param name="indices">
/// The enumerable set of indices to remove.
/// </param>
public void RemoveShapesAt(IEnumerable<int> indices)
{
    if (IndexMode == false)
    {
        // BUGFIX: remove in descending index order. Removing in ascending order shifts the
        // positions of the remaining features, so later indices removed the wrong features.
        List<int> featureIndices = indices.ToList();
        featureIndices.Sort();
        featureIndices.Reverse();
        foreach (int index in featureIndices)
        {
            if (index < 0 || index >= _shapeIndices.Count) continue;
            Features.RemoveAt(index);
        }
        InitializeVertices();
        return;
    }

    List<int> remove = indices.ToList();
    remove.Sort();
    if (remove.Count == 0) return;

    // Build the complement of the removal set: the indices that survive.
    List<int> remaining = new List<int>();
    for (int i = 0; i < _shapeIndices.Count; i++)
    {
        if (remove.Count > 0 && remove[0] == i)
        {
            remove.Remove(i);
            continue;
        }
        remaining.Add(i);
    }

    // Repack the vertex (and Z/M) arrays to contain only the surviving shapes.
    List<double> vertex = new List<double>();
    List<double> z = new List<double>();
    List<double> m = new List<double>();
    int pointTotal = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Removing Vertices", remaining.Count);
    foreach (int index in remaining)
    {
        if (index < 0 || index >= _shapeIndices.Count) continue;
        ShapeRange sr = _shapeIndices[index];
        double[] xyShape = new double[sr.NumPoints * 2];
        Array.Copy(_vertices, sr.StartIndex * 2, xyShape, 0, sr.NumPoints * 2);
        vertex.AddRange(xyShape);

        // Fix for http://dotspatial.codeplex.com/workitem/174: Z shapefiles carry M values
        // alongside Z, so both are repacked here.
        // NOTE(review): shapes whose CoordinateType is M (without Z) do not have their M
        // values repacked on this path - confirm whether that case can reach this code.
        if (CoordinateType == CoordinateType.Z)
        {
            double[] zShape = new double[sr.NumPoints];
            Array.Copy(_z, sr.StartIndex, zShape, 0, sr.NumPoints);
            z.AddRange(zShape);

            double[] mShape = new double[sr.NumPoints];
            Array.Copy(_m, sr.StartIndex, mShape, 0, sr.NumPoints);
            m.AddRange(mShape);
        }

        // Rebase the shape's start index onto the repacked arrays.
        sr.StartIndex = pointTotal;
        pointTotal += sr.NumPoints;
        ProgressMeter.Next();
    }
    ProgressMeter.Reset();

    _vertices = vertex.ToArray();
    _m = m.ToArray();
    _z = z.ToArray();

    // Drop the removed entries from the shape index list.
    remove = indices.ToList();
    remove.Sort();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Removing indices", remove.Count);
    List<ShapeRange> result = new List<ShapeRange>();
    int myIndex = 0;
    foreach (ShapeRange range in _shapeIndices)
    {
        if (remove.Count > 0 && remove[0] == myIndex)
        {
            remove.RemoveAt(0);
        }
        else
        {
            result.Add(range);
        }
        ProgressMeter.Next();
        myIndex++;
    }
    _shapeIndices = result;
    ProgressMeter.Reset();

    // Remove attribute rows in descending order so earlier removals don't shift later indices.
    remove = indices.ToList();
    remove.Sort();
    remove.Reverse();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Removing Attribute Rows", remove.Count);
    foreach (int index in remove)
    {
        if (AttributesPopulated)
        {
            DataTable.Rows.RemoveAt(index);
        }
        ProgressMeter.Next();
    }
    ProgressMeter.Reset();

    // Updating the vertex array means that the parts are now pointing
    // to the wrong array of vertices internally. This doesn't affect
    // rendering, but will affect selection.
    ProgressMeter = new ProgressMeter(ProgressHandler, "Reassigning part vertex pointers", _shapeIndices.Count);
    foreach (ShapeRange shape in _shapeIndices)
    {
        foreach (PartRange part in shape.Parts)
        {
            part.Vertices = _vertices;
        }
        ProgressMeter.Next(); // BUGFIX: this meter was never advanced
    }
    ProgressMeter.Reset(); // BUGFIX: this meter was never reset
}
/// <summary>
/// Loads the shapes from the given file into the given shapefile.
/// </summary>
/// <param name="fileName">Name of the file whose shapes should get loaded.</param>
/// <param name="progressHandler">ProgressHandler that shows the progress.</param>
/// <param name="shapefile">Shapefile the shapes are loaded into.</param>
/// <param name="featureType">FeatureType that should be inside the file.</param>
/// <exception cref="ArgumentNullException">Throws an ArgumentNullException, if the shapefile is null.</exception>
/// <exception cref="ArgumentException">Throws an ArgumentException, if the FeatureType is Line but the files doesn't contain lines or the FeatureType is Polygon and the file doesn't contain polygons.</exception>
/// <exception cref="NotSupportedException">Throws a NotSupportedException, if a FeatureType other than Line or Polygon is passed.</exception>
/// <exception cref="FileNotFoundException">Throws a FileNotFoundException, if the file whith the path from fileName doesn't exist.</exception>
/// <exception cref="NullReferenceException">Throws a NullReferenceException, if the fileName is null.</exception>
internal static void FillLines(string fileName, IProgressHandler progressHandler, Shapefile shapefile, FeatureType featureType)
{
    // Check to ensure that the fileName is the correct shape type
    switch (featureType)
    {
        case FeatureType.Line:
            if (!CanBeRead(fileName, shapefile, ShapeType.PolyLine, ShapeType.PolyLineM, ShapeType.PolyLineZ))
            {
                return;
            }
            break;
        case FeatureType.Polygon:
            if (!CanBeRead(fileName, shapefile, ShapeType.Polygon, ShapeType.PolygonM, ShapeType.PolygonZ))
            {
                return;
            }
            break;
        default:
            throw new NotSupportedException(DataStrings.ShapeType0NotSupported);
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    var header = shapefile.Header;
    var shapeHeaders = shapefile.ReadIndexFile(fileName);
    int numShapes = shapeHeaders.Count;

    // M-only shapes carry a measure array; Z shapes carry both a Z array and (optionally) measures.
    bool isM = false, isZ = false;
    switch (header.ShapeType)
    {
        case ShapeType.PolyLineM:
        case ShapeType.PolygonM:
            isM = true;
            break;
        case ShapeType.PolyLineZ:
        case ShapeType.PolygonZ:
            isZ = true;
            isM = true;
            break;
    }

    int totalPointsCount = 0;
    int totalPartsCount = 0;
    var shapeIndices = new List<ShapeRange>(numShapes);
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };

    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        var boundsBytes = new byte[4 * 8];
        var bounds = new double[4];

        // First pass: read every record header to size the vertex/part arrays.
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);

            // Read from the index file because some deleted records
            // might still exist in the .shp file.
            long offset = shapeHeaders[shp].ByteOffset;
            reader.Seek(offset, SeekOrigin.Begin);

            var shape = new ShapeRange(featureType, shapefile.CoordinateType)
            {
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                ShapeType = (ShapeType)reader.ReadInt32(),
                StartIndex = totalPointsCount
            };

            // Fixed: the original message concatenated "equal" + shp + 1, producing e.g. "equal01".
            Debug.Assert(shape.RecordNumber == shp + 1, "The shapes record number should equal " + (shp + 1));

            if (shape.ShapeType != ShapeType.NullShape)
            {
                // Bounds
                reader.Read(boundsBytes, 0, boundsBytes.Length);
                Buffer.BlockCopy(boundsBytes, 0, bounds, 0, boundsBytes.Length);
                shape.Extent.MinX = bounds[0];
                shape.Extent.MinY = bounds[1];
                shape.Extent.MaxX = bounds[2];
                shape.Extent.MaxY = bounds[3];

                // Num Parts
                shape.NumParts = reader.ReadInt32();
                totalPartsCount += shape.NumParts;

                // Num Points
                shape.NumPoints = reader.ReadInt32();
                totalPointsCount += shape.NumPoints;
            }

            shapeIndices.Add(shape);
        }

        // Flat storage for every shape: interleaved X/Y vertices, part start indices, and optional M/Z.
        var vert = new double[totalPointsCount * 2];
        var vertInd = 0;
        var parts = new int[totalPartsCount];
        var partsInd = 0;

        double[] mArray = null, zArray = null;
        if (isM)
        {
            mArray = new double[totalPointsCount];
        }
        int mArrayInd = 0;
        if (isZ)
        {
            zArray = new double[totalPointsCount];
        }
        int zArrayInd = 0;

        // Second pass: read the actual coordinate data into the flat arrays.
        int partsOffset = 0;
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + (shp * 50.0 / numShapes));
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape)
            {
                continue;
            }
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek((3 * 4) + 32 + (2 * 4), SeekOrigin.Current); // Skip first bytes (Record Number, Content Length, Shapetype + BoundingBox + NumParts, NumPoints)

            // Read parts
            var partsBytes = reader.ReadBytes(4 * shape.NumParts); // Numparts * Integer(4) = existing Parts
            Buffer.BlockCopy(partsBytes, 0, parts, partsInd, partsBytes.Length);
            partsInd += 4 * shape.NumParts;

            // Read points
            var pointsBytes = reader.ReadBytes(8 * 2 * shape.NumPoints); // Numpoints * Point (X(8) + Y(8))
            Buffer.BlockCopy(pointsBytes, 0, vert, vertInd, pointsBytes.Length);
            vertInd += 8 * 2 * shape.NumPoints;

            // Fill parts: each PartRange spans from its start offset to the next part's start (or the shape end).
            shape.Parts.Capacity = shape.NumParts;
            for (int part = 0; part < shape.NumParts; part++)
            {
                int endIndex = shape.NumPoints + shape.StartIndex;
                int startIndex = parts[partsOffset + part] + shape.StartIndex;
                if (part < shape.NumParts - 1)
                {
                    endIndex = parts[partsOffset + part + 1] + shape.StartIndex;
                }
                int count = endIndex - startIndex;
                var partR = new PartRange(vert, shape.StartIndex, parts[partsOffset + part], featureType)
                {
                    NumVertices = count
                };
                shape.Parts.Add(partR);
            }
            partsOffset += shape.NumParts;

            // Fill M and Z arrays
            switch (header.ShapeType)
            {
                case ShapeType.PolyLineM:
                case ShapeType.PolygonM:
                    // M values are optional; detect them by comparing the declared content length
                    // (in 16-bit words, excluding the 8-byte record header) against the M-less size.
                    if (shape.ContentLength * 2 > 44 + (4 * shape.NumParts) + (16 * shape.NumPoints))
                    {
                        var mExt = (IExtentM)shape.Extent;
                        mExt.MinM = reader.ReadDouble();
                        mExt.MaxM = reader.ReadDouble();

                        var mBytes = reader.ReadBytes(8 * shape.NumPoints);
                        Buffer.BlockCopy(mBytes, 0, mArray, mArrayInd, mBytes.Length);
                        mArrayInd += 8 * shape.NumPoints;
                    }
                    break;
                case ShapeType.PolyLineZ:
                case ShapeType.PolygonZ:
                    var zExt = (IExtentZ)shape.Extent;
                    zExt.MinZ = reader.ReadDouble();
                    zExt.MaxZ = reader.ReadDouble();

                    var zBytes = reader.ReadBytes(8 * shape.NumPoints);
                    Buffer.BlockCopy(zBytes, 0, zArray, zArrayInd, zBytes.Length);
                    zArrayInd += 8 * shape.NumPoints;

                    // These are listed as "optional" but there isn't a good indicator of how to determine if they were added.
                    // To handle the "optional" M values, check the contentLength for the feature.
                    // The content length does not include the 8-byte record header and is listed in 16-bit words.
                    if (shape.ContentLength * 2 > 60 + (4 * shape.NumParts) + (24 * shape.NumPoints))
                    {
                        goto case ShapeType.PolyLineM;
                    }
                    break;
            }
        }

        if (isM)
        {
            shapefile.M = mArray;
        }
        if (isZ)
        {
            shapefile.Z = zArray;
        }
        shapefile.ShapeIndices = shapeIndices;
        shapefile.Vertex = vert;
    }
    progressMeter.Reset();
}
/// <summary> /// Gets the statistics of all the values. If the entire content is not currently in-ram, /// ReadRow will be used to read individual lines and perform the calculations. /// </summary> public override void GetStatistics() { ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CalculatingStatistics, NumRows); T min = Global.MaximumValue <T>(); T max = Global.MinimumValue <T>(); double total = 0; double sqrTotal = 0; int count = 0; if (!IsInRam || !this.IsFullyWindowed()) { for (int row = 0; row < NumRowsInFile; row++) { T[] values = ReadRow(row); for (int col = 0; col < NumColumnsInFile; col++) { T val = values[col]; double dblVal = Global.ToDouble(val); if (dblVal == NoDataValue) { continue; } if (val.CompareTo(max) > 0) { max = val; } if (val.CompareTo(min) < 0) { min = val; } total += dblVal; sqrTotal += dblVal * dblVal; count++; } pm.CurrentValue = row; } } else { for (int row = 0; row < NumRows; row++) { for (int col = 0; col < NumColumns; col++) { T val = Data[row][col]; double dblVal = Global.ToDouble(val); if (dblVal == NoDataValue) { continue; } if (val.CompareTo(max) > 0) { max = val; } if (val.CompareTo(min) < 0) { min = val; } total += dblVal; sqrTotal += dblVal * dblVal; count++; } pm.CurrentValue = row; } } Value.Updated = false; Minimum = Global.ToDouble(min); Maximum = Global.ToDouble(max); Mean = total / count; NumValueCells = count; StdDeviation = (float)Math.Sqrt((sqrTotal / NumValueCells) - (total / NumValueCells * (total / NumValueCells))); pm.Reset(); }
/// <summary>
/// This assumes that the base image has been written to the file. This will now attempt to calculate
/// the down-sampled images.
/// </summary>
public void CreatePyramids()
{
    int w = _header.ImageHeaders[0].NumColumns;
    int h = _header.ImageHeaders[0].NumRows;

    // Cap the number of rows processed per block so one block's pixel buffer stays bounded
    // (32,000,000 pixels at 4 bytes each). NOTE(review): exact memory target unverified.
    int blockHeight = 32000000 / w;
    if (blockHeight > h) blockHeight = h;
    int numBlocks = (int)Math.Ceiling(h / (double)blockHeight);
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", _header.ImageHeaders.Length * numBlocks);
    for (int block = 0; block < numBlocks; block++)
    {
        // Normally block height except for the lowest block which is usually smaller
        int bh = blockHeight;
        if (block == numBlocks - 1) bh = h - block * blockHeight;

        // Read a block of bytes into a bitmap
        byte[] vals = ReadWindow(block * blockHeight, 0, bh, w, 0);
        Bitmap bmp = new Bitmap(w, bh);
        try
        {
            BitmapData bd = bmp.LockBits(new Rectangle(0, 0, w, bh), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
            Marshal.Copy(vals, 0, bd.Scan0, vals.Length);
            bmp.UnlockBits(bd);

            // cycle through the scales, and write the resulting smaller bitmap in an appropriate spot
            int sw = w;          // scale width
            int sh = bh;         // scale height
            int sbh = blockHeight;
            for (int scale = 1; scale < _header.ImageHeaders.Length - 1; scale++)
            {
                sw = sw / 2;
                sh = sh / 2;
                sbh = sbh / 2;
                if (sh == 0 || sw == 0)
                {
                    break;
                }

                Bitmap subSet = new Bitmap(sw, sh);
                // Graphics is scoped to the draw; disposing before LockBits matches the original flow.
                using (Graphics g = Graphics.FromImage(subSet))
                {
                    g.DrawImage(bmp, 0, 0, sw, sh);
                }
                bmp.Dispose(); // since we keep getting smaller, don't bother keeping the big image in memory any more.
                bmp = subSet;  // keep the most recent image alive for making even smaller subsets.

                BitmapData bdata = bmp.LockBits(new Rectangle(0, 0, sw, sh), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
                byte[] res = new byte[sw * sh * 4];
                Marshal.Copy(bdata.Scan0, res, 0, res.Length);
                bmp.UnlockBits(bdata);
                WriteWindow(res, sbh * block, 0, sh, sw, scale);
                pm.CurrentValue = block * _header.ImageHeaders.Length + scale;
            }
        }
        finally
        {
            // Ensure whichever bitmap is current gets released even if LockBits/WriteWindow throws.
            bmp.Dispose();
        }
    }
    pm.Reset();
}
/// <summary> /// This assumes that the base image has been written to the file. This will now attempt to calculate /// the down-sampled images. /// </summary> public void CreatePyramids2() { double count = _header.ImageHeaders[0].NumRows; ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", count); int prog = 0; for (int scale = 0; scale < _header.ImageHeaders.Length - 1; scale++) { PyramidImageHeader ph = _header.ImageHeaders[scale]; int rows = ph.NumRows; int cols = ph.NumColumns; // Horizontal Blur Pass byte[] r1 = ReadWindow(0, 0, 1, cols, scale); byte[] r2 = ReadWindow(1, 0, 1, cols, scale); byte[] vals = Blur(null, r1, r2); vals = DownSample(vals); WriteWindow(vals, 0, 0, 1, cols / 2, scale + 1); prog++; pm.CurrentValue = prog; byte[] r3 = ReadWindow(2, 0, 1, cols, scale); vals = Blur(r1, r2, r3); vals = DownSample(vals); WriteWindow(vals, 1, 0, 1, cols / 2, scale + 1); prog++; pm.CurrentValue = prog; for (int row = 3; row < rows - 1; row++) { r1 = r2; r2 = r3; r3 = ReadWindow(row, 0, 1, cols, scale); prog++; pm.CurrentValue = prog; if (row % 2 == 1) continue; vals = Blur(r1, r2, r3); vals = DownSample(vals); WriteWindow(vals, row / 2 - 1, 0, 1, cols / 2, scale + 1); } if ((rows - 1) % 2 == 0) { vals = Blur(r2, r3, r2); vals = DownSample(vals); WriteWindow(vals, rows / 2 - 1, 0, 1, cols / 2, scale + 1); } prog++; pm.CurrentValue = prog; } pm.Reset(); }
/// <summary>
/// If no file exists, this writes the header and no-data values. If a file exists, it will assume
/// that data already has been filled in the file and will attempt to insert the data values
/// as a window into the file. If you want to create a copy of the file and values, just use
/// System.IO.File.Copy, it almost certainly would be much more optimized.
/// </summary>
/// <param name="fileName">The string fileName to write values to.</param>
public void Write(string fileName)
{
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Writing values to " + fileName, NumRows);

    // Cast before multiplying to avoid 32-bit overflow on large rasters.
    long expectedByteCount = (long)NumRows * NumColumns * ByteSize;
    // Smaller rasters report progress in coarser steps. (The original cascade of
    // independent ifs made the 5% branch unreachable; use an exclusive chain.)
    if (expectedByteCount < 100000)
    {
        pm.StepPercent = 50;
    }
    else if (expectedByteCount < 1000000)
    {
        pm.StepPercent = 10;
    }
    else if (expectedByteCount < 5000000)
    {
        pm.StepPercent = 5;
    }

    if (File.Exists(fileName))
    {
        FileInfo fi = new FileInfo(fileName);
        // if the following test fails, then the target raster doesn't fit the bill for pasting into, so clear it and write a new one.
        if (fi.Length == HeaderSize + ByteSize * NumColumnsInFile * NumRowsInFile)
        {
            WriteHeader(fileName);

            // assume that we already have a file set up for us, and just write the window of values into the appropriate place.
            using (FileStream fs = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None, ByteSize * NumColumns))
            using (BinaryWriter bw = new BinaryWriter(fs)) // encoding doesn't matter because we don't have characters
            {
                fs.Seek(HeaderSize, SeekOrigin.Begin);
                // Fixed: skipping to the window's first row must skip whole rows of the file,
                // not just StartRow cells (was ByteSize * StartRow).
                fs.Seek((long)StartRow * NumColumnsInFile * ByteSize, SeekOrigin.Current);
                for (int row = 0; row < NumRows; row++)
                {
                    // Skip the columns to the left of the window.
                    fs.Seek(StartColumn * ByteSize, SeekOrigin.Current);
                    for (int col = 0; col < NumColumns; col++)
                    {
                        // this is the only line that is type dependant, but I don't want to type check on every value
                        bw.Write(Data[row][col]);
                    }
                    fs.Flush(); // Since I am buffering, make sure that I write the buffered data before seeking

                    // Skip the columns to the right of the window, landing on the next row start.
                    fs.Seek((NumColumnsInFile - EndColumn - 1) * ByteSize, SeekOrigin.Current);
                    pm.CurrentValue = row;
                }
                pm.Reset();
            }
            return;
        }

        // If we got here, the file didn't match the specifications correctly, so write a new one.
        // Fixed: parenthesized so the expected size is an arithmetic sum, not string concatenation.
        Debug.WriteLine("The size of the file was " + fi.Length + " which didn't match the expected " + (HeaderSize + (long)ByteSize * NumColumnsInFile * NumRowsInFile));
        File.Delete(fileName);
    }

    WriteHeader(fileName);

    // Open as append and it will automatically skip the header for us.
    using (FileStream fs = new FileStream(fileName, FileMode.Append, FileAccess.Write, FileShare.None, ByteSize * NumColumnsInFile))
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        // the row and column counters here are relative to the whole file, not just the window that is currently in memory.
        pm.EndValue = NumRowsInFile;
        int noDataValue = Convert.ToInt32(NoDataValue);
        for (int row = 0; row < NumRowsInFile; row++)
        {
            for (int col = 0; col < NumColumnsInFile; col++)
            {
                if (row < StartRow || row > EndRow || col < StartColumn || col > EndColumn)
                {
                    // NOTE(review): this always writes a 4-byte int sentinel; if ByteSize != 4 the
                    // file layout would be corrupted — confirm this path only runs for int rasters.
                    bw.Write(noDataValue);
                }
                else
                {
                    bw.Write(Data[row - StartRow][col - StartColumn]);
                }
            }
            pm.CurrentValue = row;
        }
        fs.Flush(); // flush anything that hasn't gotten written yet.
        pm.Reset();
    }
}
/// <summary>
/// This creates a completely new raster from the windowed domain on the original raster. This new raster
/// will have a separate source file, and values like NumRowsInFile will correspond to the newly created file.
/// All the values will be copied to the new source file. If inRam = true and the new raster is small enough,
/// the raster values will be loaded into memory.
/// </summary>
/// <param name="fileName">The string path of the new file that will store the copied window.</param>
/// <param name="startRow">The 0 based integer index of the top row to copy from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to copy from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to copy from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to copy from this raster. The largest allowed value is NumColumns - 1</param>
/// <param name="copyValues">If this is true, the values are saved to the file. If this is false and the data can be loaded into Ram, no file handling is done. Otherwise, a file of NoData values is created.</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster</returns>
public new IRaster CopyWindow(string fileName, int startRow, int endRow, int startColumn, int endColumn, bool copyValues, bool inRam)
{
    // NOTE(review): copyValues is currently ignored by this implementation — confirm against the documented contract.
    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;

    var result = new BinaryRaster<T>(fileName, numCols, numRows, inRam)
    {
        Projection = Projection
    };

    // The affine coefficients defining the world file are the same except that they are translated over.
    // Only the position of the upper left corner changes. Everything else is the same as the previous raster.
    var ac = new AffineTransform(Bounds.AffineCoefficients).TransfromToCorner(startColumn, startRow);
    result.Bounds = new RasterBounds(result.NumRows, result.NumColumns, ac);

    if (IsInRam)
    {
        ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, numRows);

        // copy values directly using both data structures
        for (int row = 0; row < numRows; row++)
        {
            for (int col = 0; col < numCols; col++)
            {
                result.Data[row][col] = Data[startRow + row][startColumn + col];
            }
            pm.CurrentValue = row;
        }
        pm.Reset();

        if (result.IsInRam == false)
        {
            // Force the result raster to write itself to a file and then purge its memory.
            result.Write(fileName);
            result.Data = null;
        }
    }
    else
    {
        if (result.IsInRam)
        {
            // the source is not in memory, so we just read the values from the file as if opening it directly from the file.
            result.OpenWindow(Filename, startRow, endRow, startColumn, endColumn, true);
        }
        else
        {
            // Both sources are file based so we basically copy rows of bytes from one to the other.
            result.WriteHeader(fileName);
            using (FileStream source = new FileStream(Filename, FileMode.Open, FileAccess.Read, FileShare.Read))
            using (FileStream dest = new FileStream(fileName, FileMode.Append, FileAccess.Write, FileShare.None))
            {
                source.Seek(HeaderSize, SeekOrigin.Begin);
                BinaryReader bReader = new BinaryReader(source);
                BinaryWriter bWriter = new BinaryWriter(dest);
                ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, numRows);

                // Skip whole rows down to the first row of the window.
                // NOTE(review): startRow/startColumn are used as file coordinates here; if this raster
                // is itself a window with StartRow/StartColumn > 0 an additional offset may be needed.
                source.Seek((long)NumColumnsInFile * startRow * ByteSize, SeekOrigin.Current);
                for (int row = 0; row < numRows; row++)
                {
                    // Fixed: skip the columns left of the window (was numCols * ByteSize,
                    // which skipped the window width instead of the left margin).
                    source.Seek((long)startColumn * ByteSize, SeekOrigin.Current);
                    byte[] rowData = bReader.ReadBytes(ByteSize * numCols);
                    bWriter.Write(rowData);

                    // Fixed: skip the columns right of the window to reach the next row.
                    // The original skipped (NumColumnsInFile - endColumn + 1) *bytes*: it was missing
                    // the ByteSize factor and was off by two columns.
                    source.Seek((long)(NumColumnsInFile - endColumn - 1) * ByteSize, SeekOrigin.Current);
                    bWriter.Flush();
                    pm.CurrentValue = row;
                }
                pm.Reset();
            }
        }
    }
    return result;
}
// ----------------------------------- FROM AND TO IN RAM ONLY ---------------------------------

/// <summary>
/// This creates an IN MEMORY ONLY window from the in-memory window of this raster. If, however, the requested range
/// is outside of what is contained in the in-memory portions of this raster, an appropriate cast
/// is required to ensure that you have the correct File handling, like a BinaryRaster etc.
/// </summary>
/// <param name="startRow">The 0 based integer index of the top row to get from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to get from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to get from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to get from this raster. The largest allowed value is NumColumns - 1</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster</returns>
/// <exception cref="ArgumentException">Thrown if this raster is not in ram, or the requested window is not fully inside the in-memory window.</exception>
public IRaster GetWindow(int startRow, int endRow, int startColumn, int endColumn, bool inRam)
{
    if (IsInRam == false)
    {
        throw new ArgumentException(DataStrings.RasterRequiresCast);
    }

    // The requested extents must lie entirely inside the extents that have been windowed into ram;
    // otherwise file handling (and a typed raster cast) is required.
    // Fixed: the column comparisons were inverted relative to the row comparisons.
    if (startRow < StartRow || endRow > EndRow || startColumn < StartColumn || endColumn > EndColumn)
    {
        throw new ArgumentException(DataStrings.RasterRequiresCast);
    }

    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;

    Raster<T> result = new Raster<T>(numRows, numCols);
    result.Filename = Filename;
    result.Projection = Projection;
    // Fixed: report the actual element type of this raster (was typeof(int)).
    result.DataType = typeof(T);
    result.NumRows = numRows;
    result.NumColumns = numCols;
    result.NumRowsInFile = NumRowsInFile;
    result.NumColumnsInFile = NumColumnsInFile;
    result.NoDataValue = NoDataValue;
    result.StartColumn = startColumn;
    result.StartRow = startRow;
    result.EndColumn = endColumn;
    // Fixed: use the requested endRow (was this raster's EndRow), matching the other window fields.
    result.EndRow = endRow;
    result.FileType = FileType;

    // Reposition the new "raster" so that it matches the specified window, not the whole raster
    // X = [0] + [1] * column + [2] * row;
    // Y = [3] + [4] * column + [5] * row;
    result.Bounds.AffineCoefficients = new double[6];
    result.Bounds.AffineCoefficients[0] = Bounds.AffineCoefficients[0] + Bounds.AffineCoefficients[1] * startColumn + Bounds.AffineCoefficients[2] * startRow;
    result.Bounds.AffineCoefficients[1] = Bounds.AffineCoefficients[1];
    result.Bounds.AffineCoefficients[2] = Bounds.AffineCoefficients[2];
    result.Bounds.AffineCoefficients[3] = Bounds.AffineCoefficients[3] + Bounds.AffineCoefficients[4] * startColumn + Bounds.AffineCoefficients[5] * startRow;
    result.Bounds.AffineCoefficients[4] = Bounds.AffineCoefficients[4];
    result.Bounds.AffineCoefficients[5] = Bounds.AffineCoefficients[5];

    // Now we can copy any values currently in memory.
    ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, endRow);
    pm.StartValue = startRow;

    // copy values directly using both data structures
    // NOTE(review): Data is indexed with the raw startRow/startColumn; if this raster's window has
    // StartRow/StartColumn > 0, a "- StartRow"/"- StartColumn" offset may be needed — confirm.
    for (int row = 0; row < numRows; row++)
    {
        for (int col = 0; col < numCols; col++)
        {
            result.Data[row][col] = Data[startRow + row][startColumn + col];
        }
        // Report in the meter's [startRow, endRow] range rather than the raw loop counter.
        pm.CurrentValue = startRow + row;
    }
    pm.Reset();

    result.Value = new ValueGrid<T>(result);
    return result;
}
/// <summary>
/// This assumes that the base image has been written to the file. This will now attempt to calculate
/// the down-sampled images.
/// </summary>
public void CreatePyramids()
{
    int w = Header.ImageHeaders[0].NumColumns;
    int h = Header.ImageHeaders[0].NumRows;

    // Cap the number of rows processed per block so one block's pixel buffer stays bounded
    // (32,000,000 pixels at 4 bytes each). NOTE(review): exact memory target unverified.
    int blockHeight = 32000000 / w;
    if (blockHeight > h)
    {
        blockHeight = h;
    }

    int numBlocks = (int)Math.Ceiling(h / (double)blockHeight);
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", Header.ImageHeaders.Length * numBlocks);
    for (int block = 0; block < numBlocks; block++)
    {
        // Normally block height except for the lowest block which is usually smaller
        int bh = blockHeight;
        if (block == numBlocks - 1)
        {
            bh = h - (block * blockHeight);
        }

        // Read a block of bytes into a bitmap
        byte[] vals = ReadWindow(block * blockHeight, 0, bh, w, 0);
        Bitmap bmp = new Bitmap(w, bh);
        try
        {
            BitmapData bd = bmp.LockBits(new Rectangle(0, 0, w, bh), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
            Marshal.Copy(vals, 0, bd.Scan0, vals.Length);
            bmp.UnlockBits(bd);

            // cycle through the scales, and write the resulting smaller bitmap in an appropriate spot
            int sw = w;          // scale width
            int sh = bh;         // scale height
            int sbh = blockHeight;
            for (int scale = 1; scale < Header.ImageHeaders.Length - 1; scale++)
            {
                sw = sw / 2;
                sh = sh / 2;
                sbh = sbh / 2;
                if (sh == 0 || sw == 0)
                {
                    break;
                }

                Bitmap subSet = new Bitmap(sw, sh);
                // Graphics is scoped to the draw; disposing before LockBits matches the original flow.
                using (Graphics g = Graphics.FromImage(subSet))
                {
                    g.DrawImage(bmp, 0, 0, sw, sh);
                }
                bmp.Dispose(); // since we keep getting smaller, don't bother keeping the big image in memory any more.
                bmp = subSet;  // keep the most recent image alive for making even smaller subsets.

                BitmapData bdata = bmp.LockBits(new Rectangle(0, 0, sw, sh), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
                byte[] res = new byte[sw * sh * 4];
                Marshal.Copy(bdata.Scan0, res, 0, res.Length);
                bmp.UnlockBits(bdata);
                WriteWindow(res, sbh * block, 0, sh, sw, scale);
                pm.CurrentValue = (block * Header.ImageHeaders.Length) + scale;
            }
        }
        finally
        {
            // Ensure whichever bitmap is current gets released even if LockBits/WriteWindow throws.
            bmp.Dispose();
        }
    }
    pm.Reset();
}
/// <summary>
/// This populates the Table with data from the file.
/// </summary>
/// <param name="numRows">In the event that the dbf file is not found, this indicates how many blank rows should exist in the attribute Table.</param>
public void Fill(int numRows)
{
    if (_isFilling) return; // Changed by jany_ (2015-07-30) don't load again because the fill methode is called from inside the fill methode and we'd get a datatable that is filled with twice the existing records

    _attributesPopulated = false;
    _dataTable.Rows.Clear(); // if we have already loaded data, clear the data.
    _isFilling = true;
    try
    {
        // Fixed: _isFilling is now reset in a finally block. Previously the early return below
        // (and any exception) left _isFilling == true, making every later Fill call a silent no-op.
        if (File.Exists(_fileName) == false)
        {
            // No dbf file: build a table of blank rows identified only by FID.
            _numRecords = numRows;
            _dataTable.BeginLoadData();
            if (!_dataTable.Columns.Contains("FID"))
            {
                _dataTable.Columns.Add("FID", typeof(int));
            }
            for (int row = 0; row < numRows; row++)
            {
                DataRow dr = _dataTable.NewRow();
                dr["FID"] = row;
                _dataTable.Rows.Add(dr);
            }
            _dataTable.EndLoadData();
            return;
        }

        if (!_loaded) GetRowOffsets();
        ProgressMeter = new ProgressMeter(ProgressHandler, "Reading from DBF Table...", _numRecords);
        // Fewer progress updates for larger tables.
        if (_numRecords < 10000) ProgressMeter.StepPercent = 100;
        else if (_numRecords < 100000) ProgressMeter.StepPercent = 50;
        else if (_numRecords < 5000000) ProgressMeter.StepPercent = 10;
        else if (_numRecords < 10000000) ProgressMeter.StepPercent = 5;

        _dataTable.BeginLoadData();

        // Reading the Table elements as well as the shapes in a single progress loop.
        using (var myReader = GetBinaryReader())
        {
            for (int row = 0; row < _numRecords; row++)
            {
                // --------- DATABASE ---------
                // NOTE(review): the record appears to be parsed twice (once into CurrentFeature,
                // once into the DataRow) — confirm both reads are required.
                CurrentFeature = ReadTableRow(myReader);
                DataRow nextRow;
                try
                {
                    nextRow = ReadTableRowFromChars(row, myReader);
                }
                catch (Exception ex)
                {
                    // Best effort: a malformed record becomes a blank row rather than aborting the load.
                    Debug.WriteLine(ex.ToString());
                    nextRow = _dataTable.NewRow();
                }
                _dataTable.Rows.Add(nextRow);

                // If a progress message needs to be updated, this will handle that.
                ProgressMeter.CurrentValue = row;
            }
        }
        ProgressMeter.Reset();
        _dataTable.EndLoadData();
        _attributesPopulated = true;
        OnAttributesFilled();
    }
    finally
    {
        _isFilling = false;
    }
}
/// <summary>
/// If no file exists, this writes the header and no-data values. If a file exists, it will assume
/// that data already has been filled in the file and will attempt to insert the data values
/// as a window into the file. If you want to create a copy of the file and values, just use
/// System.IO.File.Copy, it almost certainly would be much more optimized.
/// </summary>
/// <param name="fileName">The string fileName to write values to.</param>
private void Write(string fileName)
{
    // Fixed: this method previously mixed the fileName parameter and the Filename property
    // (checked one file's existence but inspected/deleted/opened the other). It now uses
    // the fileName parameter consistently.
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Writing values to " + fileName, NumRows);

    // Cast before multiplying to avoid 32-bit overflow on large rasters.
    long expectedByteCount = (long)NumRows * NumColumns * ByteSize;
    // Smaller rasters report progress in coarser steps. (The original cascade of
    // independent ifs made the 5% branch unreachable; use an exclusive chain.)
    if (expectedByteCount < 100000)
    {
        pm.StepPercent = 50;
    }
    else if (expectedByteCount < 1000000)
    {
        pm.StepPercent = 10;
    }
    else if (expectedByteCount < 5000000)
    {
        pm.StepPercent = 5;
    }

    if (File.Exists(fileName))
    {
        FileInfo fi = new FileInfo(fileName);
        // if the following test fails, then the target raster doesn't fit the bill for pasting into, so clear it and write a new one.
        if (fi.Length == HeaderSize + ByteSize * NumColumnsInFile * NumRowsInFile)
        {
            WriteHeader(fileName);
            WriteRaster(Data);
            return;
        }

        // The file didn't match the specifications correctly, so write a new one.
        // Fixed: parenthesized so the expected size is an arithmetic sum, not string concatenation.
        Debug.WriteLine("The size of the file was " + fi.Length + " which didn't match the expected " + (HeaderSize + (long)ByteSize * NumColumnsInFile * NumRowsInFile));
        File.Delete(fileName);
    }

    WriteHeader(fileName);

    // Open as append and it will automatically skip the header for us.
    using (var bw = new BinaryWriter(new FileStream(fileName, FileMode.Append, FileAccess.Write, FileShare.None, ByteSize * NumColumnsInFile)))
    {
        // the row and column counters here are relative to the whole file, not just the window that is currently in memory.
        pm.EndValue = NumRowsInFile;

        // Single-element array holding the NoData sentinel, hoisted out of the loop.
        T[] nd = { (T)Convert.ChangeType(NoDataValue, typeof(T)) };
        for (int row = 0; row < NumRowsInFile; row++)
        {
            byte[] rawBytes = new byte[NumColumnsInFile * ByteSize];
            if (row >= StartRow && row <= EndRow)
            {
                // In-window row: real data in the middle, NoData in the margins.
                Buffer.BlockCopy(Data[row - StartRow], 0, rawBytes, StartColumn * ByteSize, NumColumns * ByteSize);
                for (int col = 0; col < StartColumn; col++)
                {
                    Buffer.BlockCopy(nd, 0, rawBytes, col * ByteSize, ByteSize);
                }
                for (int col = EndColumn + 1; col < NumColumnsInFile; col++)
                {
                    Buffer.BlockCopy(nd, 0, rawBytes, col * ByteSize, ByteSize);
                }
            }
            else
            {
                // Fixed: rows above/below the window previously indexed Data[row - StartRow]
                // with a negative (or out-of-range) index; they are now written as full NoData rows.
                for (int col = 0; col < NumColumnsInFile; col++)
                {
                    Buffer.BlockCopy(nd, 0, rawBytes, col * ByteSize, ByteSize);
                }
            }
            bw.Write(rawBytes);
            pm.CurrentValue = row;
        }
    }
    pm.Reset();
}
// X Y MultiPoints // --------------------------------------------------------- // Position Value Type Number Byte Order // --------------------------------------------------------- // Byte 0 Record Number Integer 1 Big // Byte 4 Content Length Integer 1 Big // Byte 8 Shape Type 8 Integer 1 Little // Byte 12 Xmin Double 1 Little // Byte 20 Ymin Double 1 Little // Byte 28 Xmax Double 1 Little // Byte 36 Ymax Double 1 Little // Byte 44 NumPoints Integer 1 Little // Byte 48 Points Point NumPoints Little // X Y M MultiPoints // --------------------------------------------------------- // Position Value Type Number Byte Order // --------------------------------------------------------- // Byte 0 Record Number Integer 1 Big // Byte 4 Content Length Integer 1 Big // Byte 8 Shape Type 28 Integer 1 Little // Byte 12 Box (Xmin - Ymax) Double 4 Little // Byte 44 NumPoints Integer 1 Little // Byte 48 Points Point NumPoints Little // Byte X* Mmin Double 1 Little // Byte X+8* Mmax Double 1 Little // Byte X+16* Marray Double NumPoints Little // X = 48 + (16 * NumPoints) // * = optional // X Y Z M MultiPoints // --------------------------------------------------------- // Position Value Type Number Byte Order // --------------------------------------------------------- // Byte 0 Record Number Integer 1 Big // Byte 4 Content Length Integer 1 Big // Byte 8 Shape Type 18 Integer 1 Little // Byte 12 Box Double 4 Little // Byte 44 NumPoints Integer 1 Little // Byte 48 Points Point NumPoints Little // Byte X Zmin Double 1 Little // Byte X+8 Zmax Double 1 Little // Byte X+16 Zarray Double NumPoints Little // Byte Y* Mmin Double 1 Little // Byte Y+8* Mmax Double 1 Little // Byte Y+16* Marray Double NumPoints Little // X = 48 + (16 * NumPoints) // Y = X + 16 + (8 * NumPoints) // * = optional /// <summary> /// Obtains a typed list of MultiPoint structures with double values associated with the various coordinates. 
/// </summary>
/// <param name="fileName">Name of the file that gets loaded.</param>
/// <param name="progressHandler">Progress handler</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    // Check whether file is empty or not all parameters are set correctly.
    if (!CanBeRead(fileName, this, ShapeType.MultiPoint, ShapeType.MultiPointM, ShapeType.MultiPointZ))
    {
        return;
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);
    int numShapes = shapeHeaders.Count;

    // M-capable shapes carry a measure array; Z shapes carry both Z and (optionally) M.
    bool isM = Header.ShapeType == ShapeType.MultiPointZ || Header.ShapeType == ShapeType.MultiPointM;
    bool isZ = Header.ShapeType == ShapeType.MultiPointZ;
    int totalPointsCount = 0;
    int totalPartsCount = 0;
    var shapeIndices = new List<ShapeRange>(numShapes);
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };

    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        var boundsBytes = new byte[4 * 8];
        var bounds = new double[4];

        // First pass: read every record header to size the vertex/part arrays.
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);

            // Read from the index file because some deleted records might still exist in the .shp file.
            long offset = shapeHeaders[shp].ByteOffset;
            reader.Seek(offset, SeekOrigin.Begin);
            var shape = new ShapeRange(FeatureType.MultiPoint, CoordinateType)
            {
                // Record number and content length are big-endian; the rest is little-endian.
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                ShapeType = (ShapeType)reader.ReadInt32(),
                StartIndex = totalPointsCount,
                // A multipoint record is always treated as a single part.
                NumParts = 1
            };
            Debug.Assert(shape.RecordNumber == shp + 1, "shape.RecordNumber == shp + 1");

            if (shape.ShapeType != ShapeType.NullShape)
            {
                // Bounds
                reader.Read(boundsBytes, 0, boundsBytes.Length);
                Buffer.BlockCopy(boundsBytes, 0, bounds, 0, boundsBytes.Length);
                shape.Extent.MinX = bounds[0];
                shape.Extent.MinY = bounds[1];
                shape.Extent.MaxX = bounds[2];
                shape.Extent.MaxY = bounds[3];

                //// Num Parts
                totalPartsCount += 1;

                // Num Points
                shape.NumPoints = reader.ReadInt32();
                totalPointsCount += shape.NumPoints;
            }

            shapeIndices.Add(shape);
        }

        // Flat storage for every shape: interleaved X/Y vertices, part offsets, and optional M/Z.
        var vert = new double[totalPointsCount * 2];
        var vertInd = 0;
        // parts stays zero-filled: with NumParts == 1 per shape, each part starts at offset 0.
        var parts = new int[totalPartsCount];
        int mArrayInd = 0, zArrayInd = 0;
        double[] mArray = null, zArray = null;
        if (isM)
        {
            mArray = new double[totalPointsCount];
        }
        if (isZ)
        {
            zArray = new double[totalPointsCount];
        }

        // Second pass: read the actual coordinate data into the flat arrays.
        int partsOffset = 0;
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + shp * 50.0 / numShapes);
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape)
            {
                continue;
            }
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek(3 * 4 + 32 + 4, SeekOrigin.Current); // Skip first bytes (Record Number, Content Length, Shapetype + BoundingBox + NumPoints)

            // Read points
            var pointsBytes = reader.ReadBytes(8 * 2 * shape.NumPoints); // Numpoints * Point (X(8) + Y(8))
            Buffer.BlockCopy(pointsBytes, 0, vert, vertInd, pointsBytes.Length);
            vertInd += 8 * 2 * shape.NumPoints;

            // Fill parts: each PartRange spans from its start offset to the next part's start (or the shape end).
            shape.Parts.Capacity = shape.NumParts;
            for (int p = 0; p < shape.NumParts; p++)
            {
                int endIndex = shape.NumPoints + shape.StartIndex;
                int startIndex = parts[partsOffset + p] + shape.StartIndex;
                if (p < shape.NumParts - 1)
                {
                    endIndex = parts[partsOffset + p + 1] + shape.StartIndex;
                }
                int count = endIndex - startIndex;
                var part = new PartRange(vert, shape.StartIndex, parts[partsOffset + p], FeatureType.MultiPoint)
                {
                    NumVertices = count
                };
                shape.Parts.Add(part);
            }
            partsOffset += shape.NumParts;

            // Fill M and Z arrays
            switch (Header.ShapeType)
            {
                case ShapeType.MultiPointM:
                    // M values are optional; detect them by comparing the declared content length
                    // (in 16-bit words, excluding the 8-byte record header) against the M-less size.
                    if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
                    {
                        var mExt = (IExtentM)shape.Extent;
                        mExt.MinM = reader.ReadDouble();
                        mExt.MaxM = reader.ReadDouble();
                        var mBytes = reader.ReadBytes(8 * shape.NumPoints);
                        Buffer.BlockCopy(mBytes, 0, mArray, mArrayInd, mBytes.Length);
                        mArrayInd += 8 * shape.NumPoints;
                    }
                    break;
                case ShapeType.MultiPointZ:
                    var zExt = (IExtentZ)shape.Extent;
                    zExt.MinZ = reader.ReadDouble();
                    zExt.MaxZ = reader.ReadDouble();
                    var zBytes = reader.ReadBytes(8 * shape.NumPoints);
                    Buffer.BlockCopy(zBytes, 0, zArray, zArrayInd, zBytes.Length);
                    zArrayInd += 8 * shape.NumPoints;

                    // These are listed as "optional" but there isn't a good indicator of how to
                    // determine if they were added.
                    // To handle the "optional" M values, check the contentLength for the feature.
                    // The content length does not include the 8-byte record header and is listed in 16-bit words.
                    if (shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints)
                    {
                        goto case ShapeType.MultiPointM;
                    }
                    break;
            }
        }

        if (isM)
        {
            M = mArray;
        }
        if (isZ)
        {
            Z = zArray;
        }
        ShapeIndices = shapeIndices;
        Vertex = vert;
    }
    progressMeter.Reset();
}
// X Y Poly Lines: Total Length = 28 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 3    Integer     1           Little
// Byte 12      Xmin            Double      1           Little
// Byte 20      Ymin            Double      1           Little
// Byte 28      Xmax            Double      1           Little
// Byte 36      Ymax            Double      1           Little
// Byte 44      NumParts        Integer     1           Little
// Byte 48      NumPoints       Integer     1           Little
// Byte 52      Parts           Integer     NumParts    Little
// Byte X       Points          Point       NumPoints   Little

// X Y M Poly Lines: Total Length = 34 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 23   Integer     1           Little
// Byte 12      Box             Double      4           Little
// Byte 44      NumParts        Integer     1           Little
// Byte 48      NumPoints       Integer     1           Little
// Byte 52      Parts           Integer     NumParts    Little
// Byte X       Points          Point       NumPoints   Little
// Byte Y*      Mmin            Double      1           Little
// Byte Y + 8*  Mmax            Double      1           Little
// Byte Y + 16* Marray          Double      NumPoints   Little

// X Y Z M Poly Lines: Total Length = 44 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 13   Integer     1           Little
// Byte 12      Box             Double      4           Little
// Byte 44      NumParts        Integer     1           Little
// Byte 48      NumPoints       Integer     1           Little
// Byte 52      Parts           Integer     NumParts    Little
// Byte X       Points          Point       NumPoints   Little
// Byte Y       Zmin            Double      1           Little
// Byte Y + 8   Zmax            Double      1           Little
// Byte Y + 16  Zarray          Double      NumPoints   Little
// Byte Z*      Mmin            Double      1           Little
// Byte Z+8*    Mmax            Double      1           Little
// Byte Z+16*   Marray          Double      NumPoints   Little

/// <summary>
/// Reads every polygon record of the given shapefile into this layer's flat
/// Vertex / M / Z arrays and builds the corresponding ShapeIndices entries.
/// Records are located through the .shx index (via ReadIndexFile) because the
/// .shp file itself may still contain deleted records.
/// </summary>
/// <param name="fileName">The shapefile (.shp) to read; its header must declare a Polygon, PolygonM or PolygonZ shape type.</param>
/// <param name="progressHandler">Progress handler passed to the reader and the parts/holes progress meter.</param>
private void FillPolygons(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null.
    // FIX: pass the parameter name, not the (null) value, so the message is meaningful.
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }

    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Get the basic header information.
    ShapefileHeader header = new ShapefileHeader(fileName);
    Extent = new Extent(new[] { header.Xmin, header.Ymin, header.Xmax, header.Ymax });

    // Check to ensure that the fileName is the correct shape type.
    if (header.ShapeType != ShapeType.Polygon && header.ShapeType != ShapeType.PolygonM && header.ShapeType != ShapeType.PolygonZ)
    {
        throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);

    // TO DO: replace with a normal reader. We no longer need Buffered Binary reader as
    // the buffer can be set on the underlying file stream.
    BufferedBinaryReader bbReader = new BufferedBinaryReader(fileName, progressHandler);
    if (bbReader.FileLength == 100)
    {
        // The shapefile is empty so we can simply return here.
        bbReader.Close();
        return;
    }

    // Skip the shapefile header by skipping the first 100 bytes in the shapefile.
    bbReader.Seek(100, SeekOrigin.Begin);
    int numShapes = shapeHeaders.Count;
    int[] partOffsets = new int[numShapes];
    ByteBlock allParts = new ByteBlock(BLOCKSIZE);
    ByteBlock allCoords = new ByteBlock(BLOCKSIZE);
    bool isM = (header.ShapeType == ShapeType.PolygonM || header.ShapeType == ShapeType.PolygonZ);
    bool isZ = (header.ShapeType == ShapeType.PolygonZ);
    ByteBlock allZ = null;
    ByteBlock allM = null;
    if (isZ)
    {
        allZ = new ByteBlock(BLOCKSIZE);
    }

    if (isM)
    {
        allM = new ByteBlock(BLOCKSIZE);
    }

    int pointOffset = 0;
    for (int shp = 0; shp < numShapes; shp++)
    {
        // Read from the index file because some deleted records
        // might still exist in the .shp file.
        long offset = shapeHeaders[shp].ByteOffset;
        bbReader.Seek(offset, SeekOrigin.Begin);

        ShapeRange shape = new ShapeRange(FeatureType.Polygon);
        shape.RecordNumber = bbReader.ReadInt32(false);     // Byte 0   Record   Integer  1  Big
        shape.ContentLength = bbReader.ReadInt32(false);    // Byte 4   Length   Integer  1  Big

        // Setting shape type also controls extent class type.
        shape.ShapeType = (ShapeType)bbReader.ReadInt32();  // Byte 8   Type     Integer  1  Little
        shape.StartIndex = pointOffset;

        if (shape.ShapeType != ShapeType.NullShape)
        {
            shape.Extent.MinX = bbReader.ReadDouble();
            shape.Extent.MinY = bbReader.ReadDouble();
            shape.Extent.MaxX = bbReader.ReadDouble();
            shape.Extent.MaxY = bbReader.ReadDouble();
            shape.NumParts = bbReader.ReadInt32();          // Byte 44  #Parts   Integer  1  Little
            shape.NumPoints = bbReader.ReadInt32();         // Byte 48  #Points  Integer  1  Little
            partOffsets[shp] = allParts.IntOffset();
            allParts.Read(shape.NumParts * 4, bbReader);
            allCoords.Read(shape.NumPoints * 16, bbReader);
            pointOffset += shape.NumPoints;

            if (header.ShapeType == ShapeType.PolygonM)
            {
                // These are listed as "optional" but there isn't a good indicator of
                // how to determine if they were added.
                // To handle the "optional" M values, check the contentLength for the feature.
                // The content length does not include the 8-byte record header and is listed in 16-bit words.
                if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
                {
                    IExtentM mExt = (IExtentM)shape.Extent;
                    mExt.MinM = bbReader.ReadDouble();
                    mExt.MaxM = bbReader.ReadDouble();
                    if (allM != null)
                    {
                        allM.Read(shape.NumPoints * 8, bbReader);
                    }
                }
            }

            if (header.ShapeType == ShapeType.PolygonZ)
            {
                bool hasM = shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints;
                IExtentZ zExt = (IExtentZ)shape.Extent;
                zExt.MinZ = bbReader.ReadDouble();
                zExt.MaxZ = bbReader.ReadDouble();

                // For Z shapefiles, the Z part is not optional.
                if (allZ != null)
                {
                    allZ.Read(shape.NumPoints * 8, bbReader);
                }

                // The M values of a Z shapefile ARE optional; use the content length to detect them.
                if (hasM)
                {
                    IExtentM mExt = (IExtentM)shape.Extent;
                    mExt.MinM = bbReader.ReadDouble();
                    mExt.MaxM = bbReader.ReadDouble();
                    if (allM != null)
                    {
                        allM.Read(shape.NumPoints * 8, bbReader);
                    }
                }
            }
        }

        // BUGFIX: the shape is always added, null shapes included. The original code
        // skipped null shapes here, which left partOffsets[shp] (indexed by file record)
        // misaligned with the ShapeIndices list iterated below. Null shapes have
        // NumParts == 0, so the part-building loop below ignores them naturally.
        // (This matches the behavior of FillLines.)
        ShapeIndices.Add(shape);
    }

    // BUGFIX: the reader was previously never closed on this (normal) path,
    // leaking the underlying file handle.
    bbReader.Close();

    double[] vert = allCoords.ToDoubleArray();
    Vertex = vert;
    if (isM)
    {
        M = allM.ToDoubleArray();
    }

    if (isZ)
    {
        Z = allZ.ToDoubleArray();
    }

    List<ShapeRange> shapes = ShapeIndices;
    int[] parts = allParts.ToIntArray();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Testing Parts and Holes", shapes.Count);
    for (int shp = 0; shp < shapes.Count; shp++)
    {
        ShapeRange shape = shapes[shp];
        for (int part = 0; part < shape.NumParts; part++)
        {
            int offset = partOffsets[shp];

            // The last part runs to the end of the shape; earlier parts run to the next part's start.
            int endIndex = shape.NumPoints + shape.StartIndex;
            int startIndex = parts[offset + part] + shape.StartIndex;
            if (part < shape.NumParts - 1)
            {
                endIndex = parts[offset + part + 1] + shape.StartIndex;
            }

            int count = endIndex - startIndex;
            PartRange partR = new PartRange(vert, shape.StartIndex, parts[offset + part], FeatureType.Polygon);
            partR.NumVertices = count;
            shape.Parts.Add(partR);
        }

        ProgressMeter.CurrentValue = shp;
    }

    ProgressMeter.Reset();
}
/// <summary>
/// Populates the given streams for the shp and shx file when not in IndexMode.
/// Writes one record header + payload per feature to <paramref name="shpStream"/> and one
/// (offset, content length) pair per feature to <paramref name="shxStream"/>.
/// </summary>
/// <param name="shpStream">Stream that is used to write the shp file.</param>
/// <param name="shxStream">Stream that is used to write the shx file.</param>
/// <param name="shapefile">The shapefile that contains the features that are written.</param>
/// <param name="addPoints">Function that is used to add the points from the features to the parts and points lists.</param>
/// <param name="expectedZType">Indicates which Z-ShapeType the header must have for the z values to be written.</param>
/// <param name="expectedMType">Indicates which M-ShapeType the header must have for the m values to be written.</param>
/// <param name="withParts">Indicates whether the parts should be written.</param>
/// <returns>The lengths of the streams in 16-bit words (the unit used by the shapefile header's file-length field), not bytes.</returns>
internal static StreamLengthPair PopulateStreamsNotIndexed(Stream shpStream, Stream shxStream, Shapefile shapefile, Action<List<int>, List<Coordinate>, IFeature> addPoints, ShapeType expectedZType, ShapeType expectedMType, bool withParts)
{
    var progressMeter = new ProgressMeter(shapefile.ProgressHandler, "Saving (Not Indexed)...", shapefile.Features.Count);

    int fid = 0;
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words

    foreach (IFeature f in shapefile.Features)
    {
        // Delegate extraction of parts/points so this method can serve points, lines and polygons alike.
        List<int> parts = new List<int>();
        List<Coordinate> points = new List<Coordinate>();
        addPoints(parts, points, f);

        bool isNullShape = false;
        int contentLength;

        // null shapes have a contentLength of 2, all other shapes must have the same shape type
        if (f.Geometry.IsEmpty)
        {
            contentLength = 2;
            isNullShape = true;
        }
        else
        {
            contentLength = GetContentLength(parts.Count, points.Count, shapefile.Header.ShapeType);
        }

        //// Index File
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        shxStream.WriteBe(offset);           // Byte 0     Offset             Integer     1           Big
        shxStream.WriteBe(contentLength);    // Byte 4     Content Length     Integer     1           Big

        //// X Y Poly Lines
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        shpStream.WriteBe(fid + 1);          // Byte 0     Record Number      Integer     1           Big
        shpStream.WriteBe(contentLength);    // Byte 4     Content Length     Integer     1           Big

        if (isNullShape)
        {
            shpStream.WriteLe((int)ShapeType.NullShape); // Byte 8   Shape Type 0   Integer   1   Little
        }
        else
        {
            shpStream.WriteLe((int)shapefile.Header.ShapeType);  // Byte 8     Shape Type     Integer   1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MinX); // Byte 12    Xmin           Double    1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MinY); // Byte 20    Ymin           Double    1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MaxX); // Byte 28    Xmax           Double    1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MaxY); // Byte 36    Ymax           Double    1   Little

            if (withParts)
            {
                shpStream.WriteLe(parts.Count);                  // Byte 44    NumParts       Integer   1   Little
            }

            shpStream.WriteLe(points.Count);                     // Byte 48    NumPoints      Integer   1   Little

            if (withParts) //// Byte 52    Parts    Integer    NumParts    Little
            {
                foreach (int part in parts)
                {
                    shpStream.WriteLe(part);
                }
            }

            // Interleave the coordinates as X0,Y0,X1,Y1,... so they can be written in one call.
            double[] xyVals = new double[points.Count * 2];      // Byte X     Points         Point     NumPoints   Little
            for (int i = 0; i < points.Count; i++)
            {
                xyVals[i * 2] = points[i].X;
                xyVals[(i * 2) + 1] = points[i].Y;
            }

            shpStream.WriteLe(xyVals);

            if (shapefile.Header.ShapeType == expectedZType)
            {
                // Pandell, 2020-06-23: "NetTopologySuite" version 1.7.5 doesn't have "NetTopologySuite.Geometries.Envelope.Minimum" property
                // shpStream.WriteLe(f.Geometry.EnvelopeInternal.Minimum.Z);
                // shpStream.WriteLe(f.Geometry.EnvelopeInternal.Maximum.Z);
                // NaN is written in place of the (unavailable) Z min/max envelope values.
                shpStream.WriteLe(double.NaN);
                shpStream.WriteLe(double.NaN);
                double[] zVals = new double[points.Count];
                for (int i = 0; i < points.Count; i++)
                {
                    zVals[i] = points[i].Z;
                }

                shpStream.WriteLe(zVals);
            }

            // Z shapefiles also carry an M block, so both type checks write M values.
            if (shapefile.Header.ShapeType == expectedMType || shapefile.Header.ShapeType == expectedZType)
            {
                // Pandell, 2020-06-23: "NetTopologySuite" version 1.7.5 doesn't have "NetTopologySuite.Geometries.Envelope.Minimum" property
                // shpStream.WriteLe(f.Geometry.EnvelopeInternal.Minimum.M);
                // shpStream.WriteLe(f.Geometry.EnvelopeInternal.Maximum.M);
                shpStream.WriteLe(double.NaN);
                shpStream.WriteLe(double.NaN);
                double[] mVals = new double[points.Count];
                for (int i = 0; i < points.Count; i++)
                {
                    mVals[i] = points[i].M;
                }

                shpStream.WriteLe(mVals);
            }
        }

        progressMeter.CurrentValue = fid;
        fid++;
        offset += 4;             // header bytes (record number + content length = 8 bytes = 4 words)
        offset += contentLength; // adding the content length from each loop calculates the word offset
    }

    progressMeter.Reset();
    return (new StreamLengthPair { ShpLength = offset, ShxLength = 50 + (fid * 4) });
}
// X Y Points: Total Length = 28 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 1    Integer     1           Little
// Byte 12      X               Double      1           Little
// Byte 20      Y               Double      1           Little

// X Y M Points: Total Length = 36 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 21   Integer     1           Little
// Byte 12      X               Double      1           Little
// Byte 20      Y               Double      1           Little
// Byte 28      M               Double      1           Little

// X Y Z M Points: Total Length = 44 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 11   Integer     1           Little
// Byte 12      X               Double      1           Little
// Byte 20      Y               Double      1           Little
// Byte 28      Z               Double      1           Little
// Byte 36      M               Double      1           Little

/// <summary>
/// Obtains a typed list of ShapefilePoint structures with double values associated with the various coordinates.
/// Pass 1 reads every record header to count the non-null points; pass 2 reads the
/// coordinate (and optional Z/M) values into flat arrays.
/// </summary>
/// <param name="fileName">A string fileName</param>
/// <param name="progressHandler">Progress handler</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    if (!CanBeRead(fileName, this, ShapeType.Point, ShapeType.PointM, ShapeType.PointZ))
    {
        return;
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);
    var numShapes = shapeHeaders.Count;
    var shapeIndices = new List<ShapeRange>(numShapes);
    int totalPointsCount = 0;
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };

    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        // Pass 1: build the shape index and count non-null points. First half of the progress bar.
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);

            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);

            var shape = new ShapeRange(FeatureType.Point, CoordinateType)
            {
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                StartIndex = totalPointsCount,
                ShapeType = (ShapeType)reader.ReadInt32()
            };

            // FIX: (shp + 1) must be parenthesized; "..." + shp + 1 concatenates the two
            // numbers as strings (e.g. "equal 01" instead of "equal 1").
            Debug.Assert(shape.RecordNumber == shp + 1, "The record number should equal " + (shp + 1));
            Debug.Assert(shape.ContentLength == shapeHeaders[shp].ContentLength, "The shapes content length should equal the shapeHeaders content length.");

            if (shape.ShapeType == ShapeType.NullShape)
            {
                shape.NumPoints = 0;
                shape.NumParts = 0;
            }
            else
            {
                totalPointsCount += 1;
                shape.NumPoints = 1;
                shape.NumParts = 1;
            }

            shapeIndices.Add(shape);
        }

        double[] m = null;
        double[] z = null;
        var vert = new double[2 * totalPointsCount]; // X,Y interleaved

        // M exists for both PointM and PointZ files; Z only for PointZ.
        if (Header.ShapeType == ShapeType.PointM || Header.ShapeType == ShapeType.PointZ)
        {
            m = new double[totalPointsCount];
        }

        if (Header.ShapeType == ShapeType.PointZ)
        {
            z = new double[totalPointsCount];
        }

        // Pass 2: read the coordinate values. Second half of the progress bar.
        int i = 0;
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + (shp * 50.0 / numShapes));

            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape)
            {
                continue;
            }

            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek(3 * 4, SeekOrigin.Current); // Skip first bytes (Record Number, Content Length, Shapetype)

            // ind tracks the number of content bytes consumed (shape type = 4),
            // used below to decide whether the optional M value is present.
            var ind = 4;

            // Read X
            vert[i * 2] = reader.ReadDouble();
            ind += 8;

            // Read Y
            vert[(i * 2) + 1] = reader.ReadDouble();
            ind += 8;

            // Read Z
            if (z != null)
            {
                z[i] = reader.ReadDouble();
                ind += 8;
            }

            // Read M; when the record is too short to contain it, use double.MinValue as the "missing" marker.
            if (m != null)
            {
                if (shapeHeaders[shp].ByteLength <= ind)
                {
                    m[i] = double.MinValue;
                }
                else
                {
                    m[i] = reader.ReadDouble();
                }
            }

            var part = new PartRange(vert, shape.StartIndex, 0, FeatureType.Point)
            {
                NumVertices = 1
            };
            shape.Parts.Add(part);

            // A single point's extent is degenerate: min == max.
            shape.Extent = new Extent(new[] { vert[i * 2], vert[(i * 2) + 1], vert[i * 2], vert[(i * 2) + 1] });
            i++;
        }

        Vertex = vert;
        M = m;
        Z = z;
        ShapeIndices = shapeIndices;
    }

    progressMeter.Reset();
}
/// <summary>
/// Saves the file to a new location. When not in IndexMode, writes the shp/shx pair
/// record-by-record from the in-memory Features, updating the header shape type from
/// the layer's CoordinateType first.
/// </summary>
/// <param name="fileName">The fileName to save</param>
/// <param name="overwrite">Boolean that specifies whether or not to overwrite the existing file</param>
public override void SaveAs(string fileName, bool overwrite)
{
    if (IndexMode)
    {
        SaveAsIndexed(fileName, overwrite);
        return;
    }

    string dir = Path.GetDirectoryName(fileName);
    if (dir != null && !Directory.Exists(dir))
    {
        Directory.CreateDirectory(dir);
    }

    if (File.Exists(fileName))
    {
        // Re-saving over our own file is always allowed; other files need overwrite == true.
        if (fileName != Filename && overwrite == false) throw new IOException("File exists.");
        File.Delete(fileName);
        string shx = Path.ChangeExtension(fileName, ".shx");
        if (File.Exists(shx)) File.Delete(shx);
    }

    InvalidateEnvelope();

    // Derive the on-disk shape type from the coordinate dimensionality of this layer.
    if (CoordinateType == CoordinateType.Regular)
    {
        Header.ShapeType = ShapeType.MultiPoint;
    }

    if (CoordinateType == CoordinateType.M)
    {
        Header.ShapeType = ShapeType.MultiPointM;
    }

    if (CoordinateType == CoordinateType.Z)
    {
        Header.ShapeType = ShapeType.MultiPointZ;
    }

    Header.SetExtent(MyExtent);

    // 50 words of file header + 4 words (8 bytes) of index entry per feature.
    Header.ShxLength = 50 + 4 * Features.Count;
    Header.SaveAs(fileName);

    BufferedBinaryWriter bbWriter = new BufferedBinaryWriter(fileName);
    BufferedBinaryWriter indexWriter = new BufferedBinaryWriter(Header.ShxFilename);
    int fid = 0;
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words
    int contentLength = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    foreach (IFeature f in Features)
    {
        offset += contentLength; // adding the previous content length from each loop calculates the word offset

        // Flatten all geometries of the feature into one point list.
        List<Coordinate> points = new List<Coordinate>();
        contentLength = 20; // 40 bytes (shape type + bounding box + numPoints) = 20 words
        for (int iPart = 0; iPart < f.NumGeometries; iPart++)
        {
            IList<Coordinate> coords = f.BasicGeometry.GetBasicGeometryN(iPart).Coordinates;
            foreach (Coordinate coord in coords)
            {
                points.Add(coord);
            }
        }

        // Content lengths are in 16-bit words: XY = 8 words/point, +M = 12, +ZM = 16.
        if (Header.ShapeType == ShapeType.MultiPoint)
        {
            contentLength += points.Count * 8;
        }

        if (Header.ShapeType == ShapeType.MultiPointM)
        {
            contentLength += 8; // mmin, mmax
            contentLength += points.Count * 12;
        }

        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            contentLength += 16; // mmin, mmax, zmin, zmax
            contentLength += points.Count * 16;
        }

        // Index File
        // ---------------------------------------------------------
        // Position     Value          Type       Number     Byte Order
        // ---------------------------------------------------------
        indexWriter.Write(offset, false);        // Byte 0   Offset   Integer   1   Big
        indexWriter.Write(contentLength, false); // Byte 4   Length   Integer   1   Big

        // X Y Poly Lines
        // ---------------------------------------------------------
        // Position     Value          Type       Number     Byte Order
        // ---------------------------------------------------------
        bbWriter.Write(fid + 1, false);          // Byte 0   Record   Integer   1   Big
        bbWriter.Write(contentLength, false);    // Byte 4   Length   Integer   1   Big
        bbWriter.Write((int)Header.ShapeType);   // Byte 8   Shape    Integer   1   Little

        // NOTE(review): Header.ShapeType was just set above to one of the MultiPoint
        // types, so this branch looks unreachable; if it ever ran, the continue would
        // skip fid++ and offset += 4, corrupting subsequent record offsets — confirm.
        if (Header.ShapeType == ShapeType.NullShape)
        {
            continue;
        }

        bbWriter.Write(f.Envelope.Minimum.X);    // Byte 12   Xmin   Double   1   Little
        bbWriter.Write(f.Envelope.Minimum.Y);    // Byte 20   Ymin   Double   1   Little
        bbWriter.Write(f.Envelope.Maximum.X);    // Byte 28   Xmax   Double   1   Little
        bbWriter.Write(f.Envelope.Maximum.Y);    // Byte 36   Ymax   Double   1   Little
        bbWriter.Write(points.Count);            // Byte 44   #Points   Integer   1   Little

        // Byte X   Points   Point   #Points   Little
        foreach (Coordinate coord in points)
        {
            bbWriter.Write(coord.X);
            bbWriter.Write(coord.Y);
        }

        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            bbWriter.Write(f.Envelope.Minimum.Z);
            bbWriter.Write(f.Envelope.Maximum.Z);
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.Z);
            }
        }

        if (Header.ShapeType == ShapeType.MultiPointM || Header.ShapeType == ShapeType.MultiPointZ)
        {
            // NOTE(review): f.Envelope was already dereferenced above, so this null
            // check can never trigger here — presumably a leftover defensive copy.
            if (f.Envelope == null)
            {
                bbWriter.Write(0.0);
                bbWriter.Write(0.0);
            }
            else
            {
                bbWriter.Write(f.Envelope.Minimum.M);
                bbWriter.Write(f.Envelope.Maximum.M);
            }

            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.M);
            }
        }

        ProgressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // 8-byte record header = 4 words
    }

    ProgressMeter.Reset();
    bbWriter.Close();
    indexWriter.Close();

    // Add the final record's content length so offset equals the total file length in words.
    offset += contentLength;

    // NOTE(review): this writes the length into Filename, not the fileName parameter —
    // correct only if Header.SaveAs / the save path updated Filename first; confirm.
    WriteFileLength(Filename, offset);
    UpdateAttributes();
    SaveProjection();
}
// X Y Poly Lines: Total Length = 28 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 3    Integer     1           Little
// Byte 12      Xmin            Double      1           Little
// Byte 20      Ymin            Double      1           Little
// Byte 28      Xmax            Double      1           Little
// Byte 36      Ymax            Double      1           Little
// Byte 44      NumParts        Integer     1           Little
// Byte 48      NumPoints       Integer     1           Little
// Byte 52      Parts           Integer     NumParts    Little
// Byte X       Points          Point       NumPoints   Little

// X Y M Poly Lines: Total Length = 34 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 23   Integer     1           Little
// Byte 12      Box             Double      4           Little
// Byte 44      NumParts        Integer     1           Little
// Byte 48      NumPoints       Integer     1           Little
// Byte 52      Parts           Integer     NumParts    Little
// Byte X       Points          Point       NumPoints   Little
// Byte Y*      Mmin            Double      1           Little
// Byte Y + 8*  Mmax            Double      1           Little
// Byte Y + 16* Marray          Double      NumPoints   Little

// X Y Z M Poly Lines: Total Length = 44 Bytes
// ---------------------------------------------------------
// Position     Value           Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number   Integer     1           Big
// Byte 4       Content Length  Integer     1           Big
// Byte 8       Shape Type 13   Integer     1           Little
// Byte 12      Box             Double      4           Little
// Byte 44      NumParts        Integer     1           Little
// Byte 48      NumPoints       Integer     1           Little
// Byte 52      Parts           Integer     NumParts    Little
// Byte X       Points          Point       NumPoints   Little
// Byte Y       Zmin            Double      1           Little
// Byte Y + 8   Zmax            Double      1           Little
// Byte Y + 16  Zarray          Double      NumPoints   Little
// Byte Z*      Mmin            Double      1           Little
// Byte Z+8*    Mmax            Double      1           Little
// Byte Z+16*   Marray          Double      NumPoints   Little

/// <summary>
/// Reads every polygon record of the given shapefile into this layer's flat
/// Vertex / M / Z arrays and builds the corresponding ShapeIndices entries.
/// Records are located through the .shx index (via ReadIndexFile) because the
/// .shp file itself may still contain deleted records.
/// </summary>
/// <param name="fileName">The shapefile (.shp) to read; its header must declare a Polygon, PolygonM or PolygonZ shape type.</param>
/// <param name="progressHandler">Progress handler passed to the reader and the parts/holes progress meter.</param>
private void
FillPolygons(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null
    // NOTE(review): fileName is known to be null here, so Replace("%S", fileName)
    // produces a message without the parameter name — should pass "fileName" instead.
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", fileName));
    }

    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Get the basic header information.
    ShapefileHeader header = new ShapefileHeader(fileName);
    Extent = new Extent(new[] { header.Xmin, header.Ymin, header.Xmax, header.Ymax });

    // Check to ensure that the fileName is the correct shape type
    if (header.ShapeType != ShapeType.Polygon && header.ShapeType != ShapeType.PolygonM && header.ShapeType != ShapeType.PolygonZ)
    {
        throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);

    // TO DO: replace with a normal reader. We no longer need Buffered Binary reader as
    // the buffer can be set on the underlying file stream.
    // NOTE(review): bbReader is only closed on the empty-file early return below; the
    // normal path never closes it — likely a file-handle leak, confirm against
    // BufferedBinaryReader's disposal behavior.
    BufferedBinaryReader bbReader = new BufferedBinaryReader(fileName, progressHandler);
    if (bbReader.FileLength == 100)
    {
        // The shapefile is empty so we can simply return here
        bbReader.Close();
        return;
    }

    // Skip the shapefile header by skipping the first 100 bytes in the shapefile
    bbReader.Seek(100, SeekOrigin.Begin);
    int numShapes = shapeHeaders.Count;
    int[] partOffsets = new int[numShapes];
    //byte[] allBounds = new byte[numShapes * 32];

    // probably all will be in one block, but use a byteBlock just in case.
    ByteBlock allParts = new ByteBlock(BLOCKSIZE);
    ByteBlock allCoords = new ByteBlock(BLOCKSIZE);
    bool isM = (header.ShapeType == ShapeType.PolygonM || header.ShapeType == ShapeType.PolygonZ);
    bool isZ = (header.ShapeType == ShapeType.PolygonZ);
    ByteBlock allZ = null;
    ByteBlock allM = null;
    if (isZ)
    {
        allZ = new ByteBlock(BLOCKSIZE);
    }

    if (isM)
    {
        allM = new ByteBlock(BLOCKSIZE);
    }

    int pointOffset = 0;
    for (int shp = 0; shp < numShapes; shp++)
    {
        // Read from the index file because some deleted records
        // might still exist in the .shp file.
        long offset = (shapeHeaders[shp].ByteOffset);
        bbReader.Seek(offset, SeekOrigin.Begin);

        // Position       Value          Type      Number  Byte Order
        ShapeRange shape = new ShapeRange(FeatureType.Polygon);          //------------------------------------
        shape.RecordNumber = bbReader.ReadInt32(false);     // Byte 0   Record   Integer  1  Big
        shape.ContentLength = bbReader.ReadInt32(false);    // Byte 4   Length   Integer  1  Big

        // Setting shape type also controls extent class type.
        shape.ShapeType = (ShapeType)bbReader.ReadInt32();  // Byte 8   Type     Integer  1  Little
        shape.StartIndex = pointOffset;

        // NOTE(review): null shapes are skipped without being added to ShapeIndices,
        // so partOffsets[shp] (indexed by file record) no longer lines up with the
        // shapes list iterated below whenever a null shape exists — confirm; the
        // FillLines implementation adds every shape instead.
        if (shape.ShapeType == ShapeType.NullShape)
        {
            continue;
        }

        shape.Extent.MinX = bbReader.ReadDouble();
        shape.Extent.MinY = bbReader.ReadDouble();
        shape.Extent.MaxX = bbReader.ReadDouble();
        shape.Extent.MaxY = bbReader.ReadDouble();
        shape.NumParts = bbReader.ReadInt32();              // Byte 44  #Parts   Integer  1  Little
        shape.NumPoints = bbReader.ReadInt32();             // Byte 48  #Points  Integer  1  Little

        // Remember where this shape's part indices start inside the pooled parts block.
        partOffsets[shp] = allParts.IntOffset();
        allParts.Read(shape.NumParts * 4, bbReader);
        allCoords.Read(shape.NumPoints * 16, bbReader);
        pointOffset += shape.NumPoints;
        if (header.ShapeType == ShapeType.PolygonM)
        {
            // These are listed as "optional" but there isn't a good indicator of
            // how to determine if they were added.
            // To handle the "optional" M values, check the contentLength for the feature.
            // The content length does not include the 8-byte record header and is listed in 16-bit words.
            if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
            {
                IExtentM mExt = (IExtentM)shape.Extent;
                mExt.MinM = bbReader.ReadDouble();
                mExt.MaxM = bbReader.ReadDouble();
                if (allM != null) allM.Read(shape.NumPoints * 8, bbReader);
            }
        }

        if (header.ShapeType == ShapeType.PolygonZ)
        {
            bool hasM = shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints;
            IExtentZ zExt = (IExtentZ)shape.Extent;
            zExt.MinZ = bbReader.ReadDouble();
            zExt.MaxZ = bbReader.ReadDouble();

            // For Z shapefiles, the Z part is not optional.
            if (allZ != null) allZ.Read(shape.NumPoints * 8, bbReader);

            // These are listed as "optional" but there isn't a good indicator of
            // how to determine if they were added.
            // To handle the "optional" M values, check the contentLength for the feature.
            // The content length does not include the 8-byte record header and is listed in 16-bit words.
            if (hasM)
            {
                IExtentM mExt = (IExtentM)shape.Extent;
                mExt.MinM = bbReader.ReadDouble();
                mExt.MaxM = bbReader.ReadDouble();
                if (allM != null) allM.Read(shape.NumPoints * 8, bbReader);
            }
        }

        ShapeIndices.Add(shape);
    }

    // Materialize the pooled byte blocks into the flat arrays the layer exposes.
    double[] vert = allCoords.ToDoubleArray();
    Vertex = vert;
    if (isM) M = allM.ToDoubleArray();
    if (isZ) Z = allZ.ToDoubleArray();
    List<ShapeRange> shapes = ShapeIndices;
    //double[] bounds = new double[numShapes * 4];
    //Buffer.BlockCopy(allBounds, 0, bounds, 0, allBounds.Length);
    int[] parts = allParts.ToIntArray();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Testing Parts and Holes", shapes.Count);
    for (int shp = 0; shp < shapes.Count; shp++)
    {
        ShapeRange shape = shapes[shp];
        //shape.Extent = new Extent(bounds, shp * 4);
        for (int part = 0; part < shape.NumParts; part++)
        {
            int offset = partOffsets[shp];

            // The last part runs to the end of the shape; earlier parts run to the next part's start.
            int endIndex = shape.NumPoints + shape.StartIndex;
            int startIndex = parts[offset + part] + shape.StartIndex;
            if (part < shape.NumParts - 1) endIndex = parts[offset + part + 1] + shape.StartIndex;
            int count = endIndex - startIndex;
            PartRange partR = new PartRange(vert,
shape.StartIndex, parts[offset + part], FeatureType.Polygon);
            partR.NumVertices = count;
            shape.Parts.Add(partR);
        }

        ProgressMeter.CurrentValue = shp;
    }

    ProgressMeter.Reset();
}
/// <summary>
/// Populates the given shapefile with shape ranges, part ranges, vertices and (where present)
/// M/Z values read from a polyline or polygon .shp file, using the .shx index to locate records.
/// </summary>
/// <param name="fileName">Path of the .shp file to read. Must exist and match the feature type.</param>
/// <param name="progressHandler">Receives progress messages while reading.</param>
/// <param name="shapefile">The shapefile whose Vertex, M, Z and ShapeIndices members get filled.</param>
/// <param name="featureType">Either FeatureType.Line or FeatureType.Polygon; anything else throws NotSupportedException.</param>
internal static void FillLines(string fileName, IProgressHandler progressHandler, Shapefile shapefile, FeatureType featureType)
{
    // Check to ensure the fileName is not null
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }
    if (shapefile == null) throw new ArgumentNullException("shapefile");
    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }
    if (featureType != FeatureType.Line && featureType != FeatureType.Polygon)
    {
        throw new NotSupportedException();
    }
    var header = shapefile.Header;
    // Check to ensure that the fileName is the correct shape type
    switch (featureType)
    {
        case FeatureType.Line:
            if (header.ShapeType != ShapeType.PolyLine && header.ShapeType != ShapeType.PolyLineM && header.ShapeType != ShapeType.PolyLineZ)
            {
                throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
            }
            break;
        case FeatureType.Polygon:
            if (header.ShapeType != ShapeType.Polygon && header.ShapeType != ShapeType.PolygonM && header.ShapeType != ShapeType.PolygonZ)
            {
                throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
            }
            break;
    }
    // A shapefile consisting only of the 100-byte header holds no shapes.
    if (new FileInfo(fileName).Length == 100)
    {
        // the file is empty so we are done reading
        return;
    }
    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    var shapeHeaders = shapefile.ReadIndexFile(fileName);
    int numShapes = shapeHeaders.Count;
    bool isM = false, isZ = false;
    switch (header.ShapeType)
    {
        case ShapeType.PolyLineM:
        case ShapeType.PolygonM:
            isM = true;
            break;
        case ShapeType.PolyLineZ:
        case ShapeType.PolygonZ:
            // Z shapefiles always carry M extents/values as well (possibly unused).
            isZ = true;
            isM = true;
            break;
    }
    int totalPointsCount = 0;
    int totalPartsCount = 0;
    var shapeIndices = new List<ShapeRange>(numShapes);
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName)) { StepPercent = 5 };
    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        // First pass (0-50% of progress): read every record header to size the arrays.
        var boundsBytes = new byte[4 * 8];
        var bounds = new double[4];
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);
            // Read from the index file because some deleted records
            // might still exist in the .shp file.
            long offset = (shapeHeaders[shp].ByteOffset);
            reader.Seek(offset, SeekOrigin.Begin);
            var shape = new ShapeRange(featureType, shapefile.CoordinateType)
            {
                // Record number and content length are big-endian per the shapefile spec.
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                ShapeType = (ShapeType)reader.ReadInt32(),
                StartIndex = totalPointsCount
            };
            Debug.Assert(shape.RecordNumber == shp + 1);
            if (shape.ShapeType != ShapeType.NullShape)
            {
                // Bounds: MinX, MinY, MaxX, MaxY as four little-endian doubles.
                reader.Read(boundsBytes, 0, boundsBytes.Length);
                Buffer.BlockCopy(boundsBytes, 0, bounds, 0, boundsBytes.Length);
                shape.Extent.MinX = bounds[0];
                shape.Extent.MinY = bounds[1];
                shape.Extent.MaxX = bounds[2];
                shape.Extent.MaxY = bounds[3];
                // Num Parts
                shape.NumParts = reader.ReadInt32();
                totalPartsCount += shape.NumParts;
                // Num Points
                shape.NumPoints = reader.ReadInt32();
                totalPointsCount += shape.NumPoints;
            }
            shapeIndices.Add(shape);
        }
        // Second pass (50-100%): bulk-copy parts, XY vertices and optional M/Z values.
        // All the *Ind counters below are BYTE offsets, as required by Buffer.BlockCopy.
        var vert = new double[totalPointsCount * 2];
        var vertInd = 0;
        var parts = new int[totalPartsCount];
        var partsInd = 0;
        double[] mArray = null, zArray = null;
        if (isM)
        {
            mArray = new double[totalPointsCount];
        }
        int mArrayInd = 0;
        if (isZ)
        {
            zArray = new double[totalPointsCount];
        }
        int zArrayInd = 0;
        int partsOffset = 0;
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + shp * 50.0 / numShapes);
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape) continue;
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            // Skip record header (8), shape type (4), bounds (32), numParts (4), numPoints (4).
            reader.Seek(3 * 4 + 32 + 2 * 4, SeekOrigin.Current); // Skip first bytes
            // Read parts
            var partsBytes = reader.ReadBytes(4 * shape.NumParts);
            Buffer.BlockCopy(partsBytes, 0, parts, partsInd, partsBytes.Length);
            partsInd += 4 * shape.NumParts;
            // Read points
            var pointsBytes = reader.ReadBytes(8 * 2 * shape.NumPoints);
            Buffer.BlockCopy(pointsBytes, 0, vert, vertInd, pointsBytes.Length);
            vertInd += 8 * 2 * shape.NumPoints;
            // Fill parts: each PartRange spans from its start offset to the next part's start
            // (or the end of the shape for the final part).
            shape.Parts.Capacity = shape.NumParts;
            for (int part = 0; part < shape.NumParts; part++)
            {
                int endIndex = shape.NumPoints + shape.StartIndex;
                int startIndex = parts[partsOffset + part] + shape.StartIndex;
                if (part < shape.NumParts - 1)
                {
                    endIndex = parts[partsOffset + part + 1] + shape.StartIndex;
                }
                int count = endIndex - startIndex;
                var partR = new PartRange(vert, shape.StartIndex, parts[partsOffset + part], featureType) { NumVertices = count };
                shape.Parts.Add(partR);
            }
            partsOffset += shape.NumParts;
            // Fill M and Z arrays
            switch (header.ShapeType)
            {
                case ShapeType.PolyLineM:
                case ShapeType.PolygonM:
                    // M values are optional; presence is inferred from the record's content
                    // length (in 16-bit words, excluding the 8-byte record header).
                    if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
                    {
                        var mExt = (IExtentM)shape.Extent;
                        mExt.MinM = reader.ReadDouble();
                        mExt.MaxM = reader.ReadDouble();
                        var mBytes = reader.ReadBytes(8 * shape.NumPoints);
                        Buffer.BlockCopy(mBytes, 0, mArray, mArrayInd, mBytes.Length);
                        mArrayInd += 8 * shape.NumPoints;
                    }
                    break;
                case ShapeType.PolyLineZ:
                case ShapeType.PolygonZ:
                    // For Z shapefiles the Z block is mandatory; read it unconditionally.
                    var zExt = (IExtentZ)shape.Extent;
                    zExt.MinZ = reader.ReadDouble();
                    zExt.MaxZ = reader.ReadDouble();
                    var zBytes = reader.ReadBytes(8 * shape.NumPoints);
                    Buffer.BlockCopy(zBytes, 0, zArray, zArrayInd, zBytes.Length);
                    zArrayInd += 8 * shape.NumPoints;
                    // These are listed as "optional" but there isn't a good indicator of how to
                    // determine if they were added.
                    // To handle the "optional" M values, check the contentLength for the feature.
                    // The content length does not include the 8-byte record header and is listed in 16-bit words.
                    if (shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints)
                    {
                        // Reuse the M-reading logic above.
                        goto case ShapeType.PolyLineM;
                    }
                    break;
            }
        }
        if (isM) shapefile.M = mArray;
        if (isZ) shapefile.Z = zArray;
        shapefile.ShapeIndices = shapeIndices;
        shapefile.Vertex = vert;
    }
    progressMeter.Reset();
}
/// <summary>
/// Saves the file to a new location, writing the .shp record stream and the .shx index
/// side by side. Offsets and content lengths are expressed in 16-bit words per the
/// shapefile specification.
/// </summary>
/// <param name="fileName">The fileName to save</param>
/// <param name="overwrite">Boolean that specifies whether or not to overwrite the existing file</param>
public override void SaveAs(string fileName, bool overwrite)
{
    // Index mode has its own writer; nothing else to do here.
    if (IndexMode)
    {
        SaveAsIndexed(fileName, overwrite);
        return;
    }
    string dir = Path.GetDirectoryName(fileName);
    if (dir != null && !Directory.Exists(dir))
    {
        Directory.CreateDirectory(dir);
    }
    if (File.Exists(fileName))
    {
        // Saving over the currently-open file is always allowed; other targets need overwrite.
        if (fileName != Filename && overwrite == false)
        {
            throw new IOException("File exists.");
        }
        File.Delete(fileName);
        string shx = Path.ChangeExtension(fileName, ".shx");
        if (File.Exists(shx))
        {
            File.Delete(shx);
        }
    }
    InvalidateEnvelope();
    // The coordinate type dictates which MultiPoint variant gets written.
    if (CoordinateType == CoordinateType.Regular)
    {
        Header.ShapeType = ShapeType.MultiPoint;
    }
    if (CoordinateType == CoordinateType.M)
    {
        Header.ShapeType = ShapeType.MultiPointM;
    }
    if (CoordinateType == CoordinateType.Z)
    {
        Header.ShapeType = ShapeType.MultiPointZ;
    }
    Header.SetExtent(MyExtent);
    // .shx is a 100-byte (50-word) header plus one 8-byte (4-word) entry per feature.
    Header.ShxLength = 50 + 4 * Features.Count;
    Header.SaveAs(fileName);
    BufferedBinaryWriter bbWriter = new BufferedBinaryWriter(fileName);
    BufferedBinaryWriter indexWriter = new BufferedBinaryWriter(Header.ShxFilename);
    int fid = 0;
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words
    int contentLength = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    foreach (IFeature f in Features)
    {
        offset += contentLength; // adding the previous content length from each loop calculates the word offset
        List<Coordinate> points = new List<Coordinate>();
        // 20 words = shape type (2) + bounding box (16) + point count (2).
        contentLength = 20;
        for (int iPart = 0; iPart < f.NumGeometries; iPart++)
        {
            IList<Coordinate> coords = f.BasicGeometry.GetBasicGeometryN(iPart).Coordinates;
            foreach (Coordinate coord in coords)
            {
                points.Add(coord);
            }
        }
        if (Header.ShapeType == ShapeType.MultiPoint)
        {
            contentLength += points.Count * 8; // x, y per point
        }
        if (Header.ShapeType == ShapeType.MultiPointM)
        {
            contentLength += 8; // mmin, mmax
            contentLength += points.Count * 12; // x, y, m per point
        }
        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            contentLength += 16; // mmin, mmax, zmin, zmax
            contentLength += points.Count * 16; // x, y, z, m per point
        }
        // Index File
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        indexWriter.Write(offset, false);        // Byte 0     Offset      Integer     1           Big
        indexWriter.Write(contentLength, false); // Byte 4     Length      Integer     1           Big
        // X Y Poly Lines
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        bbWriter.Write(fid + 1, false);          // Byte 0     Record      Integer     1           Big
        bbWriter.Write(contentLength, false);    // Byte 4     Length      Integer     1           Big
        bbWriter.Write((int)Header.ShapeType);   // Byte 8     Shape       Integer     1           Little
        if (Header.ShapeType == ShapeType.NullShape)
        {
            continue;
        }
        bbWriter.Write(f.Envelope.Minimum.X);    // Byte 12    Xmin        Double      1           Little
        bbWriter.Write(f.Envelope.Minimum.Y);    // Byte 20    Ymin        Double      1           Little
        bbWriter.Write(f.Envelope.Maximum.X);    // Byte 28    Xmax        Double      1           Little
        bbWriter.Write(f.Envelope.Maximum.Y);    // Byte 36    Ymax        Double      1           Little
        bbWriter.Write(points.Count);            // Byte 44    #Points     Integer     1           Little
        // Byte X       Points              Point       #Points     Little
        foreach (Coordinate coord in points)
        {
            bbWriter.Write(coord.X);
            bbWriter.Write(coord.Y);
        }
        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            bbWriter.Write(f.Envelope.Minimum.Z);
            bbWriter.Write(f.Envelope.Maximum.Z);
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.Z);
            }
        }
        if (Header.ShapeType == ShapeType.MultiPointM || Header.ShapeType == ShapeType.MultiPointZ)
        {
            if (f.Envelope == null)
            {
                bbWriter.Write(0.0);
                bbWriter.Write(0.0);
            }
            else
            {
                bbWriter.Write(f.Envelope.Minimum.M);
                bbWriter.Write(f.Envelope.Maximum.M);
            }
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.M);
            }
        }
        ProgressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // each record's 8-byte header adds 4 words on top of its content length
    }
    ProgressMeter.Reset();
    bbWriter.Close();
    indexWriter.Close();
    offset += contentLength;
    // NOTE(review): the length is written into Filename, not the fileName parameter —
    // when saving to a new path these differ and the OLD file's header would be patched.
    // Confirm whether Filename is expected to be updated before this call.
    WriteFileLength(Filename, offset);
    UpdateAttributes();
    SaveProjection();
}
/// <summary>
/// Gets the count of members that match each expression, optionally estimating from a
/// random sample of rows instead of scanning the whole attribute table.
/// </summary>
/// <param name="expressions">The string expressions to test. A null or empty expression matches every row.</param>
/// <param name="progressHandler">The progress handler that can also cancel the counting.</param>
/// <param name="maxSampleSize">The integer maximum sample size from which to draw counts. If this is negative, it will not be used.</param>
/// <returns>The integer count of the members that match each expression.</returns>
public override int[] GetCounts(string[] expressions, ICancelProgressHandler progressHandler, int maxSampleSize)
{
    // When attributes are already in memory, the base implementation can count directly.
    if (AttributesPopulated)
    {
        return (base.GetCounts(expressions, progressHandler, maxSampleSize));
    }
    int[] counts = new int[expressions.Length];
    // The most common case would be no filter expression, in which case the count is simply the number of shapes.
    bool requiresRun = false;
    for (int iex = 0; iex < expressions.Length; iex++)
    {
        if (!string.IsNullOrEmpty(expressions[iex]))
        {
            requiresRun = true;
        }
        else
        {
            counts[iex] = NumRows();
        }
    }
    if (!requiresRun)
    {
        return (counts);
    }
    AttributePager ap = new AttributePager(this, 5000);
    ProgressMeter pm = new ProgressMeter(progressHandler, "Calculating Counts", ap.NumPages());
    // Don't bother to use a sampling approach if the number of rows is on the same order of magnitude as the number of samples.
    if (maxSampleSize > 0 && maxSampleSize < NumRows() / 2)
    {
        DataTable sample = new DataTable();
        sample.Columns.AddRange(GetColumns());
        Dictionary<int, int> usedRows = new Dictionary<int, int>();
        int samplesPerPage = maxSampleSize / ap.NumPages();
        Random rnd = new Random(DateTime.Now.Millisecond);
        for (int page = 0; page < ap.NumPages(); page++)
        {
            for (int i = 0; i < samplesPerPage; i++)
            {
                int row;
                // Draw without replacement from the current page.
                // NOTE(review): if samplesPerPage approaches the page's row count this
                // do/while can spin for a very long time (forever once the page is
                // exhausted), and the final page may hold fewer than PageSize rows, so
                // rnd.Next can produce an out-of-range row — verify AttributePager guards this.
                do
                {
                    row = rnd.Next(ap.StartIndex, ap.StartIndex + ap.PageSize);
                } while (usedRows.ContainsKey(row));
                usedRows.Add(row, row);
                sample.Rows.Add(ap.Row(row).ItemArray);
            }
            ap.MoveNext();
            pm.CurrentValue = page;
            if (progressHandler.Cancel)
            {
                break;
            }
        }
        for (int i = 0; i < expressions.Length; i++)
        {
            try
            {
                DataRow[] dr = sample.Select(expressions[i]);
                // NOTE(review): these are raw sample hits, not scaled to the full table —
                // confirm callers expect relative (not absolute) counts in sampling mode.
                counts[i] += dr.Length;
            }
            catch (Exception ex)
            {
                // A malformed expression just yields a zero count; log and continue.
                Debug.WriteLine(ex);
            }
        }
        pm.Reset();
        return (counts);
    }
    // Full scan: evaluate every expression on every page.
    for (int page = 0; page < ap.NumPages(); page++)
    {
        for (int i = 0; i < expressions.Length; i++)
        {
            IDataRow[] dr = ap[page].Select(expressions[i]);
            counts[i] += dr.Length;
        }
        pm.CurrentValue = page;
        if (progressHandler.Cancel)
        {
            break;
        }
    }
    pm.Reset();
    return (counts);
}
/// <summary>
/// This will resample the cells.
/// If the cell size is zero, this will default to the shorter of the width or height
/// divided by 256.
/// </summary>
/// <param name="input1">the input raster.</param>
/// <param name="cellHeight">The new cell height, or zero to use the default.</param>
/// <param name="cellWidth">The new cell width, or zero to use the default.</param>
/// <param name="outputFileName">The string name of the output raster.</param>
/// <param name="progressHandler">An interface for handling the progress messages.</param>
/// <returns>The resampled raster, or null if input1 is null.</returns>
public static IRaster Resample(IRaster input1, double cellHeight, double cellWidth, string outputFileName, IProgressHandler progressHandler)
{
    if (input1 == null)
    {
        return null;
    }
    Extent envelope = input1.Bounds.Extent;
    // Zero cell dimensions default to 1/256th of the corresponding extent.
    if (cellHeight == 0)
    {
        cellHeight = envelope.Height / 256;
    }
    if (cellWidth == 0)
    {
        cellWidth = envelope.Width / 256;
    }
    //Calculate new number of columns and rows
    int noOfCol = Convert.ToInt32(Math.Abs(envelope.Width / cellWidth));
    int noOfRow = Convert.ToInt32(Math.Abs(envelope.Height / cellHeight));
    IRaster output = Raster.CreateRaster(outputFileName, string.Empty, noOfCol, noOfRow, 1, input1.DataType, new[] { string.Empty });
    RasterBounds bound = new RasterBounds(noOfRow, noOfCol, envelope);
    output.Bounds = bound;
    output.NoDataValue = input1.NoDataValue;
    RcIndex index1;
    int max = (output.Bounds.NumRows);
    ProgressMeter pm = new ProgressMeter(progressHandler, "ReSize Cells", max);
    //Loop through every cell for new value
    for (int i = 0; i < max; i++)
    {
        for (int j = 0; j < output.Bounds.NumColumns; j++)
        {
            //Project the cell position to Map
            Coordinate cellCenter = output.CellToProj(i, j);
            // Nearest-neighbor lookup: find the source cell containing this center.
            index1 = input1.ProjToCell(cellCenter);
            double val;
            // NOTE(review): the upper bound compares cell indices to EndRow/EndColumn,
            // which for a windowed raster are file coordinates rather than NumRows-1 /
            // NumColumns-1 — confirm the intended bound for windows.
            if (index1.Row <= input1.EndRow && index1.Column <= input1.EndColumn && index1.Row > -1 && index1.Column > -1)
            {
                // Translate the source's no-data marker into the output's marker.
                val = input1.Value[index1.Row, index1.Column] == input1.NoDataValue ? output.NoDataValue : input1.Value[index1.Row, index1.Column];
            }
            else
            {
                val = output.NoDataValue;
            }
            output.Value[i, j] = val;
        }
        pm.CurrentValue = i;
    }
    output.Save();
    pm.Reset();
    return output;
}
/// <summary>
/// Reads the bounding extent of every shape listed in the shape-header index and
/// caches them in _extents, reporting progress as it goes.
/// </summary>
/// <param name="fileName">Path of the .shp file to read extents from.</param>
/// <param name="progressHandler">Receives progress messages while reading.</param>
private void FillIndexes(string fileName, IProgressHandler progressHandler)
{
    // A file that is exactly the 100-byte header contains no shapes at all.
    var isHeaderOnly = new FileInfo(fileName).Length == 100;
    if (isHeaderOnly)
    {
        return;
    }
    var meter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };
    var total = ShapeHeaders.Count;
    _extents = new List<Extent>(total);
    using (var stream = new FileStream(fileName, FileMode.Open))
    {
        for (var index = 0; index < total; index++)
        {
            meter.CurrentPercent = (int)(index * 100.0 / total);
            _extents.Add(ReadExtent(index, stream));
        }
    }
    meter.Reset();
}
/// <summary>
/// Render the full raster block by block, and then save the values to the pyramid raster.
/// This will probably be nasty and time consuming, but what can you do.
/// </summary>
/// <param name="pyrFile">The fileName of the pyramid image to create.</param>
/// <param name="progressHandler">Receives progress messages during rendering and writing.</param>
/// <returns>The created pyramid image.</returns>
public IImageData CreatePyramidImage(string pyrFile, IProgressHandler progressHandler)
{
    PyramidImage py = new PyramidImage(pyrFile, DataSet.Bounds);
    int width = DataSet.Bounds.NumColumns;
    // Cap each block at roughly 32 million cells' worth of rows to bound memory use.
    int blockHeight = 32000000 / width;
    if (blockHeight > DataSet.Bounds.NumRows) blockHeight = DataSet.Bounds.NumRows;
    int numBlocks = (int)Math.Ceiling(DataSet.Bounds.NumRows / (double)blockHeight);
    int count = DataSet.NumRows;
    if (_symbolizer.ShadedRelief.IsUsed)
    {
        // Shaded relief effectively processes each row twice.
        count = count * 2;
    }
    ProgressMeter pm = new ProgressMeter(progressHandler, "Creating Pyramids", count);
    // Memory counters are only used for the Debug.WriteLine diagnostics below.
    PerformanceCounter pcRemaining = new PerformanceCounter("Memory", "Available Bytes");
    Process proc = Process.GetCurrentProcess();
    long mem;
    long freeRAM;
    for (int j = 0; j < numBlocks; j++)
    {
        int h = blockHeight;
        if (j == numBlocks - 1)
        {
            // The final block covers whatever rows remain.
            h = DataSet.Bounds.NumRows - j * blockHeight;
        }
        mem = proc.PrivateMemorySize64 / 1000000;
        freeRAM = Convert.ToInt64(pcRemaining.NextValue()) / 1000000;
        Debug.WriteLine("Memory before: " + mem + ", " + freeRAM + " remaining.");
        pm.BaseMessage = "Reading from Raster";
        pm.SendProgress();
        using (IRaster r = DataSet.ReadBlock(0, j * blockHeight, width, h))
        {
            // 4 bytes per pixel (32bpp ARGB).
            byte[] vals = new byte[h * 4 * width];
            r.DrawToBitmap(Symbolizer, vals, width * 4, pm);
            pm.BaseMessage = "Writing to Pyramids";
            pm.SendProgress();
            py.WriteWindow(vals, j * blockHeight, 0, h, width, 0);
            // Drop the cached hillshade so the next block recomputes it for its own rows.
            Symbolizer.HillShade = null;
        }
        mem = proc.PrivateMemorySize64 / 1000000;
        freeRAM = Convert.ToInt64(pcRemaining.NextValue()) / 1000000;
        Debug.WriteLine("Memory after: " + mem + "Mb | " + freeRAM + " remaining Mb.");
    }
    pm.Reset();
    py.ProgressHandler = ProgressHandler;
    py.CreatePyramids();
    py.WriteHeader(pyrFile);
    return py;
}
/// <summary>
/// Multiplies the hillshade factors into the RGB channels of the given bitmap in place,
/// darkening or brightening each pixel according to the precomputed _hillshade grid.
/// Does nothing when no hillshade has been calculated.
/// </summary>
/// <param name="bitmap">The bitmap to paint to. Must be lockable as 32bpp ARGB.</param>
/// <param name="progressHandler">The progress handler.</param>
public void PaintShadingToBitmap(Bitmap bitmap, IProgressHandler progressHandler)
{
    BitmapData bmpData;
    if (_hillshade == null)
    {
        return;
    }
    // Create a new Bitmap and use LockBits combined with Marshal.Copy to get an array of bytes to work with.
    Rectangle rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
    try
    {
        bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    }
    catch (ArgumentException ex)
    {
        // LockBits signals an unsupported pixel format through the "format" parameter.
        if (ex.ParamName == "format")
        {
            throw new BitmapFormatException();
        }
        throw;
    }
    int numBytes = bmpData.Stride * bmpData.Height;
    byte[] rgbData = new byte[numBytes];
    Marshal.Copy(bmpData.Scan0, rgbData, 0, numBytes);
    float[][] hillshade = _hillshade;
    ProgressMeter pm = new ProgressMeter(progressHandler, SymbologyMessageStrings.DesktopRasterExt_PaintingHillshade, bitmap.Height);
    // Coarser progress steps for smaller images.
    // FIX: the original checked the thresholds in ascending order with independent ifs,
    // so the "< 100000 => 50" assignment was always overwritten by "< 1000000 => 5";
    // an else-if chain from the smallest threshold restores the intended mapping
    // (matching the descending-threshold pattern used elsewhere in this file).
    int pixelCount = bitmap.Width * bitmap.Height;
    if (pixelCount < 100000) pm.StepPercent = 50;
    else if (pixelCount < 500000) pm.StepPercent = 10;
    else if (pixelCount < 1000000) pm.StepPercent = 5;
    for (int row = 0; row < bitmap.Height; row++)
    {
        for (int col = 0; col < bitmap.Width; col++)
        {
            int offset = row * bmpData.Stride + col * 4;
            byte b = rgbData[offset];
            byte g = rgbData[offset + 1];
            byte r = rgbData[offset + 2];
            // rgbData[offset + 3] = a; don't worry about alpha
            // Hoist the shade factor; it is the same for all three channels.
            float shade = hillshade[row][col];
            int red = Convert.ToInt32(r * shade);
            int green = Convert.ToInt32(g * shade);
            int blue = Convert.ToInt32(b * shade);
            // Clamp each channel into the valid byte range.
            if (red > 255) red = 255;
            if (green > 255) green = 255;
            if (blue > 255) blue = 255;
            if (red < 0) red = 0;
            if (green < 0) green = 0;
            if (blue < 0) blue = 0;
            rgbData[offset] = (byte)blue;
            rgbData[offset + 1] = (byte)green;
            rgbData[offset + 2] = (byte)red;
        }
        pm.CurrentValue = row;
    }
    pm.Reset();
    // Copy the values back into the bitmap
    Marshal.Copy(rgbData, 0, bmpData.Scan0, numBytes);
    bitmap.UnlockBits(bmpData);
}
/// <summary>
/// Builds one feature category per unique value of the given field, assigning each a
/// color and size from the generated ramps and a filter expression matching that value.
/// </summary>
/// <param name="fieldName">The field whose unique values define the categories.</param>
/// <param name="source">The attribute source to read values from.</param>
/// <param name="progressHandler">Progress handler that can also cancel the operation.</param>
private void CreateUniqueCategories(string fieldName, IAttributeSource source, ICancelProgressHandler progressHandler)
{
    Breaks = GetUniqueValues(fieldName, source, progressHandler);
    string fieldExpression = "[" + fieldName.ToUpper() + "]";
    ClearCategories();
    // String fields need quoted (and escaped) literals in the filter expression.
    bool isStringField = CheckFieldType(fieldName, source);
    ProgressMeter pm = new ProgressMeter(progressHandler, "Building Feature Categories", Breaks.Count);
    List<double> sizeRamp = GetSizeSet(Breaks.Count);
    List<Color> colorRamp = GetColorSet(Breaks.Count);
    // FIX: the original also incremented colorIndex inside the loop body, so the index
    // advanced by two per iteration and only every other unique value got a category.
    for (int colorIndex = 0; colorIndex < Breaks.Count; colorIndex++)
    {
        Break brk = Breaks[colorIndex];
        // get the color and size for the category
        Color randomColor = colorRamp[colorIndex];
        double size = sizeRamp[colorIndex];
        IFeatureCategory cat = CreateNewCategory(randomColor, size) as IFeatureCategory;
        if (cat != null)
        {
            cat.LegendText = brk.Name;
            if (isStringField)
            {
                // Escape embedded single quotes by doubling them.
                cat.FilterExpression = fieldExpression + "= '" + brk.Name.Replace("'", "''") + "'";
            }
            else
            {
                cat.FilterExpression = fieldExpression + "=" + brk.Name;
            }
            AddCategory(cat);
        }
        pm.CurrentValue = colorIndex + 1;
    }
    pm.Reset();
}
/// <summary>
/// This tests each feature of the input against the other featureset and collects the
/// intersecting geometry, attributed according to the requested join type.
/// </summary>
/// <param name="self">This featureSet</param>
/// <param name="other">The featureSet to perform intersection with</param>
/// <param name="joinType">The attribute join type: All keeps fields from both sets,
/// LocalOnly keeps only this set's fields, ForeignOnly keeps only the other set's fields.</param>
/// <param name="progHandler">A progress handler for status messages</param>
/// <returns>An IFeatureSet with the intersecting features, broken down based on the join Type.
/// Returns null if the joinType matched no branch.</returns>
public static IFeatureSet Intersection(this IFeatureSet self, IFeatureSet other, FieldJoinType joinType, IProgressHandler progHandler)
{
    IFeatureSet result = null;
    ProgressMeter pm = new ProgressMeter(progHandler, "Calculating Intersection", self.Features.Count);
    if (joinType == FieldJoinType.All)
    {
        // Both attribute tables contribute columns to the output schema.
        result = CombinedFields(self, other);
        // Intersection is symmetric, so only consider I X J where J <= I
        if (!self.AttributesPopulated) self.FillAttributes();
        if (!other.AttributesPopulated) other.FillAttributes();
        int i = 0;
        foreach (IFeature selfFeature in self.Features)
        {
            // Spatial-index pre-filter: only test features whose extents overlap.
            List<IFeature> potentialOthers = other.Select(selfFeature.Envelope.ToExtent());
            foreach (IFeature otherFeature in potentialOthers)
            {
                // Intersection adds its output directly into result.
                selfFeature.Intersection(otherFeature, result, joinType);
            }
            pm.CurrentValue = i;
            i++;
        }
        pm.Reset();
    }
    if (joinType == FieldJoinType.LocalOnly)
    {
        if (!self.AttributesPopulated) self.FillAttributes();
        result = new FeatureSet();
        result.CopyTableSchema(self);
        result.FeatureType = self.FeatureType;
        IFeature union;
        pm = new ProgressMeter(progHandler, "Calculating Union", other.Features.Count);
        if (other.Features != null && other.Features.Count > 0)
        {
            // Collapse the other set into a single union geometry, then clip self by it.
            union = other.Features[0];
            for (int i = 1; i < other.Features.Count; i++)
            {
                union = union.Union(other.Features[i]);
                pm.CurrentValue = i;
            }
            pm.Reset();
            pm = new ProgressMeter(progHandler, "Calculating Intersections", self.NumRows());
            Extent otherEnvelope = new Extent(union.Envelope);
            for (int shp = 0; shp < self.ShapeIndices.Count; shp++)
            {
                // Cheap extent rejection before loading the full feature.
                if (!self.ShapeIndices[shp].Extent.Intersects(otherEnvelope)) continue;
                IFeature selfFeature = self.GetFeature(shp);
                selfFeature.Intersection(union, result, joinType);
                pm.CurrentValue = shp;
            }
            pm.Reset();
        }
    }
    if (joinType == FieldJoinType.ForeignOnly)
    {
        if (!other.AttributesPopulated) other.FillAttributes();
        result = new FeatureSet();
        result.CopyTableSchema(other);
        IFeature union;
        if (self.Features != null && self.Features.Count > 0)
        {
            // Mirror of LocalOnly: union self, then clip each foreign feature by it.
            pm = new ProgressMeter(progHandler, "Calculating Union", self.Features.Count);
            union = self.Features[0];
            for (int i = 1; i < self.Features.Count; i++)
            {
                union = union.Union(self.Features[i]);
                pm.CurrentValue = i;
            }
            pm.Reset();
            if (other.Features != null)
            {
                pm = new ProgressMeter(progHandler, "Calculating Intersection", other.Features.Count);
                int j = 0;
                foreach (IFeature otherFeature in other.Features)
                {
                    IFeature test = otherFeature.Intersection(union, result, joinType);
                    // Only keep results that actually produced geometry.
                    if (test.BasicGeometry != null)
                    {
                        result.Features.Add(test);
                    }
                    pm.CurrentValue = j;
                    j++;
                }
            }
            pm.Reset();
        }
    }
    return result;
}
/// <summary>
/// Creates a new raster with the specified cell size. If the cell size
/// is zero, this will default to the shorter of the width or height
/// divided by 256. If the cell size produces a raster that is greater
/// than 8,000 pixels in either dimension, it will be re-sized to
/// create an 8,000 length or width raster. The featureset is first rendered to a
/// bitmap with one unique color per field value, and each pixel color is then mapped
/// back to the corresponding numeric value.
/// </summary>
/// <param name="fs">The featureset to convert to a raster.</param>
/// <param name="extent">Force the raster to this specified extent.</param>
/// <param name="cellSize">The double extent of the cell.</param>
/// <param name="fieldName">The name of the field to rasterize, or "FID" to use the feature index.</param>
/// <param name="outputFileName">The fileName of the raster to create.</param>
/// <param name="driverCode">The optional GDAL driver code to use if using GDAL
/// for a format that is not discernable from the file extension. An empty string
/// is usually perfectly acceptable here.</param>
/// <param name="options">For GDAL rasters, they can be created with optional parameters
/// passed in as a string array. In most cases an empty string is perfectly acceptable.</param>
/// <param name="progressHandler">An interface for handling the progress messages.</param>
/// <returns>Generates a raster from the vectors.</returns>
public static IRaster ToRaster(IFeatureSet fs, Extent extent, double cellSize, string fieldName, string outputFileName, string driverCode, string[] options, IProgressHandler progressHandler)
{
    Extent env = extent;
    if (cellSize == 0)
    {
        // Default: 1/256th of the shorter extent dimension.
        if (env.Width < env.Height)
        {
            cellSize = env.Width / 256;
        }
        else
        {
            cellSize = env.Height / 256;
        }
    }
    int w = (int)Math.Ceiling(env.Width / cellSize);
    if (w > 8000)
    {
        w = 8000;
        cellSize = env.Width / 8000;
    }
    int h = (int)Math.Ceiling(env.Height / cellSize);
    if (h > 8000)
    {
        // NOTE(review): unlike the width branch above, the cell size is NOT recomputed
        // here, so a tall extent can end up vertically truncated/stretched — confirm intent.
        h = 8000;
    }
    // Render the features into an in-memory bitmap with one unique color per value.
    Bitmap bmp = new Bitmap(w, h);
    Graphics g = Graphics.FromImage(bmp);
    g.Clear(Color.Transparent);
    // Disable anti-aliasing and smoothing so every pixel keeps an exact category color.
    g.SmoothingMode = SmoothingMode.None;
    g.TextRenderingHint = TextRenderingHint.SingleBitPerPixel;
    g.InterpolationMode = InterpolationMode.NearestNeighbor;
    Hashtable colorTable;
    MapArgs args = new MapArgs(new Rectangle(0, 0, w, h), env, g);
    switch (fs.FeatureType)
    {
        case FeatureType.Polygon:
        {
            MapPolygonLayer mpl = new MapPolygonLayer(fs);
            PolygonScheme ps = new PolygonScheme();
            colorTable = ps.GenerateUniqueColors(fs, fieldName);
            mpl.Symbology = ps;
            mpl.DrawRegions(args, new List<Extent> { env });
        }
            break;
        case FeatureType.Line:
        {
            MapLineLayer mpl = new MapLineLayer(fs);
            LineScheme ps = new LineScheme();
            colorTable = ps.GenerateUniqueColors(fs, fieldName);
            mpl.Symbology = ps;
            mpl.DrawRegions(args, new List<Extent> { env });
        }
            break;
        default:
        {
            MapPointLayer mpl = new MapPointLayer(fs);
            PointScheme ps = new PointScheme();
            colorTable = ps.GenerateUniqueColors(fs, fieldName);
            mpl.Symbology = ps;
            mpl.DrawRegions(args, new List<Extent> { env });
        }
            break;
    }
    Type tp = fieldName == "FID" ? typeof(int) : fs.DataTable.Columns[fieldName].DataType;
    // We will try to convert to double if it is a string
    if (tp == typeof(string))
    {
        tp = typeof(double);
    }
    InRamImageData image = new InRamImageData(bmp, env);
    ProgressMeter pm = new ProgressMeter(progressHandler, "Converting To Raster Cells", h);
    IRaster output;
    output = Raster.Create(outputFileName, driverCode, w, h, 1, tp, options);
    output.Bounds = new RasterBounds(h, w, env);
    double noDataValue = output.NoDataValue;
    if (fieldName != "FID")
    {
        // We can't use this method to calculate Max on a non-existent FID field.
        double dtMax = Convert.ToDouble(fs.DataTable.Compute("Max(" + fieldName + ")", ""));
        double dtMin = Convert.ToDouble(fs.DataTable.Compute("Min(" + fieldName + ")", ""));
        if (dtMin <= noDataValue && dtMax >= noDataValue)
        {
            // NOTE(review): both branches assign output.NoDataValue to its own current
            // value, so this block is a no-op; it looks like it was meant to MOVE the
            // no-data marker outside the data range — confirm intended behavior.
            if (dtMax != GetFieldValue(tp, "MaxValue"))
            {
                output.NoDataValue = noDataValue;
            }
            else if (dtMin != GetFieldValue(tp, "MinValue"))
            {
                output.NoDataValue = noDataValue;
            }
        }
    }
    List<RcIndex> locations = new List<RcIndex>();
    List<string> failureList = new List<string>();
    // Map every rendered pixel color back to the field value it represents.
    for (int row = 0; row < h; row++)
    {
        for (int col = 0; col < w; col++)
        {
            Color c = image.GetColor(row, col);
            if (c.A == 0)
            {
                // Fully transparent: no feature was drawn here.
                output.Value[row, col] = output.NoDataValue;
            }
            else
            {
                if (colorTable.ContainsKey(c) == false)
                {
                    if (c.A < 125)
                    {
                        // Mostly transparent blends are treated as no-data.
                        output.Value[row, col] = output.NoDataValue;
                        continue;
                    }
                    // Use a color matching distance to pick the closest member
                    object val = GetCellValue(w, h, row, col, image, c, colorTable, locations);
                    output.Value[row, col] = GetDouble(val, failureList);
                }
                else
                {
                    output.Value[row, col] = GetDouble(colorTable[c], failureList);
                }
            }
        }
        pm.CurrentValue = row;
    }
    // Iteratively retry cells whose colors could not be resolved, up to a fixed cap.
    const int maxIterations = 5;
    int iteration = 0;
    while (locations.Count > 0)
    {
        List<RcIndex> newLocations = new List<RcIndex>();
        foreach (RcIndex location in locations)
        {
            object val = GetCellValue(w, h, location.Row, location.Column, image, image.GetColor(location.Row, location.Column), colorTable, newLocations);
            output.Value[location.Row, location.Column] = GetDouble(val, failureList);
        }
        locations = newLocations;
        iteration++;
        if (iteration > maxIterations)
        {
            break;
        }
    }
    pm.Reset();
    return output;
}
/// <summary>
/// This populates the Table with data from the file. When the backing dbf file is
/// missing, the table is instead filled with blank rows carrying only an FID column.
/// </summary>
/// <param name="numRows">In the event that the dbf file is not found, this indicates how many blank rows should exist in the attribute Table.</param>
public void Fill(int numRows)
{
    _dataRowWatch = new Stopwatch();
    _dataTable.Rows.Clear(); // if we have already loaded data, clear the data.
    if (File.Exists(_fileName) == false)
    {
        // No dbf on disk: synthesize numRows blank records with sequential FIDs.
        _numRecords = numRows;
        _dataTable.BeginLoadData();
        if (!_dataTable.Columns.Contains("FID"))
        {
            _dataTable.Columns.Add("FID", typeof(int));
        }
        for (int row = 0; row < numRows; row++)
        {
            DataRow dr = _dataTable.NewRow();
            dr["FID"] = row;
            _dataTable.Rows.Add(dr);
        }
        _dataTable.EndLoadData();
        return;
    }
    if (!_loaded) GetRowOffsets();
    Stopwatch sw = new Stopwatch();
    sw.Start();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Reading from DBF Table...", _numRecords);
    // Coarser progress steps for smaller tables (descending thresholds, so the
    // smallest matching bucket wins).
    if (_numRecords < 10000000) ProgressMeter.StepPercent = 5;
    if (_numRecords < 5000000) ProgressMeter.StepPercent = 10;
    if (_numRecords < 100000) ProgressMeter.StepPercent = 50;
    if (_numRecords < 10000) ProgressMeter.StepPercent = 100;
    _dataTable.BeginLoadData();
    // Reading the Table elements as well as the shapes in a single progress loop.
    for (int row = 0; row < _numRecords; row++)
    {
        // --------- DATABASE ---------
        // NOTE(review): myReader is not declared anywhere in this method's visible
        // scope, and CurrentFeature is immediately superseded by the char-based read
        // below — confirm whether this line is vestigial.
        CurrentFeature = ReadTableRow(myReader);
        try
        {
            _dataTable.Rows.Add(ReadTableRowFromChars(row));
        }
        catch (Exception ex)
        {
            // A corrupt record becomes a blank row rather than aborting the whole load.
            Debug.WriteLine(ex.ToString());
            _dataTable.Rows.Add(_dataTable.NewRow());
        }
        // If a progress message needs to be updated, this will handle that.
        ProgressMeter.CurrentValue = row;
    }
    ProgressMeter.Reset();
    _dataTable.EndLoadData();
    sw.Stop();
    Debug.WriteLine("Load Time:" + sw.ElapsedMilliseconds + " Milliseconds");
    Debug.WriteLine("Conversion:" + _dataRowWatch.ElapsedMilliseconds + " Milliseconds");
    _attributesPopulated = true;
    OnAttributesFilled();
}
/// <summary>
/// Saves the file to a new location, writing the .shp record stream and the .shx index
/// side by side. Offsets and content lengths are expressed in 16-bit words per the
/// shapefile specification.
/// </summary>
/// <param name="fileName">The fileName to save</param>
/// <param name="overwrite">Boolean that specifies whether or not to overwrite the existing file</param>
public override void SaveAs(string fileName, bool overwrite)
{
    EnsureValidFileToSave(fileName, overwrite);
    Filename = fileName;
    // Set ShapeType before setting extent.
    if (CoordinateType == CoordinateType.Regular)
    {
        Header.ShapeType = ShapeType.MultiPoint;
    }
    if (CoordinateType == CoordinateType.M)
    {
        Header.ShapeType = ShapeType.MultiPointM;
    }
    if (CoordinateType == CoordinateType.Z)
    {
        Header.ShapeType = ShapeType.MultiPointZ;
    }
    HeaderSaveAs(Filename);
    if (IndexMode)
    {
        SaveAsIndexed(Filename);
        return;
    }
    var bbWriter = new BufferedBinaryWriter(Filename);
    var indexWriter = new BufferedBinaryWriter(Header.ShxFilename);
    int fid = 0;
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words
    int contentLength = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    foreach (IFeature f in Features)
    {
        offset += contentLength; // adding the previous content length from each loop calculates the word offset
        List<Coordinate> points = new List<Coordinate>();
        // 20 words = shape type (2) + bounding box (16) + point count (2).
        contentLength = 20;
        for (int iPart = 0; iPart < f.Geometry.NumGeometries; iPart++)
        {
            IList<Coordinate> coords = f.Geometry.GetGeometryN(iPart).Coordinates;
            foreach (Coordinate coord in coords)
            {
                points.Add(coord);
            }
        }
        if (Header.ShapeType == ShapeType.MultiPoint)
        {
            contentLength += points.Count * 8; // x, y per point
        }
        if (Header.ShapeType == ShapeType.MultiPointM)
        {
            contentLength += 8; // mmin, mmax
            contentLength += points.Count * 12; // x, y, m per point
        }
        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            contentLength += 16; // mmin, mmax, zmin, zmax
            contentLength += points.Count * 16; // x, y, z, m per point
        }
        // Index File
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        indexWriter.Write(offset, false);        // Byte 0     Offset      Integer     1           Big
        indexWriter.Write(contentLength, false); // Byte 4     Length      Integer     1           Big
        // X Y Poly Lines
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        bbWriter.Write(fid + 1, false);          // Byte 0     Record      Integer     1           Big
        bbWriter.Write(contentLength, false);    // Byte 4     Length      Integer     1           Big
        bbWriter.Write((int)Header.ShapeType);   // Byte 8     Shape       Integer     1           Little
        if (Header.ShapeType == ShapeType.NullShape)
        {
            continue;
        }
        bbWriter.Write(f.Geometry.EnvelopeInternal.MinX); // Byte 12    Xmin        Double      1           Little
        bbWriter.Write(f.Geometry.EnvelopeInternal.MinY); // Byte 20    Ymin        Double      1           Little
        bbWriter.Write(f.Geometry.EnvelopeInternal.MaxX); // Byte 28    Xmax        Double      1           Little
        bbWriter.Write(f.Geometry.EnvelopeInternal.MaxY); // Byte 36    Ymax        Double      1           Little
        bbWriter.Write(points.Count);                     // Byte 44    #Points     Integer     1           Little
        // Byte X       Points              Point       #Points     Little
        foreach (Coordinate coord in points)
        {
            bbWriter.Write(coord.X);
            bbWriter.Write(coord.Y);
        }
        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            // NOTE(review): EnvelopeInternal.Minimum / .Maximum are not standard NTS
            // Envelope members — confirm these resolve against the project's envelope type.
            bbWriter.Write(f.Geometry.EnvelopeInternal.Minimum.Z);
            bbWriter.Write(f.Geometry.EnvelopeInternal.Maximum.Z);
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.Z);
            }
        }
        if (Header.ShapeType == ShapeType.MultiPointM || Header.ShapeType == ShapeType.MultiPointZ)
        {
            if (f.Geometry.EnvelopeInternal == null)
            {
                bbWriter.Write(0.0);
                bbWriter.Write(0.0);
            }
            else
            {
                bbWriter.Write(f.Geometry.EnvelopeInternal.Minimum.M);
                bbWriter.Write(f.Geometry.EnvelopeInternal.Maximum.M);
            }
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.M);
            }
        }
        ProgressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // each record's 8-byte header adds 4 words on top of its content length
    }
    ProgressMeter.Reset();
    bbWriter.Close();
    indexWriter.Close();
    offset += contentLength;
    WriteFileLength(Filename, offset);
    UpdateAttributes();
    SaveProjection();
}