private int _writeOffset; // presumably the current write position within the buffer — TODO confirm against the buffered write methods (not visible here)

// #endregion #region Constructors

/// <summary>
/// Creates a new instance of BufferedBinaryWriter.
/// </summary>
/// <param name="fileName">The string path of a file to open using this BufferedBinaryWriter.</param>
public BufferedBinaryWriter(string fileName)
    : this(fileName, null, 100000)
{
    // This is just an overload that sends the default null value in for the progressHandler.
    // NOTE(review): despite passing null to the chained constructor, the handler is then
    // replaced with the application-wide DataManager default — confirm the chained
    // constructor tolerates a null handler before these assignments run.
    _progressHandler = DataManager.DefaultDataManager.ProgressHandler;
    _progressMeter = new ProgressMeter(_progressHandler);
}
/// <summary>
/// Initializes a new Smoother that operates on a managed byte array of image data.
/// </summary>
/// <param name="stride">The number of bytes per image row (including any padding).</param>
/// <param name="width">The image width in pixels.</param>
/// <param name="height">The image height in pixels.</param>
/// <param name="inRgbData">The raw image bytes to smooth; the result buffer is sized to match.</param>
/// <param name="progHandler">The handler that receives smoothing progress messages.</param>
public Smoother(int stride, int width, int height, byte[] inRgbData, IProgressHandler progHandler)
{
    _width = width;
    _height = height;
    _stride = stride;
    _rgbData = inRgbData;
    _result = new byte[inRgbData.Length];
    _pm = new ProgressMeter(progHandler, "Smoothing Image", height);
}
/// <summary>
/// Allows overriding the dispose behavior to handle any resources in addition to what are handled in the
/// image data class.
/// </summary>
/// <param name="disposeManagedResources">A Boolean value that indicates whether the overriding method
/// should dispose managed resources, or just the unmanaged ones.</param>
protected override void Dispose(bool disposeManagedResources)
{
    if (disposeManagedResources)
    {
        Name = null;
        Projection = null;
        TypeName = null;
        _progressHandler = null;
        _progressMeter = null;
    }

    // BUG FIX: the override previously never chained to the base class, so the resources
    // "handled in the image data class" (per the summary above) were never released.
    base.Dispose(disposeManagedResources);
}
/// <summary>
/// Builds one GraphicsPath per (selected, category) state and fills each with the polygon
/// outlines of the requested shapes, clipping shapes that extend past the visible extent.
/// </summary>
/// <param name="e">The map arguments providing the geographic extent and projection.</param>
/// <param name="indices">The shape indices to build paths for. NOTE(review): enumerated twice
/// (Count() + foreach) when progress reporting is enabled — pass a materialized list if the
/// source is a lazy query.</param>
/// <param name="paths">Receives the per-state border paths, in category order (unselected then selected).</param>
private void BuildPaths(MapArgs e, IEnumerable<int> indices, out List<GraphicsPath> paths)
{
    paths = new List<GraphicsPath>();
    Extent drawExtents = e.GeographicExtents;
    Rectangle clipRect = e.ProjToPixel(e.GeographicExtents);
    SoutherlandHodgman shClip = new SoutherlandHodgman(clipRect);
    List<GraphicsPath> graphPaths = new List<GraphicsPath>();
    Dictionary<FastDrawnState, GraphicsPath> borders = new Dictionary<FastDrawnState, GraphicsPath>();

    // One path per (selection state, category) combination.
    for (int selectState = 0; selectState < 2; selectState++)
    {
        foreach (IPolygonCategory category in Symbology.Categories)
        {
            FastDrawnState state = new FastDrawnState(selectState == 1, category);
            GraphicsPath border = new GraphicsPath();
            borders.Add(state, border);
            graphPaths.Add(border);
        }
    }

    paths.AddRange(graphPaths);
    List<ShapeRange> shapes = DataSet.ShapeIndices;
    double[] vertices = DataSet.Vertex;
    if (ProgressReportingEnabled)
    {
        ProgressMeter = new ProgressMeter(ProgressHandler, "Building Paths", indices.Count());
    }

    if (!DrawnStatesNeeded)
    {
        // Every shape shares the single default (unselected, first-category) state.
        FastDrawnState state = new FastDrawnState(false, Symbology.Categories[0]);
        foreach (int shp in indices)
        {
            if (ProgressReportingEnabled) ProgressMeter.Next();

            // BUG FIX: the bounds check previously ran AFTER shapes[shp] was indexed
            // (risking ArgumentOutOfRangeException), and a non-intersecting shape or a
            // missing dictionary key returned from the method, silently dropping every
            // remaining shape. Skip the individual shape instead, matching the branch below.
            if (shp >= shapes.Count) continue;
            ShapeRange shape = shapes[shp];
            if (!shape.Extent.Intersects(e.GeographicExtents)) continue;
            if (!borders.ContainsKey(state)) continue;

            // Fully-contained shapes need no clipping; pass null to skip Sutherland-Hodgman.
            BuildPolygon(vertices, shapes[shp], borders[state], e, drawExtents.Contains(shape.Extent) ? null : shClip);
        }
    }
    else
    {
        FastDrawnState[] states = DrawnStates;
        foreach (GraphicsPath borderPath in borders.Values)
        {
            if (borderPath != null)
            {
                borderPath.FillMode = FillMode.Winding;
            }
        }

        foreach (int shp in indices)
        {
            if (ProgressReportingEnabled) ProgressMeter.Next();
            if (shp >= shapes.Count) return;
            if (shp >= states.Length)
            {
                // The drawn-states cache is stale; rebuild it before indexing.
                AssignFastDrawnStates();
                states = DrawnStates;
            }

            if (states[shp].Visible == false) continue;
            ShapeRange shape = shapes[shp];
            if (!shape.Extent.Intersects(e.GeographicExtents)) continue;
            if (drawExtents.Contains(shape.Extent))
            {
                FastDrawnState state = states[shp];
                if (!borders.ContainsKey(state)) continue;
                BuildPolygon(vertices, shapes[shp], borders[state], e, null);
            }
            else
            {
                FastDrawnState state = states[shp];
                if (!borders.ContainsKey(state)) continue;
                BuildPolygon(vertices, shapes[shp], borders[state], e, shClip);
            }
        }
    }

    if (ProgressReportingEnabled) ProgressMeter.Reset();
}
/// <summary>
/// This tests each feature of the input against the other FeatureSet and accumulates the
/// intersecting geometry into a new result FeatureSet.
/// </summary>
/// <param name="self">This featureSet.</param>
/// <param name="other">The featureSet to perform intersection with.</param>
/// <param name="joinType">The attribute join type: All combines both schemas; LocalOnly keeps
/// this set's schema; ForeignOnly keeps the other set's schema.</param>
/// <param name="progHandler">A progress handler for status messages.</param>
/// <returns>An IFeatureSet with the intersecting features, broken down based on the join Type.</returns>
public static IFeatureSet Intersection(this IFeatureSet self, IFeatureSet other, FieldJoinType joinType, IProgressHandler progHandler)
{
    IFeatureSet result = null;
    // pm is reassigned several times below as each phase (union, intersection) has its own
    // item count and caption.
    ProgressMeter pm = new ProgressMeter(progHandler, "Calculating Intersection", self.Features.Count);
    if (joinType == FieldJoinType.All)
    {
        // Pairwise feature-by-feature intersection; both attribute tables must be loaded
        // so the combined schema rows can be populated.
        result = CombinedFields(self, other);
        // Intersection is symmetric, so only consider I X J where J <= I
        if (!self.AttributesPopulated)
        {
            self.FillAttributes();
        }

        if (!other.AttributesPopulated)
        {
            other.FillAttributes();
        }

        for (int i = 0; i < self.Features.Count; i++)
        {
            IFeature selfFeature = self.Features[i];
            // Spatial pre-filter: only features whose envelopes overlap are candidates.
            List<IFeature> potentialOthers = other.Select(selfFeature.Geometry.EnvelopeInternal.ToExtent());
            foreach (IFeature otherFeature in potentialOthers)
            {
                // selfFeature.Intersection adds any resulting feature(s) directly to result.
                selfFeature.Intersection(otherFeature, result, joinType);
            }

            pm.CurrentValue = i;
        }

        pm.Reset();
    }
    else if (joinType == FieldJoinType.LocalOnly)
    {
        // Union all of "other" into a single geometry first, then intersect each local
        // shape with that union so each local feature appears at most once.
        if (!self.AttributesPopulated)
        {
            self.FillAttributes();
        }

        result = new FeatureSet();
        result.CopyTableSchema(self);
        result.FeatureType = self.FeatureType;
        if (other.Features != null && other.Features.Count > 0)
        {
            pm = new ProgressMeter(progHandler, "Calculating Union", other.Features.Count);
            IFeature union = other.Features[0];
            for (int i = 1; i < other.Features.Count; i++)
            {
                union = union.Union(other.Features[i].Geometry);
                pm.CurrentValue = i;
            }

            pm.Reset();
            pm = new ProgressMeter(progHandler, "Calculating Intersections", self.NumRows());
            Extent otherEnvelope = union.Geometry.EnvelopeInternal.ToExtent();
            for (int shp = 0; shp < self.ShapeIndices.Count; shp++)
            {
                // Cheap envelope rejection before loading the full feature.
                if (!self.ShapeIndices[shp].Extent.Intersects(otherEnvelope))
                {
                    continue;
                }

                IFeature selfFeature = self.GetFeature(shp);
                selfFeature.Intersection(union, result, joinType);
                pm.CurrentValue = shp;
            }

            pm.Reset();
        }
    }
    else if (joinType == FieldJoinType.ForeignOnly)
    {
        // Mirror of LocalOnly with the roles of self/other swapped.
        if (!other.AttributesPopulated)
        {
            other.FillAttributes();
        }

        result = new FeatureSet();
        result.CopyTableSchema(other);
        result.FeatureType = other.FeatureType;
        if (self.Features != null && self.Features.Count > 0)
        {
            pm = new ProgressMeter(progHandler, "Calculating Union", self.Features.Count);
            IFeature union = self.Features[0];
            for (int i = 1; i < self.Features.Count; i++)
            {
                union = union.Union(self.Features[i].Geometry);
                pm.CurrentValue = i;
            }

            pm.Reset();
            if (other.Features != null)
            {
                pm = new ProgressMeter(progHandler, "Calculating Intersection", other.Features.Count);
                for (int i = 0; i < other.Features.Count; i++)
                {
                    // LocalOnly here is relative to "other", whose schema the result copies.
                    other.Features[i].Intersection(union, result, FieldJoinType.LocalOnly);
                    pm.CurrentValue = i;
                }
            }

            pm.Reset();
        }
    }

    // NOTE(review): returns null when joinType matches none of the three handled values —
    // confirm callers handle a null result.
    return (result);
}
/// <summary>
/// Multiplies each pixel of the given bitmap by the pre-computed hillshade factor,
/// darkening or brightening the image in place. No-op if hillshade has not been calculated.
/// </summary>
/// <param name="bitmap">The bitmap to paint to. Must support Format32bppArgb locking.</param>
/// <param name="progressHandler">The progress handler.</param>
/// <exception cref="BitmapFormatException">Thrown when the bitmap cannot be locked in 32bpp ARGB format.</exception>
public void PaintShadingToBitmap(Bitmap bitmap, IProgressHandler progressHandler)
{
    BitmapData bmpData;
    if (_hillshade == null)
    {
        return;
    }

    // Use LockBits combined with Marshal.Copy to get an array of bytes to work with.
    Rectangle rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
    try
    {
        bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
    }
    catch (ArgumentException ex)
    {
        if (ex.ParamName == "format")
        {
            throw new BitmapFormatException();
        }

        throw;
    }

    int numBytes = bmpData.Stride * bmpData.Height;
    byte[] rgbData = new byte[numBytes];
    Marshal.Copy(bmpData.Scan0, rgbData, 0, numBytes);
    float[][] hillshade = _hillshade;
    ProgressMeter pm = new ProgressMeter(progressHandler, SymbologyMessageStrings.DesktopRasterExt_PaintingHillshade, bitmap.Height);

    // BUG FIX: the thresholds were previously checked smallest-first, so a small image
    // satisfied all three conditions and always ended with StepPercent = 5; the 50 and 10
    // assignments were dead. Check largest-first so smaller images get coarser steps.
    int pixelCount = bitmap.Width * bitmap.Height;
    if (pixelCount < 1000000) pm.StepPercent = 5;
    if (pixelCount < 500000) pm.StepPercent = 10;
    if (pixelCount < 100000) pm.StepPercent = 50;

    for (int row = 0; row < bitmap.Height; row++)
    {
        for (int col = 0; col < bitmap.Width; col++)
        {
            int offset = row * bmpData.Stride + col * 4;
            byte b = rgbData[offset];
            byte g = rgbData[offset + 1];
            byte r = rgbData[offset + 2];
            // rgbData[offset + 3] = a; don't worry about alpha

            // Hoisted: the same shade factor scales all three channels.
            float shade = hillshade[row][col];
            int red = Convert.ToInt32(r * shade);
            int green = Convert.ToInt32(g * shade);
            int blue = Convert.ToInt32(b * shade);

            // Clamp to the valid byte range before narrowing.
            if (red > 255) red = 255;
            if (green > 255) green = 255;
            if (blue > 255) blue = 255;
            if (red < 0) red = 0;
            if (green < 0) green = 0;
            if (blue < 0) blue = 0;

            rgbData[offset] = (byte)blue;
            rgbData[offset + 1] = (byte)green;
            rgbData[offset + 2] = (byte)red;
        }

        pm.CurrentValue = row;
    }

    pm.Reset();

    // Copy the values back into the bitmap.
    Marshal.Copy(rgbData, 0, bmpData.Scan0, numBytes);
    bitmap.UnlockBits(bmpData);
}
/// <summary>
/// Initializes a new Smoother that reads its pixel data through an unmanaged pointer
/// instead of a managed array, copying the smoothed result back on completion.
/// </summary>
/// <param name="stride">The number of bytes per image row (including any padding).</param>
/// <param name="width">The image width in pixels.</param>
/// <param name="height">The image height in pixels.</param>
/// <param name="inRgbData">Pointer to the unmanaged image bytes to smooth.</param>
/// <param name="progHandler">The handler that receives smoothing progress messages.</param>
public Smoother(int stride, int width, int height, IntPtr inRgbData, IProgressHandler progHandler)
{
    _width = width;
    _height = height;
    _stride = stride;
    _result = new byte[stride * height];
    // Reads come straight from the unmanaged block; the result is marshaled back when done.
    _getByte = index => Marshal.ReadByte(inRgbData, index);
    _copyResult = () => Marshal.Copy(_result, 0, inRgbData, _result.Length);
    _pm = new ProgressMeter(progHandler, "Smoothing Image", height);
}
/// <summary>
/// Attempts to remove a range of shapes by index. This is optimized to
/// work better for large numbers. For one or two, using RemoveShapeAt might
/// be faster.
/// </summary>
/// <param name="indices">
/// The enumerable set of indices to remove. NOTE(review): enumerated multiple times below —
/// pass a materialized collection if the source is a lazy query.
/// </param>
public void RemoveShapesAt(IEnumerable<int> indices)
{
    if (IndexMode == false)
    {
        // BUG FIX: indices were previously removed in enumeration order, so each RemoveAt
        // shifted every later index and the wrong features were deleted. Remove from the
        // highest index down so earlier indices stay valid. The bounds check also compares
        // against Features.Count now (it used _shapeIndices.Count, which is not shrunk here).
        List<int> featureIndices = indices.ToList();
        featureIndices.Sort();
        featureIndices.Reverse();
        foreach (int index in featureIndices)
        {
            if (index < 0 || index >= Features.Count) continue;
            Features.RemoveAt(index);
        }

        InitializeVertices();
        return;
    }

    // ---- Index mode: rebuild the packed vertex arrays without the removed shapes. ----
    List<int> remove = indices.ToList();
    remove.Sort();
    if (remove.Count == 0) return;

    // Collect the indices that survive the removal, consuming "remove" as we pass each one.
    List<int> remaining = new List<int>();
    for (int i = 0; i < _shapeIndices.Count; i++)
    {
        if (remove.Count > 0 && remove[0] == i)
        {
            remove.Remove(i);
            continue;
        }

        remaining.Add(i);
    }

    List<double> vertex = new List<double>();
    List<double> z = new List<double>();
    List<double> m = new List<double>();
    int pointTotal = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Removing Vertices", remaining.Count);
    foreach (int index in remaining)
    {
        if (index < 0 || index >= _shapeIndices.Count) continue;
        ShapeRange sr = _shapeIndices[index];

        // Copy this shape's interleaved X,Y pairs into the rebuilt vertex list.
        double[] xyShape = new double[sr.NumPoints * 2];
        Array.Copy(_vertices, sr.StartIndex * 2, xyShape, 0, sr.NumPoints * 2);
        vertex.AddRange(xyShape);

        // Carry Z (and M, per http://dotspatial.codeplex.com/workitem/174) along with the shape.
        if (CoordinateType == CoordinateType.Z)
        {
            double[] zShape = new double[sr.NumPoints];
            Array.Copy(_z, sr.StartIndex, zShape, 0, sr.NumPoints);
            z.AddRange(zShape);
            double[] mShape = new double[sr.NumPoints];
            Array.Copy(_m, sr.StartIndex, mShape, 0, sr.NumPoints);
            m.AddRange(mShape);
        }

        // Re-point the shape at its new position in the packed array.
        sr.StartIndex = pointTotal;
        pointTotal += sr.NumPoints;
        ProgressMeter.Next();
    }

    ProgressMeter.Reset();
    _vertices = vertex.ToArray();
    _m = m.ToArray();
    _z = z.ToArray();

    // Rebuild the shape index list, skipping the removed entries.
    remove = indices.ToList();
    remove.Sort();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Removing indices", remove.Count);
    List<ShapeRange> result = new List<ShapeRange>();
    int myIndex = 0;
    foreach (ShapeRange range in _shapeIndices)
    {
        if (remove.Count > 0 && remove[0] == myIndex)
        {
            remove.RemoveAt(0);
        }
        else
        {
            result.Add(range);
        }

        ProgressMeter.Next();
        myIndex++;
    }

    _shapeIndices = result;
    ProgressMeter.Reset();

    // Attribute rows are removed highest-first so earlier row indices are unaffected.
    remove = indices.ToList();
    remove.Sort();
    remove.Reverse();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Removing Attribute Rows", remove.Count);
    foreach (int index in remove)
    {
        if (AttributesPopulated)
        {
            DataTable.Rows.RemoveAt(index);
        }

        ProgressMeter.Next();
    }

    ProgressMeter.Reset();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Reassigning part vertex pointers", _shapeIndices.Count);

    // Updating the vertex array means that the parts are now pointing
    // to the wrong array of vertices internally. This doesn't affect
    // rendering, but will affect selection.
    foreach (ShapeRange shape in _shapeIndices)
    {
        foreach (PartRange part in shape.Parts)
        {
            part.Vertices = _vertices;
        }
    }
}
/// <summary>
/// Populates the given streams for the shp and shx file when not in IndexMode.
/// Writes one fixed-size point record per feature; empty geometries become null-shape records.
/// Offsets and content lengths are in 16-bit words, per the shapefile format.
/// </summary>
/// <param name="shpStream">Stream that is used to write the shp file.</param>
/// <param name="shxStream">Stream that is used to write the shx file.</param>
/// <returns>The lengths of the streams in bytes.</returns>
private StreamLengthPair PopulateShpAndShxStreamsNotIndexed(Stream shpStream, Stream shxStream)
{
    var progressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    int recordIndex = 0;
    int wordOffset = 50; // the shapefile header occupies 100 bytes = 50 words

    foreach (IFeature feature in Features)
    {
        // An empty geometry is stored as a null shape: content is just the 4-byte type (2 words).
        bool writeNullShape = feature.Geometry.IsEmpty;
        int wordsOfContent = writeNullShape ? 2 : GetContentLength(Header.ShapeType);

        // Index record: big-endian offset + content length.
        shxStream.WriteBe(wordOffset);
        shxStream.WriteBe(wordsOfContent);

        // Record header: big-endian 1-based record number + content length.
        shpStream.WriteBe(recordIndex + 1);
        shpStream.WriteBe(wordsOfContent);

        if (writeNullShape)
        {
            shpStream.WriteLe((int)ShapeType.NullShape); // Byte 8 Shape Type 0 Integer 1 Little
        }
        else
        {
            shpStream.WriteLe((int)Header.ShapeType); // Byte 8 Shape Type Integer 1 Little
            Coordinate coord = feature.Geometry.Coordinates[0];
            shpStream.WriteLe(coord.X);
            shpStream.WriteLe(coord.Y);
            if (Header.ShapeType == ShapeType.PointZ)
            {
                shpStream.WriteLe(coord.Z);
            }

            if (Header.ShapeType == ShapeType.PointM || Header.ShapeType == ShapeType.PointZ)
            {
                shpStream.WriteLe(coord.M);
            }
        }

        progressMeter.CurrentValue = recordIndex;
        recordIndex++;

        // Each record advances the offset by its 8-byte (4-word) header plus its content.
        wordOffset += 4 + wordsOfContent;
    }

    progressMeter.Reset();
    return new StreamLengthPair { ShpLength = wordOffset, ShxLength = 50 + (recordIndex * 4) };
}
// X Y Points: Total Length = 28 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number  Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1       Big
// Byte 4       Content Length      Integer     1       Big
// Byte 8       Shape Type 1        Integer     1       Little
// Byte 12      X                   Double      1       Little
// Byte 20      Y                   Double      1       Little

// X Y M Points: Total Length = 36 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number  Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1       Big
// Byte 4       Content Length      Integer     1       Big
// Byte 8       Shape Type 21       Integer     1       Little
// Byte 12      X                   Double      1       Little
// Byte 20      Y                   Double      1       Little
// Byte 28      M                   Double      1       Little

// X Y Z M Points: Total Length = 44 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number  Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1       Big
// Byte 4       Content Length      Integer     1       Big
// Byte 8       Shape Type 11       Integer     1       Little
// Byte 12      X                   Double      1       Little
// Byte 20      Y                   Double      1       Little
// Byte 28      Z                   Double      1       Little
// Byte 36      M                   Double      1       Little

/// <summary>
/// Obtains a typed list of ShapefilePoint structures with double values associated with the various coordinates.
/// First pass reads every record header to size the arrays; second pass reads the coordinates.
/// </summary>
/// <param name="fileName">A string fileName.</param>
/// <param name="progressHandler">Progress handler.</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    if (!CanBeRead(fileName, this, ShapeType.Point, ShapeType.PointM, ShapeType.PointZ))
    {
        return;
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);
    var numShapes = shapeHeaders.Count;
    var shapeIndices = new List<ShapeRange>(numShapes);
    int totalPointsCount = 0;
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };

    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        // Pass 1: build ShapeRange entries and count the non-null points.
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            var shape = new ShapeRange(FeatureType.Point, CoordinateType)
            {
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                StartIndex = totalPointsCount,
                ShapeType = (ShapeType)reader.ReadInt32()
            };

            // BUG FIX: "…" + shp + 1 concatenated the two numbers (shp = 3 produced "31");
            // parenthesize so the expected 1-based record number is shown.
            Debug.Assert(shape.RecordNumber == shp + 1, "The record number should equal " + (shp + 1));
            Debug.Assert(shape.ContentLength == shapeHeaders[shp].ContentLength, "The shapes content length should equals the shapeHeaders content length.");

            if (shape.ShapeType == ShapeType.NullShape)
            {
                shape.NumPoints = 0;
                shape.NumParts = 0;
            }
            else
            {
                totalPointsCount += 1;
                shape.NumPoints = 1;
                shape.NumParts = 1;
            }

            shapeIndices.Add(shape);
        }

        double[] m = null;
        double[] z = null;
        var vert = new double[2 * totalPointsCount]; // X,Y interleaved
        if (Header.ShapeType == ShapeType.PointM || Header.ShapeType == ShapeType.PointZ)
        {
            m = new double[totalPointsCount];
        }

        if (Header.ShapeType == ShapeType.PointZ)
        {
            z = new double[totalPointsCount];
        }

        // Pass 2: read the coordinate values for every non-null shape.
        int i = 0;
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + (shp * 50.0 / numShapes));
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape)
            {
                continue;
            }

            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek(3 * 4, SeekOrigin.Current); // Skip first bytes (Record Number, Content Length, Shapetype)

            // "ind" tracks how many content bytes have been consumed so the optional
            // trailing M can be detected against the record's ByteLength.
            var ind = 4;
            vert[i * 2] = reader.ReadDouble();
            ind += 8;
            vert[(i * 2) + 1] = reader.ReadDouble();
            ind += 8;

            if (z != null)
            {
                z[i] = reader.ReadDouble();
                ind += 8;
            }

            if (m != null)
            {
                // Records without a stored M get the sentinel double.MinValue.
                if (shapeHeaders[shp].ByteLength <= ind)
                {
                    m[i] = double.MinValue;
                }
                else
                {
                    m[i] = reader.ReadDouble();
                }
            }

            var part = new PartRange(vert, shape.StartIndex, 0, FeatureType.Point)
            {
                NumVertices = 1
            };
            shape.Parts.Add(part);
            // A point's extent is degenerate: min == max in both axes.
            shape.Extent = new Extent(new[] { vert[i * 2], vert[(i * 2) + 1], vert[i * 2], vert[(i * 2) + 1] });
            i++;
        }

        Vertex = vert;
        M = m;
        Z = z;
        ShapeIndices = shapeIndices;
    }

    progressMeter.Reset();
}
/// <summary>
/// If no file exists, this writes the header and no-data values. If a file exists, it will assume
/// that data already has been filled in the file and will attempt to insert the data values
/// as a window into the file. If you want to create a copy of the file and values, just use
/// System.IO.File.Copy, it almost certainly would be much more optimized.
/// </summary>
/// <param name="fileName">The string fileName to write values to.</param>
public void Write(string fileName)
{
    FileStream fs;
    BinaryWriter bw;
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Writing values to " + fileName, NumRows);

    // BUG FIX: the product was computed in 32-bit arithmetic before widening, overflowing
    // for large rasters; cast the first operand so the multiplication happens in long.
    long expectedByteCount = (long)NumRows * NumColumns * ByteSize;
    if (expectedByteCount < 1000000) pm.StepPercent = 5;
    // NOTE(review): every value < 1000000 is also < 5000000, so the assignment below always
    // overwrites the 5 — the effective mapping is < 100000 → 50, otherwise < 5000000 → 10.
    // Preserved as-is to avoid changing progress behavior; confirm intended thresholds.
    if (expectedByteCount < 5000000) pm.StepPercent = 10;
    if (expectedByteCount < 100000) pm.StepPercent = 50;

    if (File.Exists(fileName))
    {
        FileInfo fi = new FileInfo(fileName);

        // if the following test fails, then the target raster doesn't fit the bill for
        // pasting into, so clear it and write a new one.
        // BUG FIX: widened to long to avoid Int32 overflow for large files.
        long expectedFileSize = HeaderSize + (long)ByteSize * NumColumnsInFile * NumRowsInFile;
        if (fi.Length == expectedFileSize)
        {
            WriteHeader(fileName);

            // assume that we already have a file set up for us, and just write the window
            // of values into the appropriate place.
            fs = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None, ByteSize * NumColumns);
            fs.Seek(HeaderSize, SeekOrigin.Begin);

            // BUG FIX: seeking to the window's top row must skip StartRow WHOLE rows;
            // the original seeked only ByteSize * StartRow bytes (one cell per row).
            fs.Seek((long)ByteSize * StartRow * NumColumnsInFile, SeekOrigin.Current);
            bw = new BinaryWriter(fs); // encoding doesn't matter because we don't have characters

            for (int row = 0; row < NumRows; row++)
            {
                fs.Seek(StartColumn * ByteSize, SeekOrigin.Current);
                for (int col = 0; col < NumColumns; col++)
                {
                    // this is the only line that is type dependant, but I don't want to
                    // type check on every value
                    bw.Write(Data[row][col]);
                }

                fs.Flush(); // Since I am buffering, make sure that I write the buffered data before seeking
                fs.Seek((NumColumnsInFile - EndColumn - 1) * ByteSize, SeekOrigin.Current);
                pm.CurrentValue = row;
            }

            pm.Reset();
            bw.Close();
            return;
        }

        // If we got here, the file didn't match the specifications, so write a new one.
        // BUG FIX: the expected size was string-concatenated term by term
        // ("…" + HeaderSize + ByteSize * …) instead of summed; log the computed value.
        Debug.WriteLine("The size of the file was " + fi.Length + " which didn't match the expected " + expectedFileSize);
        File.Delete(fileName);
    }

    if (File.Exists(fileName))
    {
        File.Delete(fileName);
    }

    WriteHeader(fileName);

    // Open as append and it will automatically skip the header for us.
    fs = new FileStream(fileName, FileMode.Append, FileAccess.Write, FileShare.None, ByteSize * NumColumnsInFile);
    bw = new BinaryWriter(fs);

    // the row and column counters here are relative to the whole file, not just the window
    // that is currently in memory.
    pm.EndValue = NumRowsInFile;
    int noDataValue = Convert.ToInt32(NoDataValue);
    for (int row = 0; row < NumRowsInFile; row++)
    {
        for (int col = 0; col < NumColumnsInFile; col++)
        {
            if (row < StartRow || row > EndRow || col < StartColumn || col > EndColumn)
            {
                bw.Write(noDataValue);
            }
            else
            {
                bw.Write(Data[row - StartRow][col - StartColumn]);
            }
        }

        pm.CurrentValue = row;
    }

    fs.Flush(); // flush anything that hasn't gotten written yet.
    pm.Reset();
    bw.Close();
}
/// <summary>
/// Copies the contents from the specified sourceRaster into this raster. If both rasters are InRam, this does not affect the files.
/// </summary>
/// <param name="sourceRaster">The raster of values to paste into this raster. If the CellWidth and CellHeight values do not match between the files,
/// an exception will be thrown. If the sourceRaster overlaps with the edge of this raster, only the intersecting region will be
/// pasted.</param>
/// <param name="startRow">Specifies the row in this raster where the top row of the sourceRaster will be pasted.</param>
/// <param name="startColumn">Specifies the column in this raster where the left column of the sourceRaster will be pasted.</param>
/// <exception cref="ArgumentException">Thrown when the source data type or cell size does not match this raster.</exception>
public void PasteRaster(Raster<T> sourceRaster, int startRow, int startColumn)
{
    int byteSize = ByteSize;

    // BUG FIX: this generic raster was compared against typeof(int), rejecting every
    // non-int raster; compare against the element type T instead.
    if (sourceRaster.DataType != typeof(T))
    {
        throw new ArgumentException(
            DataStrings.ArgumentOfWrongType_S1_S2.Replace("%S1", "sourceRaster").Replace("%S2", "BinaryRaster"));
    }

    if (startRow + sourceRaster.NumRows <= 0)
    {
        return; // sourceRaster is above this raster
    }

    if (startColumn + sourceRaster.NumColumns <= 0)
    {
        return; // sourceRaster is left of this raster
    }

    if (startRow > NumRows)
    {
        return; // sourceRaster is below this raster
    }

    if (startColumn > NumColumns)
    {
        return; // sourceRaster is to the right of this raster
    }

    if (sourceRaster.CellWidth != CellWidth || sourceRaster.CellHeight != CellHeight)
    {
        throw new ArgumentException(DataStrings.RastersNeedSameCellSize);
    }

    // These are specified in coordinates that match the source raster
    int sourceStartColumn = 0;
    int sourceStartRow = 0;
    int destStartColumn = startColumn;
    int destStartRow = startRow;
    int numPasteColumns = sourceRaster.NumColumns;
    int numPasteRows = sourceRaster.NumRows;

    // adjust range to cover only the overlapping sections
    if (startColumn < 0)
    {
        sourceStartColumn = -startColumn;
        destStartColumn = 0;
    }

    if (startRow < 0)
    {
        sourceStartRow = -startRow;
        destStartRow = 0;
    }

    if (numPasteRows + destStartRow > NumRows)
    {
        numPasteRows = NumRows - destStartRow;
    }

    // BUG FIX: the column clamp subtracted destStartRow instead of destStartColumn.
    if (numPasteColumns + destStartColumn > NumColumns)
    {
        numPasteColumns = NumColumns - destStartColumn;
    }

    if (IsInRam)
    {
        // ---------------------- RAM BASED ------------------------------------------------------
        if (sourceRaster.IsInRam)
        {
            // both members are inram, so directly copy values.
            for (int row = 0; row < numPasteRows; row++)
            {
                for (int col = 0; col < numPasteColumns; col++)
                {
                    // since we are copying direct, we don't have to do a type check on T
                    Data[destStartRow + row][destStartColumn + col] =
                        sourceRaster.Data[sourceStartRow + row][sourceStartColumn + col];
                }
            }
        }
        else
        {
            // BUG FIX: the source file was opened with FileAccess.Write and then READ
            // through a BinaryReader; open for reading.
            FileStream fs = new FileStream(sourceRaster.Filename, FileMode.Open, FileAccess.Read, FileShare.Read, numPasteColumns * byteSize);
            ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.ReadingValuesFrom_S.Replace("%S", sourceRaster.Filename), numPasteRows);
            fs.Seek(HeaderSize, SeekOrigin.Begin);

            // Position the binary reader at the top of the "sourceRaster"
            fs.Seek(sourceStartRow * sourceRaster.NumColumnsInFile * byteSize, SeekOrigin.Current);
            BinaryReader br = new BinaryReader(fs);
            for (int row = 0; row < numPasteRows; row++)
            {
                // Position the binary reader at the beginning of the sourceRaster
                fs.Seek(byteSize * sourceStartColumn, SeekOrigin.Current);
                for (int col = 0; col < numPasteColumns; col++)
                {
                    Data[destStartRow + row][destStartColumn + col] = br.Read<T>();
                }

                pm.CurrentValue = row;

                // BUG FIX: the row stride belongs to the SOURCE file being read, not to
                // this raster; use sourceRaster.NumColumnsInFile.
                fs.Seek(byteSize * (sourceRaster.NumColumnsInFile - sourceStartColumn - numPasteColumns), SeekOrigin.Current);
            }

            br.Close();
            pm.Reset();
        }

        // The statistics will have changed with the newly pasted data involved
        GetStatistics();
    }
    else
    {
        // ----------------------------------------- FILE BASED ---------------------------------
        FileStream writefs = new FileStream(Filename, FileMode.Open, FileAccess.Write, FileShare.None, NumColumns * byteSize);
        BinaryWriter bWriter = new BinaryWriter(writefs);
        ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.WritingValues_S.Replace("%S", Filename), numPasteRows);
        writefs.Seek(HeaderSize, SeekOrigin.Begin);
        writefs.Seek(destStartRow * NumColumnsInFile * byteSize, SeekOrigin.Current); // advance to top of paste window area

        if (sourceRaster.IsInRam)
        {
            // we can just write values
            // BUG FIX: the row loop previously counted to numPasteColumns.
            for (int row = 0; row < numPasteRows; row++)
            {
                // Position the writer at the left edge of the paste window
                writefs.Seek(byteSize * destStartColumn, SeekOrigin.Current);
                for (int col = 0; col < numPasteColumns; col++)
                {
                    T val = sourceRaster.Data[sourceStartRow + row][sourceStartColumn + col];
                    bWriter.Write(val);
                }

                pm.CurrentValue = row;
                writefs.Seek(byteSize * (NumColumnsInFile - destStartColumn - numPasteColumns), SeekOrigin.Current);
            }
        }
        else
        {
            // Since everything is handled from a file, we don't have to type check. Just copy the bytes.
            FileStream readfs = new FileStream(sourceRaster.Filename, FileMode.Open, FileAccess.Read, FileShare.Read, numPasteColumns * byteSize);
            BinaryReader bReader = new BinaryReader(readfs);
            readfs.Seek(HeaderSize, SeekOrigin.Begin);
            readfs.Seek(sourceStartRow * sourceRaster.NumColumnsInFile * byteSize, SeekOrigin.Current); // advances to top of paste window area
            for (int row = 0; row < numPasteRows; row++)
            {
                readfs.Seek(sourceStartColumn * byteSize, SeekOrigin.Current);
                writefs.Seek(destStartColumn * byteSize, SeekOrigin.Current);
                byte[] rowData = bReader.ReadBytes(numPasteColumns * byteSize);
                bWriter.Write(rowData);

                // BUG FIX: these end-of-row skips are measured in CELLS and must be scaled
                // by byteSize; the originals seeked that many bytes.
                readfs.Seek((sourceRaster.NumColumnsInFile - sourceStartColumn - numPasteColumns) * byteSize, SeekOrigin.Current);
                writefs.Seek((NumColumnsInFile - destStartColumn - numPasteColumns) * byteSize, SeekOrigin.Current);
                pm.CurrentValue = row;
            }

            bReader.Close();
        }

        bWriter.Close();
        pm.Reset();
    }
}
/// <summary>
/// Obtains only the statistics for the small window specified by startRow, endRow etc.
/// When the window is in memory the base implementation is used; otherwise the window's
/// cells are streamed from the file.
/// </summary>
public new void GetWindowStatistics()
{
    if (IsInRam)
    {
        // don't bother to do file calculations if the whole raster is in memory
        base.GetWindowStatistics();
        return;
    }

    // The window was not in memory, so go ahead and get statistics for the window from the file.
    FileStream fs = new FileStream(Filename, FileMode.Open, FileAccess.Read, FileShare.Read, NumColumns * ByteSize);
    BinaryReader br = new BinaryReader(fs);
    fs.Seek(HeaderSize, SeekOrigin.Begin);

    // NOTE(review): the caption says "entire raster" but only the window is scanned.
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Calculating Statistics for the entire raster " + Filename, NumRows);
    double total = 0;
    double sqrTotal = 0;
    int count = 0;
    int byteSize = ByteSize; // cache this for faster calcs
    int min = int.MaxValue;
    int max = int.MinValue;
    fs.Seek(StartRow * ByteSize * NumColumnsInFile, SeekOrigin.Current); // To top edge of the Window
    int noDataValue = Convert.ToInt32(NoDataValue);

    for (int row = 0; row < NumRows; row++)
    {
        fs.Seek(StartColumn * byteSize, SeekOrigin.Current); // to the left edge of the window
        for (int col = 0; col < NumColumns; col++)
        {
            int val = br.ReadInt32();
            // Skip no-data cells; values <= -100000 are also treated as no-data —
            // presumably a legacy sentinel range, TODO confirm.
            if (val == noDataValue || val <= -100000)
            {
                continue;
            }

            if (val > max) max = val;
            if (val < min) min = val;
            double dblVal = val;
            total += dblVal;
            sqrTotal += dblVal * dblVal;
            count++;
        }

        // BUG FIX: the end-of-row skip used EndRow instead of EndColumn and forgot to
        // scale by byteSize, so every row after the first read the wrong cells.
        fs.Seek((NumColumnsInFile - EndColumn - 1) * byteSize, SeekOrigin.Current); // skip to the end of this row.
        pm.CurrentValue = row;
    }

    Minimum = min;
    Maximum = max;
    NumValueCells = count;

    // Guard against an all-no-data window, which previously produced NaN via 0/0.
    StdDeviation = count > 0
        ? (float)Math.Sqrt((sqrTotal / NumValueCells) - (total / NumValueCells) * (total / NumValueCells))
        : 0f;
    pm.Reset();
    br.Close();
}
/// <summary>
/// This creates a completely new raster from the windowed domain on the original raster. This new raster
/// will have a separate source file, and values like NumRowsInFile will correspond to the newly created file.
/// All the values will be copied to the new source file. If inRam = true and the new raster is small enough,
/// the raster values will be loaded into memory.
/// </summary>
/// <param name="fileName">The string fileName of the new raster to create.</param>
/// <param name="startRow">The 0 based integer index of the top row to copy from this raster. If this raster is itself a window, 0 represents the startRow from the file.</param>
/// <param name="endRow">The integer index of the bottom row to copy from this raster. The largest allowed value is NumRows - 1.</param>
/// <param name="startColumn">The 0 based integer index of the leftmost column to copy from this raster. If this raster is a window, 0 represents the startColumn from the file.</param>
/// <param name="endColumn">The 0 based integer index of the rightmost column to copy from this raster. The largest allowed value is NumColumns - 1.</param>
/// <param name="copyValues">If this is true, the values are saved to the file. If this is false and the data can be loaded into Ram, no file handling is done. Otherwise, a file of NoData values is created.
/// NOTE(review): this parameter is not consulted anywhere in the body — confirm intended behavior.</param>
/// <param name="inRam">Boolean. If this is true and the window is small enough, a copy of the values will be loaded into memory.</param>
/// <returns>An implementation of IRaster.</returns>
public new IRaster CopyWindow(string fileName, int startRow, int endRow, int startColumn, int endColumn, bool copyValues, bool inRam)
{
    int numCols = endColumn - startColumn + 1;
    int numRows = endRow - startRow + 1;
    var result = new BinaryRaster<T>(fileName, numCols, numRows, inRam);
    result.Projection = Projection;

    // The affine coefficients defining the world file are the same except that they are
    // translated over. Only the position of the upper left corner changes. Everything else
    // is the same as the previous raster.
    var ac = new AffineTransform(Bounds.AffineCoefficients).TransfromToCorner(startColumn, startRow);
    result.Bounds = new RasterBounds(result.NumRows, result.NumColumns, ac);

    if (IsInRam)
    {
        ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, numRows);

        // copy values directly using both data structures
        for (int row = 0; row < numRows; row++)
        {
            for (int col = 0; col < numCols; col++)
            {
                result.Data[row][col] = Data[startRow + row][startColumn + col];
            }

            pm.CurrentValue = row;
        }

        pm.Reset();
        if (result.IsInRam == false)
        {
            // Force the result raster to write itself to a file and then purge its memory.
            result.Write(fileName);
            result.Data = null;
        }
    }
    else
    {
        if (result.IsInRam)
        {
            // the source is not in memory, so we just read the values from the file as if
            // opening it directly from the file.
            result.OpenWindow(Filename, startRow, endRow, startColumn, endColumn, true);
        }
        else
        {
            // Both sources are file based so we basically copy rows of bytes from one to the other.
            FileStream source = new FileStream(Filename, FileMode.Open, FileAccess.Read, FileShare.Read);
            result.WriteHeader(fileName);
            FileStream dest = new FileStream(fileName, FileMode.Append, FileAccess.Write, FileShare.None);
            source.Seek(HeaderSize, SeekOrigin.Begin);
            BinaryReader bReader = new BinaryReader(source);
            BinaryWriter bWriter = new BinaryWriter(dest);
            ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CopyingValues, numRows);

            // skip whole rows down to the top of the window
            source.Seek(NumColumnsInFile * startRow * ByteSize, SeekOrigin.Current);
            for (int row = 0; row < numRows; row++)
            {
                // BUG FIX: the per-row seek previously skipped numCols cells — the very
                // data that should be read. Skip to the window's left edge instead.
                source.Seek(startColumn * ByteSize, SeekOrigin.Current);
                byte[] rowData = bReader.ReadBytes(ByteSize * numCols);
                bWriter.Write(rowData);

                // BUG FIX: the trailing skip was (NumColumnsInFile - endColumn + 1) bytes;
                // it must skip the (NumColumnsInFile - endColumn - 1) cells right of the
                // window, scaled by ByteSize.
                source.Seek((NumColumnsInFile - endColumn - 1) * ByteSize, SeekOrigin.Current);
                bWriter.Flush();
                pm.CurrentValue = row;
            }

            pm.Reset();

            // Close the streams so the new file is flushed and handles are released.
            bWriter.Close();
            bReader.Close();
        }
    }

    return result;
}
/// <summary>
/// This assumes that the base image has been written to the file. This will now attempt to calculate
/// the down-sampled images.
/// </summary>
public void CreatePyramids()
{
    int w = _header.ImageHeaders[0].NumColumns;
    int h = _header.ImageHeaders[0].NumRows;

    // Limit each strip to roughly 32 million pixels so an entire strip fits comfortably in memory.
    int blockHeight = 32000000 / w;
    if (blockHeight > h)
    {
        blockHeight = h;
    }

    int numBlocks = (int)Math.Ceiling(h / (double)blockHeight);
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", _header.ImageHeaders.Length * numBlocks);
    for (int block = 0; block < numBlocks; block++)
    {
        // Normally block height except for the lowest block which is usually smaller.
        int bh = blockHeight;
        if (block == numBlocks - 1)
        {
            bh = h - block * blockHeight;
        }

        // Read a block of bytes from the base (scale 0) image into a bitmap.
        byte[] vals = ReadWindow(block * blockHeight, 0, bh, w, 0);
        Bitmap bmp = new Bitmap(w, bh);
        BitmapData bd = bmp.LockBits(new Rectangle(0, 0, w, bh), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
        Marshal.Copy(vals, 0, bd.Scan0, vals.Length);
        bmp.UnlockBits(bd);

        // Cycle through the scales, halving each time, and write the resulting smaller bitmap in the appropriate spot.
        // NOTE(review): the loop bound is Length - 1, so the final (smallest) header level is never produced here —
        // confirm whether that level is generated elsewhere or whether this is an off-by-one.
        int sw = w;   // scale width
        int sh = bh;  // scale height
        int sbh = blockHeight; // scaled block height (row offset unit at this scale)
        for (int scale = 1; scale < _header.ImageHeaders.Length - 1; scale++)
        {
            sw = sw / 2;
            sh = sh / 2;
            sbh = sbh / 2;
            if (sh == 0 || sw == 0)
            {
                break;
            }

            Bitmap subSet = new Bitmap(sw, sh);
            Graphics g = Graphics.FromImage(subSet);
            g.DrawImage(bmp, 0, 0, sw, sh);
            bmp.Dispose(); // since we keep getting smaller, don't bother keeping the big image in memory any more.
            bmp = subSet;  // keep the most recent image alive for making even smaller subsets.
            g.Dispose();

            // Copy the down-sampled pixels back out and write them into this scale's region of the file.
            BitmapData bdata = bmp.LockBits(new Rectangle(0, 0, sw, sh), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
            byte[] res = new byte[sw * sh * 4];
            Marshal.Copy(bdata.Scan0, res, 0, res.Length);
            bmp.UnlockBits(bdata);
            WriteWindow(res, sbh * block, 0, sh, sw, scale);
            pm.CurrentValue = block * _header.ImageHeaders.Length + scale;
        }

        vals = null;
        bmp.Dispose();
    }

    pm.Reset();
}
/// <summary> /// Gets a list of all unique values of the attribute field. /// </summary> private List<Break> GetUniqueValues(string fieldName, IAttributeSource source, ICancelProgressHandler progressHandler) { ArrayList lst; bool hugeCountOk = false; if (_cachedUniqueValues.ContainsKey(fieldName)) { lst = _cachedUniqueValues[fieldName]; } else { lst = new ArrayList(); AttributePager ap = new AttributePager(source, 5000); ProgressMeter pm = new ProgressMeter(progressHandler, "Discovering Unique Values", source.NumRows()); for (int row = 0; row < source.NumRows(); row++) { object val = ap.Row(row)[fieldName] ?? "[NULL]"; if (val.ToString() == string.Empty) val = "[NULL]"; if (lst.Contains(val)) continue; lst.Add(val); if (lst.Count > 1000 && !hugeCountOk) { CancelEventArgs args = new CancelEventArgs(true); if (TooManyCategories != null) TooManyCategories(this, args); if (args.Cancel) break; hugeCountOk = true; } pm.CurrentValue = row; if (progressHandler.Cancel) break; } lst.Sort(); if (lst.Count < EditorSettings.MaxSampleCount) { _cachedUniqueValues[fieldName] = lst; } } List<Break> result = new List<Break>(); if (lst != null) { foreach (object item in lst) { result.Add(new Break(item.ToString())); } } return result; }
/// <summary> /// Finishes writing whatever is in memory to the file, closes the /// internal binary writer, the underlying file, clears the memory /// and disposes the filestream. /// </summary> public void Close() { if (_binaryWriter != null) { // Finish pasting any residual data to the file PasteBuffer(); // Close the binary writer and underlying filestream _binaryWriter.Close(); } _binaryWriter = null; _buffer = null; _progressMeter = null; // the IProgressHandler could be an undesired handle to a whole form or something if (_fileStream != null) _fileStream.Dispose(); _fileStream = null; }
/// <summary>
/// This writes a window of byte values (ARGB order) to the file. This assumes that the headers already exist.
/// If the headers have not been created or the bounds extend beyond the header numRows and numColumns for the
/// specified scale, this will throw an exception.
/// </summary>
/// <param name="bytes">The byte array.</param>
/// <param name="startRow">The integer start row.</param>
/// <param name="startColumn">The integer start column.</param>
/// <param name="numRows">The integer number of rows in the window.</param>
/// <param name="numColumns">The integer number of columns in the window.</param>
/// <param name="scale">The integer scale. 0 is the original image.</param>
/// <param name="pm">The progress meter to advance by row. Calls Next() for each row.</param>
/// <exception cref="PyramidUndefinedHeaderException">Occurs when attempting to write data before the headers are defined.</exception>
/// <exception cref="PyramidOutOfBoundsException">Occurs if the range specified is outside the bounds for the specified image scale.</exception>
public void WriteWindow(byte[] bytes, int startRow, int startColumn, int numRows, int numColumns, int scale, ProgressMeter pm)
{
    if (Header == null || Header.ImageHeaders.Length <= scale || Header.ImageHeaders[scale] == null)
    {
        throw new PyramidUndefinedHeaderException();
    }

    PyramidImageHeader ph = Header.ImageHeaders[scale];
    if (startRow < 0 || startColumn < 0 || numRows + startRow > ph.NumRows || numColumns + startColumn > ph.NumColumns)
    {
        throw new PyramidOutOfBoundsException();
    }

    if (startColumn == 0 && numColumns == ph.NumColumns)
    {
        // Full-width window: the pixels are contiguous on disk, so write them all in one pass.
        using (FileStream fs = new(Filename, FileMode.OpenOrCreate, FileAccess.Write))
        {
            fs.Seek(ph.Offset, SeekOrigin.Begin);
            fs.Seek((startRow * ph.NumColumns) * 4, SeekOrigin.Current);
            fs.Write(bytes, 0, bytes.Length);
        }
    }
    else
    {
        // Partial-width window: write one row at a time, skipping the bytes outside the window.
        using (FileStream fs = new(Filename, FileMode.Open, FileAccess.Write))
        {
            fs.Seek(ph.Offset, SeekOrigin.Begin);
            fs.Seek((startRow * ph.NumColumns) * 4, SeekOrigin.Current);
            int before = startColumn * 4;                                    // bytes to skip left of the window
            int after = (ph.NumColumns - (startColumn + numColumns)) * 4;    // bytes to skip right of the window
            for (int row = startRow; row < startRow + numRows; row++)
            {
                fs.Seek(before, SeekOrigin.Current);
                fs.Write(bytes, (row - startRow) * numColumns * 4, numColumns * 4);
                fs.Seek(after, SeekOrigin.Current);
                pm.Next();
            }

            // FIX: removed a stray fs.Write(bytes, 0, bytes.Length) that re-appended the
            // entire buffer after the per-row writes (leftover from the single-pass branch).
        }
    }
}
/// <summary>
/// Saves the file to a new location.
/// </summary>
/// <param name="fileName">The fileName to save.</param>
/// <param name="overwrite">Boolean that specifies whether or not to overwrite the existing file.</param>
public override void SaveAs(string fileName, bool overwrite)
{
    if (IndexMode)
    {
        SaveAsIndexed(fileName, overwrite);
        return;
    }

    string dir = Path.GetDirectoryName(fileName);
    if (dir != null && !Directory.Exists(dir))
    {
        Directory.CreateDirectory(dir);
    }

    if (File.Exists(fileName))
    {
        if (fileName != Filename && overwrite == false) throw new IOException("File exists.");
        File.Delete(fileName);
        string shx = Path.ChangeExtension(fileName, ".shx");
        if (File.Exists(shx)) File.Delete(shx);
    }

    InvalidateEnvelope();

    // Select the on-disk shape type from the coordinate type of this layer.
    if (CoordinateType == CoordinateType.Regular)
    {
        Header.ShapeType = ShapeType.MultiPoint;
    }

    if (CoordinateType == CoordinateType.M)
    {
        Header.ShapeType = ShapeType.MultiPointM;
    }

    if (CoordinateType == CoordinateType.Z)
    {
        Header.ShapeType = ShapeType.MultiPointZ;
    }

    Header.SetExtent(MyExtent);

    // The .shx header is 50 words plus one 8-byte (4-word) record per feature.
    Header.ShxLength = 50 + 4 * Features.Count;
    Header.SaveAs(fileName);
    BufferedBinaryWriter bbWriter = new BufferedBinaryWriter(fileName);
    BufferedBinaryWriter indexWriter = new BufferedBinaryWriter(Header.ShxFilename);
    int fid = 0;
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words
    int contentLength = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    foreach (IFeature f in Features)
    {
        offset += contentLength; // adding the previous content length from each loop calculates the word offset
        List<Coordinate> points = new List<Coordinate>();
        contentLength = 20; // base record content: shape type + bounding box + point count, in 16-bit words
        for (int iPart = 0; iPart < f.NumGeometries; iPart++)
        {
            IList<Coordinate> coords = f.BasicGeometry.GetBasicGeometryN(iPart).Coordinates;
            foreach (Coordinate coord in coords)
            {
                points.Add(coord);
            }
        }

        // Content-length growth per point (in words): XY = 8, +M = 12, +Z+M = 16; M/Z ranges add fixed words.
        if (Header.ShapeType == ShapeType.MultiPoint)
        {
            contentLength += points.Count * 8;
        }

        if (Header.ShapeType == ShapeType.MultiPointM)
        {
            contentLength += 8; // mmin, mmax
            contentLength += points.Count * 12;
        }

        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            contentLength += 16; // mmin, mmax, zmin, zmax
            contentLength += points.Count * 16;
        }

        // Index File
        // ---------------------------------------------------------
        // Position     Value               Type        Number  Byte Order
        // ---------------------------------------------------------
        indexWriter.Write(offset, false);        // Byte 0     Offset   Integer     1           Big
        indexWriter.Write(contentLength, false); // Byte 4     Length   Integer     1           Big

        // X Y MultiPoint record
        // ---------------------------------------------------------
        // Position     Value               Type        Number  Byte Order
        // ---------------------------------------------------------
        bbWriter.Write(fid + 1, false);          // Byte 0     Record   Integer     1           Big
        bbWriter.Write(contentLength, false);    // Byte 4     Length   Integer     1           Big
        bbWriter.Write((int)Header.ShapeType);   // Byte 8     Shape    Integer     1           Little
        if (Header.ShapeType == ShapeType.NullShape)
        {
            // A null shape record ends after the shape type; skip straight to the next feature.
            continue;
        }

        bbWriter.Write(f.Envelope.Minimum.X);    // Byte 12    Xmin     Double      1           Little
        bbWriter.Write(f.Envelope.Minimum.Y);    // Byte 20    Ymin     Double      1           Little
        bbWriter.Write(f.Envelope.Maximum.X);    // Byte 28    Xmax     Double      1           Little
        bbWriter.Write(f.Envelope.Maximum.Y);    // Byte 36    Ymax     Double      1           Little
        bbWriter.Write(points.Count);            // Byte 44    #Points  Integer     1           Little

        // Byte X       Points              Point       #Points Little
        foreach (Coordinate coord in points)
        {
            bbWriter.Write(coord.X);
            bbWriter.Write(coord.Y);
        }

        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            bbWriter.Write(f.Envelope.Minimum.Z);
            bbWriter.Write(f.Envelope.Maximum.Z);
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.Z);
            }
        }

        if (Header.ShapeType == ShapeType.MultiPointM || Header.ShapeType == ShapeType.MultiPointZ)
        {
            // With no envelope available, write a zeroed M range rather than failing.
            if (f.Envelope == null)
            {
                bbWriter.Write(0.0);
                bbWriter.Write(0.0);
            }
            else
            {
                bbWriter.Write(f.Envelope.Minimum.M);
                bbWriter.Write(f.Envelope.Maximum.M);
            }

            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.M);
            }
        }

        ProgressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // each record's 8-byte header adds 4 words to the running offset
    }

    ProgressMeter.Reset();
    bbWriter.Close();
    indexWriter.Close();

    // Final file length = last record's offset plus its content length (in words).
    offset += contentLength;

    // NOTE(review): this writes the length into Filename, not the fileName just saved —
    // looks like it should be WriteFileLength(fileName, offset); confirm against other SaveAs overloads.
    WriteFileLength(Filename, offset);
    UpdateAttributes();
    SaveProjection();
}
/// <summary>
/// Obtains a typed list of ShapefilePoint structures with double values associated with the various coordinates.
/// </summary>
/// <param name="fileName">A string fileName.</param>
/// <param name="progressHandler">Progress handler.</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }

    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Get the basic header information.
    var header = Header;

    // Check to ensure that the fileName is the correct shape type
    if (header.ShapeType != ShapeType.Point && header.ShapeType != ShapeType.PointM && header.ShapeType != ShapeType.PointZ)
    {
        throw new ApplicationException(DataStrings.FileNotPoints_S.Replace("%S", fileName));
    }

    if (new FileInfo(fileName).Length == 100)
    {
        // the file is only a 100-byte header, so it is empty and we are done reading
        return;
    }

    // Reading the index file gives us an easier way to track the number of shapes and their offsets/lengths.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);
    var numShapes = shapeHeaders.Count;
    double[] m = null;
    double[] z = null;
    var vert = new double[2 * numShapes]; // X,Y interleaved, one pair per shape
    if (header.ShapeType == ShapeType.PointM || header.ShapeType == ShapeType.PointZ)
    {
        m = new double[numShapes];
    }

    if (header.ShapeType == ShapeType.PointZ)
    {
        z = new double[numShapes];
    }

    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };
    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 100.0 / numShapes);

            // Seek via the index entry: deleted records may still occupy space in the .shp file.
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            var recordNumber = reader.ReadInt32(Endian.BigEndian);
            Debug.Assert(recordNumber == shp + 1);
            var contentLen = reader.ReadInt32(Endian.BigEndian);
            Debug.Assert(contentLen == shapeHeaders[shp].ContentLength);
            var shapeType = (ShapeType)reader.ReadInt32();
            if (shapeType == ShapeType.NullShape)
            {
                if (m != null)
                {
                    m[shp] = double.MinValue; // sentinel: no M value for a null shape
                }

                // Skip the coordinate reads but still record a ShapeRange entry for this slot.
                goto fin;
            }

            // ind tracks how many bytes of record content have been consumed (shape type = 4).
            var ind = 4;

            // Read X
            vert[shp * 2] = reader.ReadDouble();
            ind += 8;

            // Read Y
            vert[shp * 2 + 1] = reader.ReadDouble();
            ind += 8;

            // Read Z
            if (z != null)
            {
                z[shp] = reader.ReadDouble();
                ind += 8;
            }

            // Read M. The M value is optional; its presence is inferred from the record length.
            if (m != null)
            {
                if (shapeHeaders[shp].ByteLength <= ind)
                {
                    m[shp] = double.MinValue;
                }
                else
                {
                    m[shp] = reader.ReadDouble();
                    ind += 8;
                }
            }

        fin:
            var shape = new ShapeRange(FeatureType.Point)
            {
                RecordNumber = recordNumber,
                StartIndex = shp,
                ContentLength = shapeHeaders[shp].ContentLength,
                NumPoints = 1,
                NumParts = 1
            };
            ShapeIndices.Add(shape);
            var part = new PartRange(vert, shp, 0, FeatureType.Point)
            {
                NumVertices = 1
            };
            shape.Parts.Add(part);

            // A point's extent is degenerate: min == max == the point itself.
            shape.Extent = new Extent(new[] { vert[shp * 2], vert[shp * 2 + 1], vert[shp * 2], vert[shp * 2 + 1] });
        }
    }

    Vertex = vert;
    M = m;
    Z = z;
    progressMeter.Reset();
}
/// <summary>
/// Gets the count of members that match each expression.
/// </summary>
/// <param name="expressions">The string expressions to test. An empty expression counts all rows.</param>
/// <param name="progressHandler">The progress handler that can also cancel the counting.</param>
/// <param name="maxSampleSize">The integer maximum sample size from which to draw counts. If this is negative, it will not be used.</param>
/// <returns>The integer counts of the members that match each expression.</returns>
public override int[] GetCounts(string[] expressions, ICancelProgressHandler progressHandler, int maxSampleSize)
{
    if (AttributesPopulated) return base.GetCounts(expressions, progressHandler, maxSampleSize);

    int[] counts = new int[expressions.Length];

    // The most common case would be no filter expression, in which case the count is simply the number of shapes.
    bool requiresRun = false;
    for (int iex = 0; iex < expressions.Length; iex++)
    {
        if (!string.IsNullOrEmpty(expressions[iex]))
        {
            requiresRun = true;
        }
        else
        {
            counts[iex] = NumRows();
        }
    }

    if (!requiresRun) return counts;

    AttributePager ap = new AttributePager(this, 5000);
    ProgressMeter pm = new ProgressMeter(progressHandler, "Calculating Counts", ap.NumPages());

    // Don't bother to use a sampling approach if the number of rows is on the same order of magnitude as the number of samples.
    if (maxSampleSize > 0 && maxSampleSize < NumRows() / 2)
    {
        DataTable sample = new DataTable();
        sample.Columns.AddRange(GetColumns());

        // FIX: a HashSet is the right structure here; the previous Dictionary<int, int>
        // stored each row index as both key and value purely to act as a set.
        HashSet<int> usedRows = new HashSet<int>();
        int samplesPerPage = maxSampleSize / ap.NumPages();
        Random rnd = new Random(DateTime.Now.Millisecond);
        for (int page = 0; page < ap.NumPages(); page++)
        {
            for (int i = 0; i < samplesPerPage; i++)
            {
                // Rejection-sample an unused row from the current page.
                // NOTE(review): this assumes samplesPerPage is well below the page size
                // (guaranteed on average by the maxSampleSize < NumRows/2 guard) — confirm for the last, smaller page.
                int row;
                do
                {
                    row = rnd.Next(ap.StartIndex, ap.StartIndex + ap.PageSize);
                }
                while (!usedRows.Add(row));

                sample.Rows.Add(ap.Row(row).ItemArray);
            }

            ap.MoveNext();
            pm.CurrentValue = page;
            if (progressHandler.Cancel) break;
        }

        for (int i = 0; i < expressions.Length; i++)
        {
            try
            {
                DataRow[] dr = sample.Select(expressions[i]);
                counts[i] += dr.Length;
            }
            catch (Exception ex)
            {
                // A malformed expression shouldn't abort the remaining counts; log and continue.
                Debug.WriteLine(ex);
            }
        }

        pm.Reset();
        return counts;
    }

    // Exhaustive pass: evaluate every expression against every page.
    for (int page = 0; page < ap.NumPages(); page++)
    {
        for (int i = 0; i < expressions.Length; i++)
        {
            DataRow[] dr = ap[page].Select(expressions[i]);
            counts[i] += dr.Length;
        }

        pm.CurrentValue = page;
        if (progressHandler.Cancel) break;
    }

    pm.Reset();
    return counts;
}
// X Y MultiPoints // --------------------------------------------------------- // Position Value Type Number Byte Order // --------------------------------------------------------- // Byte 0 Record Number Integer 1 Big // Byte 4 Content Length Integer 1 Big // Byte 8 Shape Type 8 Integer 1 Little // Byte 12 Xmin Double 1 Little // Byte 20 Ymin Double 1 Little // Byte 28 Xmax Double 1 Little // Byte 36 Ymax Double 1 Little // Byte 44 NumPoints Integer 1 Little // Byte 48 Points Point NumPoints Little // X Y M MultiPoints // --------------------------------------------------------- // Position Value Type Number Byte Order // --------------------------------------------------------- // Byte 0 Record Number Integer 1 Big // Byte 4 Content Length Integer 1 Big // Byte 8 Shape Type 28 Integer 1 Little // Byte 12 Box (Xmin - Ymax) Double 4 Little // Byte 44 NumPoints Integer 1 Little // Byte 48 Points Point NumPoints Little // Byte X* Mmin Double 1 Little // Byte X+8* Mmax Double 1 Little // Byte X+16* Marray Double NumPoints Little // X = 48 + (16 * NumPoints) // * = optional // X Y Z M MultiPoints // --------------------------------------------------------- // Position Value Type Number Byte Order // --------------------------------------------------------- // Byte 0 Record Number Integer 1 Big // Byte 4 Content Length Integer 1 Big // Byte 8 Shape Type 18 Integer 1 Little // Byte 12 Box Double 4 Little // Byte 44 NumPoints Integer 1 Little // Byte 48 Points Point NumPoints Little // Byte X Zmin Double 1 Little // Byte X+8 Zmax Double 1 Little // Byte X+16 Zarray Double NumPoints Little // Byte Y* Mmin Double 1 Little // Byte Y+8* Mmax Double 1 Little // Byte Y+16* Marray Double NumPoints Little // X = 48 + (16 * NumPoints) // Y = X + 16 + (8 * NumPoints) // * = optional /// <summary> /// Obtains a typed list of MultiPoint structures with double values associated with the various coordinates. 
/// </summary>
/// <param name="fileName">Name of the file that gets loaded.</param>
/// <param name="progressHandler">Progress handler.</param>
private void FillPoints(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }

    if (!File.Exists(fileName))
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Get the basic header information.
    // Check to ensure that the fileName is the correct shape type
    // NOTE(review): the error resource is FileNotLines_S even though this method reads multipoints — verify the resource key.
    if (Header.ShapeType != ShapeType.MultiPoint && Header.ShapeType != ShapeType.MultiPointM && Header.ShapeType != ShapeType.MultiPointZ)
    {
        throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
    }

    if (new FileInfo(fileName).Length == 100)
    {
        // the file is only a 100-byte header, so it is empty and we are done reading
        return;
    }

    // Reading the index file gives us an easier way to track the number of shapes and their offsets/lengths.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);
    int numShapes = shapeHeaders.Count;
    bool isM = Header.ShapeType == ShapeType.MultiPointZ || Header.ShapeType == ShapeType.MultiPointM;
    bool isZ = Header.ShapeType == ShapeType.MultiPointZ;
    int totalPointsCount = 0;
    int totalPartsCount = 0;
    var shapeIndices = new List<ShapeRange>(numShapes);
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };
    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        var boundsBytes = new byte[4 * 8];
        var bounds = new double[4];

        // First pass (0-50%): read record headers and bounds to size the vertex/M/Z arrays.
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);

            // Read from the index file because some deleted records might still exist in the .shp file.
            long offset = shapeHeaders[shp].ByteOffset;
            reader.Seek(offset, SeekOrigin.Begin);
            var shape = new ShapeRange(FeatureType.MultiPoint, CoordinateType)
            {
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                ShapeType = (ShapeType)reader.ReadInt32(),
                StartIndex = totalPointsCount,
                NumParts = 1
            };
            Debug.Assert(shape.RecordNumber == shp + 1);
            if (shape.ShapeType != ShapeType.NullShape)
            {
                // Bounds: 4 little-endian doubles (Xmin, Ymin, Xmax, Ymax) block-copied in one read.
                reader.Read(boundsBytes, 0, boundsBytes.Length);
                Buffer.BlockCopy(boundsBytes, 0, bounds, 0, boundsBytes.Length);
                shape.Extent.MinX = bounds[0];
                shape.Extent.MinY = bounds[1];
                shape.Extent.MaxX = bounds[2];
                shape.Extent.MaxY = bounds[3];

                //// Num Parts: multipoint records have no parts section; each shape counts as one part.
                totalPartsCount += 1;

                // Num Points
                shape.NumPoints = reader.ReadInt32();
                totalPointsCount += shape.NumPoints;
            }

            shapeIndices.Add(shape);
        }

        var vert = new double[totalPointsCount * 2];
        var vertInd = 0; // byte offset into vert for Buffer.BlockCopy

        // NOTE(review): parts is never written to, so every entry stays 0 — consistent with
        // multipoints having exactly one implicit part starting at index 0.
        var parts = new int[totalPartsCount];
        int mArrayInd = 0, zArrayInd = 0; // byte offsets into mArray / zArray
        double[] mArray = null, zArray = null;
        if (isM)
        {
            mArray = new double[totalPointsCount];
        }

        if (isZ)
        {
            zArray = new double[totalPointsCount];
        }

        int partsOffset = 0;

        // Second pass (50-100%): read the actual coordinates (and M/Z values) for each shape.
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + shp * 50.0 / numShapes);
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape)
            {
                continue;
            }

            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek(3 * 4 + 32 + 4, SeekOrigin.Current); // Skip first bytes (Record Number, Content Length, Shapetype + BoundingBox + NumPoints)

            // Read points
            var pointsBytes = reader.ReadBytes(8 * 2 * shape.NumPoints); // Numpoints * Point (X(8) + Y(8))
            Buffer.BlockCopy(pointsBytes, 0, vert, vertInd, pointsBytes.Length);
            vertInd += 8 * 2 * shape.NumPoints;

            // Fill parts
            shape.Parts.Capacity = shape.NumParts;
            for (int p = 0; p < shape.NumParts; p++)
            {
                int endIndex = shape.NumPoints + shape.StartIndex;
                int startIndex = parts[partsOffset + p] + shape.StartIndex;
                if (p < shape.NumParts - 1)
                {
                    endIndex = parts[partsOffset + p + 1] + shape.StartIndex;
                }

                int count = endIndex - startIndex;
                var part = new PartRange(vert, shape.StartIndex, parts[partsOffset + p], FeatureType.MultiPoint)
                {
                    NumVertices = count
                };
                shape.Parts.Add(part);
            }

            partsOffset += shape.NumParts;

            // Fill M and Z arrays
            switch (Header.ShapeType)
            {
                case ShapeType.MultiPointM:
                    if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
                    {
                        var mExt = (IExtentM)shape.Extent;
                        mExt.MinM = reader.ReadDouble();
                        mExt.MaxM = reader.ReadDouble();
                        var mBytes = reader.ReadBytes(8 * shape.NumPoints);
                        Buffer.BlockCopy(mBytes, 0, mArray, mArrayInd, mBytes.Length);
                        mArrayInd += 8 * shape.NumPoints;
                    }

                    break;
                case ShapeType.MultiPointZ:
                    var zExt = (IExtentZ)shape.Extent;
                    zExt.MinZ = reader.ReadDouble();
                    zExt.MaxZ = reader.ReadDouble();
                    var zBytes = reader.ReadBytes(8 * shape.NumPoints);
                    Buffer.BlockCopy(zBytes, 0, zArray, zArrayInd, zBytes.Length);
                    zArrayInd += 8 * shape.NumPoints;

                    // These are listed as "optional" but there isn't a good indicator of how to
                    // determine if they were added.
                    // To handle the "optional" M values, check the contentLength for the feature.
                    // The content length does not include the 8-byte record header and is listed in 16-bit words.
                    if (shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints)
                    {
                        goto case ShapeType.MultiPointM; // fall through to also read the M section
                    }

                    break;
            }
        }

        if (isM)
        {
            M = mArray;
        }

        if (isZ)
        {
            Z = zArray;
        }

        ShapeIndices = shapeIndices;
        Vertex = vert;
    }

    progressMeter.Reset();
}
/// <summary> /// Creates a new instance of an attribute Table with no file reference /// </summary> public AttributeTable() { _deletedRows = new List<int>(); _fileType = 0x03; _progressHandler = DataManager.DefaultDataManager.ProgressHandler; _progressMeter = new ProgressMeter(_progressHandler); _dataTable = new DataTable(); _columns = new List<Field>(); }
/// <summary>
/// Loads the shapes from the given file into the given shapefile.
/// </summary>
/// <param name="fileName">Name of the file whose shapes should get loaded.</param>
/// <param name="progressHandler">ProgressHandler that shows the progress.</param>
/// <param name="shapefile">Shapefile the shapes are loaded into.</param>
/// <param name="featureType">FeatureType that should be inside the file.</param>
/// <exception cref="ArgumentNullException">Throws an ArgumentNullException, if the shapefile is null.</exception>
/// <exception cref="ArgumentException">Throws an ArgumentException, if the FeatureType is Line but the files doesn't contain lines or the FeatureType is Polygon and the file doesn't contain polygons.</exception>
/// <exception cref="NotSupportedException">Throws a NotSupportedException, if a FeatureType other than Line or Polygon is passed.</exception>
/// <exception cref="FileNotFoundException">Throws a FileNotFoundException, if the file with the path from fileName doesn't exist.</exception>
/// <exception cref="NullReferenceException">Throws a NullReferenceException, if the fileName is null.</exception>
internal static void FillLines(string fileName, IProgressHandler progressHandler, Shapefile shapefile, FeatureType featureType)
{
    // Check to ensure that the fileName is the correct shape type
    switch (featureType)
    {
        case FeatureType.Line:
            if (!CanBeRead(fileName, shapefile, ShapeType.PolyLine, ShapeType.PolyLineM, ShapeType.PolyLineZ))
            {
                return;
            }

            break;
        case FeatureType.Polygon:
            if (!CanBeRead(fileName, shapefile, ShapeType.Polygon, ShapeType.PolygonM, ShapeType.PolygonZ))
            {
                return;
            }

            break;
        default:
            throw new NotSupportedException(DataStrings.ShapeType0NotSupported);
    }

    // Reading the index file gives us an easier way to track the number of shapes and their offsets/lengths.
    var header = shapefile.Header;
    var shapeHeaders = shapefile.ReadIndexFile(fileName);
    int numShapes = shapeHeaders.Count;
    bool isM = false, isZ = false;
    switch (header.ShapeType)
    {
        case ShapeType.PolyLineM:
        case ShapeType.PolygonM:
            isM = true;
            break;
        case ShapeType.PolyLineZ:
        case ShapeType.PolygonZ:
            isZ = true;
            isM = true; // Z shapefiles carry (optional) M values as well
            break;
    }

    int totalPointsCount = 0;
    int totalPartsCount = 0;
    var shapeIndices = new List<ShapeRange>(numShapes);
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };
    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        var boundsBytes = new byte[4 * 8];
        var bounds = new double[4];

        // First pass (0-50%): read record headers, bounds, and counts to size the arrays.
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);

            // Read from the index file because some deleted records
            // might still exist in the .shp file.
            long offset = shapeHeaders[shp].ByteOffset;
            reader.Seek(offset, SeekOrigin.Begin);
            var shape = new ShapeRange(featureType, shapefile.CoordinateType)
            {
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                ShapeType = (ShapeType)reader.ReadInt32(),
                StartIndex = totalPointsCount
            };
            Debug.Assert(shape.RecordNumber == shp + 1, "The shapes record number should equal" + shp + 1);
            if (shape.ShapeType != ShapeType.NullShape)
            {
                // Bounds: 4 little-endian doubles (Xmin, Ymin, Xmax, Ymax) block-copied in one read.
                reader.Read(boundsBytes, 0, boundsBytes.Length);
                Buffer.BlockCopy(boundsBytes, 0, bounds, 0, boundsBytes.Length);
                shape.Extent.MinX = bounds[0];
                shape.Extent.MinY = bounds[1];
                shape.Extent.MaxX = bounds[2];
                shape.Extent.MaxY = bounds[3];

                // Num Parts
                shape.NumParts = reader.ReadInt32();
                totalPartsCount += shape.NumParts;

                // Num Points
                shape.NumPoints = reader.ReadInt32();
                totalPointsCount += shape.NumPoints;
            }

            shapeIndices.Add(shape);
        }

        var vert = new double[totalPointsCount * 2];
        var vertInd = 0;  // byte offset into vert for Buffer.BlockCopy
        var parts = new int[totalPartsCount];
        var partsInd = 0; // byte offset into parts for Buffer.BlockCopy
        double[] mArray = null, zArray = null;
        if (isM)
        {
            mArray = new double[totalPointsCount];
        }

        int mArrayInd = 0; // byte offset into mArray
        if (isZ)
        {
            zArray = new double[totalPointsCount];
        }

        int zArrayInd = 0; // byte offset into zArray
        int partsOffset = 0; // element offset into parts for the current shape

        // Second pass (50-100%): read part indices, coordinates, and M/Z values.
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + (shp * 50.0 / numShapes));
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape)
            {
                continue;
            }

            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek((3 * 4) + 32 + (2 * 4), SeekOrigin.Current); // Skip first bytes (Record Number, Content Length, Shapetype + BoundingBox + NumParts, NumPoints)

            // Read parts
            var partsBytes = reader.ReadBytes(4 * shape.NumParts); // Numparts * Integer(4) = existing Parts
            Buffer.BlockCopy(partsBytes, 0, parts, partsInd, partsBytes.Length);
            partsInd += 4 * shape.NumParts;

            // Read points
            var pointsBytes = reader.ReadBytes(8 * 2 * shape.NumPoints); // Numpoints * Point (X(8) + Y(8))
            Buffer.BlockCopy(pointsBytes, 0, vert, vertInd, pointsBytes.Length);
            vertInd += 8 * 2 * shape.NumPoints;

            // Fill parts: each part spans from its start index to the next part's start (or the shape's end).
            shape.Parts.Capacity = shape.NumParts;
            for (int part = 0; part < shape.NumParts; part++)
            {
                int endIndex = shape.NumPoints + shape.StartIndex;
                int startIndex = parts[partsOffset + part] + shape.StartIndex;
                if (part < shape.NumParts - 1)
                {
                    endIndex = parts[partsOffset + part + 1] + shape.StartIndex;
                }

                int count = endIndex - startIndex;
                var partR = new PartRange(vert, shape.StartIndex, parts[partsOffset + part], featureType)
                {
                    NumVertices = count
                };
                shape.Parts.Add(partR);
            }

            partsOffset += shape.NumParts;

            // Fill M and Z arrays
            switch (header.ShapeType)
            {
                case ShapeType.PolyLineM:
                case ShapeType.PolygonM:
                    if (shape.ContentLength * 2 > 44 + (4 * shape.NumParts) + (16 * shape.NumPoints))
                    {
                        var mExt = (IExtentM)shape.Extent;
                        mExt.MinM = reader.ReadDouble();
                        mExt.MaxM = reader.ReadDouble();
                        var mBytes = reader.ReadBytes(8 * shape.NumPoints);
                        Buffer.BlockCopy(mBytes, 0, mArray, mArrayInd, mBytes.Length);
                        mArrayInd += 8 * shape.NumPoints;
                    }

                    break;
                case ShapeType.PolyLineZ:
                case ShapeType.PolygonZ:
                    var zExt = (IExtentZ)shape.Extent;
                    zExt.MinZ = reader.ReadDouble();
                    zExt.MaxZ = reader.ReadDouble();
                    var zBytes = reader.ReadBytes(8 * shape.NumPoints);
                    Buffer.BlockCopy(zBytes, 0, zArray, zArrayInd, zBytes.Length);
                    zArrayInd += 8 * shape.NumPoints;

                    // These are listed as "optional" but there isn't a good indicator of how to determine if they were added.
                    // To handle the "optional" M values, check the contentLength for the feature.
                    // The content length does not include the 8-byte record header and is listed in 16-bit words.
                    if (shape.ContentLength * 2 > 60 + (4 * shape.NumParts) + (24 * shape.NumPoints))
                    {
                        goto case ShapeType.PolyLineM; // fall through to also read the M section
                    }

                    break;
            }
        }

        if (isM)
        {
            shapefile.M = mArray;
        }

        if (isZ)
        {
            shapefile.Z = zArray;
        }

        shapefile.ShapeIndices = shapeIndices;
        shapefile.Vertex = vert;
    }

    progressMeter.Reset();
}
// X Y Poly Lines: Total Length = 28 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1           Big
// Byte 4       Content Length      Integer     1           Big
// Byte 8       Shape Type 3        Integer     1           Little
// Byte 12      Xmin                Double      1           Little
// Byte 20      Ymin                Double      1           Little
// Byte 28      Xmax                Double      1           Little
// Byte 36      Ymax                Double      1           Little
// Byte 44      NumParts            Integer     1           Little
// Byte 48      NumPoints           Integer     1           Little
// Byte 52      Parts               Integer     NumParts    Little
// Byte X       Points              Point       NumPoints   Little

// X Y M Poly Lines: Total Length = 34 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1           Big
// Byte 4       Content Length      Integer     1           Big
// Byte 8       Shape Type 23       Integer     1           Little
// Byte 12      Box                 Double      4           Little
// Byte 44      NumParts            Integer     1           Little
// Byte 48      NumPoints           Integer     1           Little
// Byte 52      Parts               Integer     NumParts    Little
// Byte X       Points              Point       NumPoints   Little
// Byte Y*      Mmin                Double      1           Little
// Byte Y + 8*  Mmax                Double      1           Little
// Byte Y + 16* Marray              Double      NumPoints   Little

// X Y Z M Poly Lines: Total Length = 44 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1           Big
// Byte 4       Content Length      Integer     1           Big
// Byte 8       Shape Type 13       Integer     1           Little
// Byte 12      Box                 Double      4           Little
// Byte 44      NumParts            Integer     1           Little
// Byte 48      NumPoints           Integer     1           Little
// Byte 52      Parts               Integer     NumParts    Little
// Byte X       Points              Point       NumPoints   Little
// Byte Y       Zmin                Double      1           Little
// Byte Y + 8   Zmax                Double      1           Little
// Byte Y + 16  Zarray              Double      NumPoints   Little
// Byte Z*      Mmin                Double      1           Little
// Byte Z+8*    Mmax                Double      1           Little
// Byte Z+16*   Marray              Double      NumPoints   Little

/// <summary>
/// Reads all the polygon shape records from the named shapefile and fills this
/// featureset's Vertex, M, Z and ShapeIndices structures.
/// </summary>
/// <param name="fileName">Path of the .shp file to read. The matching .shx index is read first.</param>
/// <param name="progressHandler">Receives progress messages while reading; may be null.</param>
/// <exception cref="ArgumentNullException">Thrown when fileName is null.</exception>
/// <exception cref="FileNotFoundException">Thrown when the file does not exist.</exception>
/// <exception cref="ArgumentException">Thrown when the file is not a polygon shapefile.</exception>
private void FillPolygons(string fileName, IProgressHandler progressHandler)
{
    // FIX: the original threw NullReferenceException directly; ArgumentNullException is the
    // correct exception for a null argument (never throw NullReferenceException explicitly).
    if (fileName == null)
    {
        throw new ArgumentNullException("fileName", DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }

    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Get the basic header information.
    ShapefileHeader header = new ShapefileHeader(fileName);
    Extent = new Extent(new[] { header.Xmin, header.Ymin, header.Xmax, header.Ymax });

    // Check to ensure that the fileName is the correct shape type.
    // NOTE(review): the resource is named FileNotLines_S even though this method handles
    // polygons — confirm the message text is appropriate for polygon files.
    if (header.ShapeType != ShapeType.Polygon && header.ShapeType != ShapeType.PolygonM && header.ShapeType != ShapeType.PolygonZ)
    {
        throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);

    // TO DO: replace with a normal reader. We no longer need Buffered Binary reader as
    // the buffer can be set on the underlying file stream.
    BufferedBinaryReader bbReader = new BufferedBinaryReader(fileName, progressHandler);
    if (bbReader.FileLength == 100)
    {
        // The shapefile contains only the 100-byte header, i.e. no shapes at all.
        bbReader.Close();
        return;
    }

    // Skip the shapefile header by skipping the first 100 bytes in the shapefile.
    bbReader.Seek(100, SeekOrigin.Begin);

    int numShapes = shapeHeaders.Count;
    int[] partOffsets = new int[numShapes];
    ByteBlock allParts = new ByteBlock(BLOCKSIZE);
    ByteBlock allCoords = new ByteBlock(BLOCKSIZE);
    bool isM = (header.ShapeType == ShapeType.PolygonM || header.ShapeType == ShapeType.PolygonZ);
    bool isZ = (header.ShapeType == ShapeType.PolygonZ);
    ByteBlock allZ = null;
    ByteBlock allM = null;
    if (isZ)
    {
        allZ = new ByteBlock(BLOCKSIZE);
    }

    if (isM)
    {
        allM = new ByteBlock(BLOCKSIZE);
    }

    int pointOffset = 0;
    for (int shp = 0; shp < numShapes; shp++)
    {
        // Read from the index file because some deleted records
        // might still exist in the .shp file.
        long offset = (shapeHeaders[shp].ByteOffset);
        bbReader.Seek(offset, SeekOrigin.Begin);

        //                                                             Position  Value   Type    Number  Byte Order
        ShapeRange shape = new ShapeRange(FeatureType.Polygon);        // ------------------------------------
        shape.RecordNumber = bbReader.ReadInt32(false);                // Byte 0  Record  Integer 1       Big
        shape.ContentLength = bbReader.ReadInt32(false);               // Byte 4  Length  Integer 1       Big

        // Setting shape type also controls extent class type.
        shape.ShapeType = (ShapeType)bbReader.ReadInt32();             // Byte 8  Type    Integer 1       Little
        shape.StartIndex = pointOffset;
        if (shape.ShapeType == ShapeType.NullShape)
        {
            continue;
        }

        shape.Extent.MinX = bbReader.ReadDouble();
        shape.Extent.MinY = bbReader.ReadDouble();
        shape.Extent.MaxX = bbReader.ReadDouble();
        shape.Extent.MaxY = bbReader.ReadDouble();
        shape.NumParts = bbReader.ReadInt32();                         // Byte 44 #Parts  Integer 1       Little
        shape.NumPoints = bbReader.ReadInt32();                       // Byte 48 #Points Integer 1       Little

        partOffsets[shp] = allParts.IntOffset();
        allParts.Read(shape.NumParts * 4, bbReader);
        allCoords.Read(shape.NumPoints * 16, bbReader);
        pointOffset += shape.NumPoints;

        if (header.ShapeType == ShapeType.PolygonM)
        {
            // These are listed as "optional" but there isn't a good indicator of
            // how to determine if they were added.
            // To handle the "optional" M values, check the contentLength for the feature.
            // The content length does not include the 8-byte record header and is listed in 16-bit words.
            if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
            {
                IExtentM mExt = (IExtentM)shape.Extent;
                mExt.MinM = bbReader.ReadDouble();
                mExt.MaxM = bbReader.ReadDouble();
                if (allM != null) allM.Read(shape.NumPoints * 8, bbReader);
            }
        }

        if (header.ShapeType == ShapeType.PolygonZ)
        {
            bool hasM = shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints;
            IExtentZ zExt = (IExtentZ)shape.Extent;
            zExt.MinZ = bbReader.ReadDouble();
            zExt.MaxZ = bbReader.ReadDouble();

            // For Z shapefiles, the Z part is not optional.
            if (allZ != null) allZ.Read(shape.NumPoints * 8, bbReader);

            // The M values in a Z shapefile remain optional; use the same content-length test.
            if (hasM)
            {
                IExtentM mExt = (IExtentM)shape.Extent;
                mExt.MinM = bbReader.ReadDouble();
                mExt.MaxM = bbReader.ReadDouble();
                if (allM != null) allM.Read(shape.NumPoints * 8, bbReader);
            }
        }

        ShapeIndices.Add(shape);
    }

    // FIX: the original only closed the reader on the empty-file early return,
    // leaking the file handle on the normal path.
    bbReader.Close();

    double[] vert = allCoords.ToDoubleArray();
    Vertex = vert;
    if (isM) M = allM.ToDoubleArray();
    if (isZ) Z = allZ.ToDoubleArray();
    List<ShapeRange> shapes = ShapeIndices;
    int[] parts = allParts.ToIntArray();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Testing Parts and Holes", shapes.Count);
    for (int shp = 0; shp < shapes.Count; shp++)
    {
        ShapeRange shape = shapes[shp];
        for (int part = 0; part < shape.NumParts; part++)
        {
            int offset = partOffsets[shp];
            int endIndex = shape.NumPoints + shape.StartIndex;
            int startIndex = parts[offset + part] + shape.StartIndex;
            if (part < shape.NumParts - 1) endIndex = parts[offset + part + 1] + shape.StartIndex;
            int count = endIndex - startIndex;
            PartRange partR = new PartRange(vert, shape.StartIndex, parts[offset + part], FeatureType.Polygon);
            partR.NumVertices = count;
            shape.Parts.Add(partR);
        }

        ProgressMeter.CurrentValue = shp;
    }

    ProgressMeter.Reset();
}
/// <summary>
/// Populates the given streams for the shp and shx file when not in IndexMode.
/// </summary>
/// <param name="shpStream">Stream that is used to write the shp file.</param>
/// <param name="shxStream">Stream that is used to write the shx file.</param>
/// <param name="shapefile">The shapefile that contains the features that are written.</param>
/// <param name="addPoints">Function that is used to add the points from the features to the parts and points lists.</param>
/// <param name="expectedZType">Indicates which Z-ShapeType the header must have for the z values to be written.</param>
/// <param name="expectedMType">Indicates which M-ShapeType the header must have for the m values to be written.</param>
/// <param name="withParts">Indicates whether the parts should be written.</param>
/// <returns>The lengths of the streams in bytes.</returns>
internal static StreamLengthPair PopulateStreamsNotIndexed(Stream shpStream, Stream shxStream, Shapefile shapefile, Action<List<int>, List<Coordinate>, IFeature> addPoints, ShapeType expectedZType, ShapeType expectedMType, bool withParts)
{
    var progressMeter = new ProgressMeter(shapefile.ProgressHandler, "Saving (Not Indexed)...", shapefile.Features.Count);

    int fid = 0;
    // All shapefile offsets/lengths are measured in 16-bit words: the 100-byte
    // file header occupies the first 50 words.
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words

    foreach (IFeature f in shapefile.Features)
    {
        // Delegate extraction of parts/points so this method can serve polygons,
        // lines and multipoints alike.
        List<int> parts = new List<int>();
        List<Coordinate> points = new List<Coordinate>();
        addPoints(parts, points, f);

        bool isNullShape = false;
        int contentLength;

        // null shapes have a contentLength of 2 (just the 4-byte shape-type word pair);
        // all other shapes must have the same shape type as the header.
        if (f.Geometry.IsEmpty)
        {
            contentLength = 2;
            isNullShape = true;
        }
        else
        {
            contentLength = GetContentLength(parts.Count, points.Count, shapefile.Header.ShapeType);
        }

        //// Index File
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        shxStream.WriteBe(offset);                              // Byte 0  Offset          Integer 1   Big
        shxStream.WriteBe(contentLength);                       // Byte 4  Content Length  Integer 1   Big

        //// X Y Poly Lines
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // -------------------------------------------------------
        shpStream.WriteBe(fid + 1);                             // Byte 0  Record Number   Integer 1   Big
        shpStream.WriteBe(contentLength);                       // Byte 4  Content Length  Integer 1   Big

        if (isNullShape)
        {
            shpStream.WriteLe((int)ShapeType.NullShape);        // Byte 8  Shape Type 0    Integer 1   Little
        }
        else
        {
            shpStream.WriteLe((int)shapefile.Header.ShapeType); // Byte 8  Shape Type      Integer 1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MinX); // Byte 12 Xmin           Double  1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MinY); // Byte 20 Ymin           Double  1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MaxX); // Byte 28 Xmax           Double  1   Little
            shpStream.WriteLe(f.Geometry.EnvelopeInternal.MaxY); // Byte 36 Ymax           Double  1   Little

            if (withParts)
            {
                shpStream.WriteLe(parts.Count);                 // Byte 44 NumParts        Integer 1   Little
            }

            shpStream.WriteLe(points.Count);                    // Byte 48 NumPoints       Integer 1   Little

            if (withParts) //// Byte 52 Parts Integer NumParts Little
            {
                foreach (int part in parts)
                {
                    shpStream.WriteLe(part);
                }
            }

            // Interleave coordinates as X0,Y0,X1,Y1,... and write them in one call.
            double[] xyVals = new double[points.Count * 2];     // Byte X  Points          Point NumPoints Little
            for (int i = 0; i < points.Count; i++)
            {
                xyVals[i * 2] = points[i].X;
                xyVals[(i * 2) + 1] = points[i].Y;
            }

            shpStream.WriteLe(xyVals);

            // Z range + Z array, only for the Z variant of this shape family.
            if (shapefile.Header.ShapeType == expectedZType)
            {
                shpStream.WriteLe(f.Geometry.EnvelopeInternal.Minimum.Z);
                shpStream.WriteLe(f.Geometry.EnvelopeInternal.Maximum.Z);
                double[] zVals = new double[points.Count];
                for (int i = 0; i < points.Count; i++)
                {
                    zVals[i] = points[i].Z;
                }

                shpStream.WriteLe(zVals);
            }

            // M range + M array: written for the M variant, and also for the Z
            // variant (the shapefile spec places M after Z in Z-type records).
            if (shapefile.Header.ShapeType == expectedMType || shapefile.Header.ShapeType == expectedZType)
            {
                shpStream.WriteLe(f.Geometry.EnvelopeInternal.Minimum.M);
                shpStream.WriteLe(f.Geometry.EnvelopeInternal.Maximum.M);
                double[] mVals = new double[points.Count];
                for (int i = 0; i < points.Count; i++)
                {
                    mVals[i] = points[i].M;
                }

                shpStream.WriteLe(mVals);
            }
        }

        progressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // header bytes: each 8-byte record header is 4 words
        offset += contentLength; // adding the content length from each loop calculates the word offset
    }

    progressMeter.Reset();
    return new StreamLengthPair { ShpLength = offset, ShxLength = 50 + (fid * 4) };
}
/// <summary>
/// This tests each feature of the input
/// </summary>
/// <param name="self">This featureSet</param>
/// <param name="other">The featureSet to perform intersection with</param>
/// <param name="joinType">The attribute join type</param>
/// <param name="progHandler">A progress handler for status messages</param>
/// <returns>An IFeatureSet with the intersecting features, broken down based on the join Type.
/// NOTE(review): if joinType matches none of the three handled values this returns null — confirm callers handle that.</returns>
public static IFeatureSet Intersection(this IFeatureSet self, IFeatureSet other, FieldJoinType joinType, IProgressHandler progHandler)
{
    IFeatureSet result = null;
    ProgressMeter pm = new ProgressMeter(progHandler, "Calculating Intersection", self.Features.Count);
    if (joinType == FieldJoinType.All)
    {
        // Keep attribute columns from both sides.
        result = CombinedFields(self, other);

        // Intersection is symmetric, so only consider I X J where J <= I
        if (!self.AttributesPopulated) self.FillAttributes();
        if (!other.AttributesPopulated) other.FillAttributes();

        int i = 0;
        foreach (IFeature selfFeature in self.Features)
        {
            // Spatial-index pre-filter: only features whose envelopes overlap
            // can actually intersect.
            List<IFeature> potentialOthers = other.Select(selfFeature.Envelope.ToExtent());
            foreach (IFeature otherFeature in potentialOthers)
            {
                // Intersection(...) adds any resulting feature to 'result' as a side effect.
                selfFeature.Intersection(otherFeature, result, joinType);
            }

            pm.CurrentValue = i;
            i++;
        }

        pm.Reset();
    }

    if (joinType == FieldJoinType.LocalOnly)
    {
        // Only this featureset's attribute schema is kept; 'other' is first
        // collapsed into a single union geometry to intersect against.
        if (!self.AttributesPopulated) self.FillAttributes();

        result = new FeatureSet();
        result.CopyTableSchema(self);
        result.FeatureType = self.FeatureType;
        IFeature union;
        pm = new ProgressMeter(progHandler, "Calculating Union", other.Features.Count);
        if (other.Features != null && other.Features.Count > 0)
        {
            // Sequential pairwise union; O(n) union operations.
            union = other.Features[0];
            for (int i = 1; i < other.Features.Count; i++)
            {
                union = union.Union(other.Features[i]);
                pm.CurrentValue = i;
            }

            pm.Reset();
            pm = new ProgressMeter(progHandler, "Calculating Intersections", self.NumRows());
            Extent otherEnvelope = new Extent(union.Envelope);
            for (int shp = 0; shp < self.ShapeIndices.Count; shp++)
            {
                // Envelope rejection before the expensive geometry operation.
                if (!self.ShapeIndices[shp].Extent.Intersects(otherEnvelope)) continue;
                IFeature selfFeature = self.GetFeature(shp);
                selfFeature.Intersection(union, result, joinType);
                pm.CurrentValue = shp;
            }

            pm.Reset();
        }
    }

    if (joinType == FieldJoinType.ForeignOnly)
    {
        // Mirror image of LocalOnly: keep 'other's schema, union 'self'.
        if (!other.AttributesPopulated) other.FillAttributes();

        result = new FeatureSet();
        result.CopyTableSchema(other);
        IFeature union;
        if (self.Features != null && self.Features.Count > 0)
        {
            pm = new ProgressMeter(progHandler, "Calculating Union", self.Features.Count);
            union = self.Features[0];
            for (int i = 1; i < self.Features.Count; i++)
            {
                union = union.Union(self.Features[i]);
                pm.CurrentValue = i;
            }

            pm.Reset();
            if (other.Features != null)
            {
                pm = new ProgressMeter(progHandler, "Calculating Intersection", other.Features.Count);
                int j = 0;
                foreach (IFeature otherFeature in other.Features)
                {
                    // NOTE(review): 'test' is dereferenced without a null check — confirm
                    // IFeature.Intersection never returns null for disjoint inputs.
                    IFeature test = otherFeature.Intersection(union, result, joinType);
                    if (test.BasicGeometry != null)
                    {
                        result.Features.Add(test);
                    }

                    pm.CurrentValue = j;
                    j++;
                }
            }

            pm.Reset();
        }
    }

    return result;
}
/// <summary>
/// Saves the file to a new location
/// </summary>
/// <param name="fileName">The fileName to save</param>
/// <param name="overwrite">Boolean that specifies whether or not to overwrite the existing file</param>
public override void SaveAs(string fileName, bool overwrite)
{
    EnsureValidFileToSave(fileName, overwrite);
    Filename = fileName;

    // Set ShapeType before setting extent.
    if (CoordinateType == CoordinateType.Regular)
    {
        Header.ShapeType = ShapeType.MultiPoint;
    }

    if (CoordinateType == CoordinateType.M)
    {
        Header.ShapeType = ShapeType.MultiPointM;
    }

    if (CoordinateType == CoordinateType.Z)
    {
        Header.ShapeType = ShapeType.MultiPointZ;
    }

    HeaderSaveAs(Filename);

    if (IndexMode)
    {
        SaveAsIndexed(Filename);
        return;
    }

    // NOTE(review): these writers are only closed on the success path; an exception
    // mid-save leaves both file handles open. Consider try/finally or using.
    var bbWriter = new BufferedBinaryWriter(Filename);
    var indexWriter = new BufferedBinaryWriter(Header.ShxFilename);
    int fid = 0;
    // Offsets and content lengths are in 16-bit words; the 100-byte file header is 50 words.
    int offset = 50; // the shapefile header starts at 100 bytes, so the initial offset is 50 words
    int contentLength = 0;
    ProgressMeter = new ProgressMeter(ProgressHandler, "Saving (Not Indexed)...", Features.Count);
    foreach (IFeature f in Features)
    {
        offset += contentLength; // adding the previous content length from each loop calculates the word offset
        List<Coordinate> points = new List<Coordinate>();
        contentLength = 20; // base record: shape type (2) + bbox (16) + point count (2) words
        for (int iPart = 0; iPart < f.Geometry.NumGeometries; iPart++)
        {
            // Flatten every sub-geometry of the multipoint into one coordinate list.
            IList<Coordinate> coords = f.Geometry.GetGeometryN(iPart).Coordinates;
            foreach (Coordinate coord in coords)
            {
                points.Add(coord);
            }
        }

        if (Header.ShapeType == ShapeType.MultiPoint)
        {
            contentLength += points.Count * 8; // x, y
        }

        if (Header.ShapeType == ShapeType.MultiPointM)
        {
            contentLength += 8; // mmin, mmax
            contentLength += points.Count * 12; // x, y, m
        }

        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            contentLength += 16; // mmin, mmax, zmin, zmax
            contentLength += points.Count * 16; // x, y, m, z
        }

        // Index File
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        indexWriter.Write(offset, false);        // Byte 0  Offset  Integer 1   Big
        indexWriter.Write(contentLength, false); // Byte 4  Length  Integer 1   Big

        // X Y Poly Lines
        // ---------------------------------------------------------
        // Position     Value               Type        Number      Byte Order
        // ---------------------------------------------------------
        bbWriter.Write(fid + 1, false);          // Byte 0  Record  Integer 1   Big
        bbWriter.Write(contentLength, false);    // Byte 4  Length  Integer 1   Big
        bbWriter.Write((int)Header.ShapeType);   // Byte 8  Shape   Integer 1   Little
        if (Header.ShapeType == ShapeType.NullShape)
        {
            // NOTE(review): this branch appears unreachable — Header.ShapeType is always
            // set to a MultiPoint variant above. If it were hit, fid/offset would not be
            // advanced, desynchronizing the shx offsets. Confirm and consider removing.
            continue;
        }

        bbWriter.Write(f.Geometry.EnvelopeInternal.MinX); // Byte 12 Xmin   Double 1 Little
        bbWriter.Write(f.Geometry.EnvelopeInternal.MinY); // Byte 20 Ymin   Double 1 Little
        bbWriter.Write(f.Geometry.EnvelopeInternal.MaxX); // Byte 28 Xmax   Double 1 Little
        bbWriter.Write(f.Geometry.EnvelopeInternal.MaxY); // Byte 36 Ymax   Double 1 Little

        bbWriter.Write(points.Count);                     // Byte 44 #Points Integer 1 Little

        // Byte X Points Point #Points Little
        foreach (Coordinate coord in points)
        {
            bbWriter.Write(coord.X);
            bbWriter.Write(coord.Y);
        }

        if (Header.ShapeType == ShapeType.MultiPointZ)
        {
            bbWriter.Write(f.Geometry.EnvelopeInternal.Minimum.Z);
            bbWriter.Write(f.Geometry.EnvelopeInternal.Maximum.Z);
            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.Z);
            }
        }

        if (Header.ShapeType == ShapeType.MultiPointM || Header.ShapeType == ShapeType.MultiPointZ)
        {
            // Write a zeroed M range when there is no envelope to take it from.
            if (f.Geometry.EnvelopeInternal == null)
            {
                bbWriter.Write(0.0);
                bbWriter.Write(0.0);
            }
            else
            {
                bbWriter.Write(f.Geometry.EnvelopeInternal.Minimum.M);
                bbWriter.Write(f.Geometry.EnvelopeInternal.Maximum.M);
            }

            foreach (Coordinate coord in points)
            {
                bbWriter.Write(coord.M);
            }
        }

        ProgressMeter.CurrentValue = fid;
        fid++;
        offset += 4; // each 8-byte record header is 4 words
    }

    ProgressMeter.Reset();
    bbWriter.Close();
    indexWriter.Close();
    offset += contentLength; // account for the final record's content
    WriteFileLength(Filename, offset);
    UpdateAttributes();
    SaveProjection();
}
/// <summary> /// Executes the RasterFromLAS tool. /// </summary> /// <param name="filenameLAS">The string filename of the LAS file to convert.</param> /// <param name="outputExtent">The extent of the output raster.</param> /// <param name="numRows">The integer number of rows of the output raster.</param> /// <param name="numColumns">The integer number of columns.</param> /// <param name="output">The output raster.</param> /// <param name="cancelProgressHandler">The progress handler.</param> /// <returns>Boolean, true if the method was successful.</returns> public bool Execute( string filenameLAS, Extent outputExtent, int numRows, int numColumns, IRaster output, ICancelProgressHandler cancelProgressHandler) { // create output raster output = Raster.CreateRaster( output.Filename, string.Empty, numColumns, numRows, 1, typeof(int), new[] { string.Empty }); RasterBounds bound = new RasterBounds(numRows, numColumns, outputExtent); output.Bounds = bound; output.NoDataValue = int.MinValue; ProgressMeter pm = new ProgressMeter( cancelProgressHandler, TextStrings.ConvertingLAS + filenameLAS + TextStrings.Progresstoraster + "...", numRows); for (int row = 0; row < numRows; row++) { for (int j = 0; j < output.Bounds.NumColumns; j++) { // TO DO: PING CAN ADD LAS READING AND CELL ASSIGNMENT HERE if (cancelProgressHandler.Cancel) { return false; } } pm.CurrentValue = row; } // output = Temp; output.Save(); return true; }
/// <summary>
/// If no file exists, this writes the header and no-data values. If a file exists, it will assume
/// that data already has been filled in the file and will attempt to insert the data values
/// as a window into the file. If you want to create a copy of the file and values, just use
/// System.IO.File.Copy, it almost certainly would be much more optimized.
/// </summary>
/// <param name="fileName">Path of the raster file to create or paste the window into.</param>
private void Write(string fileName)
{
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Writing values to " + Filename, NumRows);

    // FIX: cast before multiplying. The original computed NumRows * NumColumns * ByteSize
    // entirely in 32-bit arithmetic and could overflow for large rasters before the
    // result was widened to long.
    long expectedByteCount = (long)NumRows * NumColumns * ByteSize;

    // NOTE(review): these thresholds are not mutually exclusive, so for any raster under
    // 100000 bytes the last assignment wins (StepPercent = 50). Confirm the cascade is
    // intended rather than an else-if chain.
    if (expectedByteCount < 1000000)
    {
        pm.StepPercent = 5;
    }

    if (expectedByteCount < 5000000)
    {
        pm.StepPercent = 10;
    }

    if (expectedByteCount < 100000)
    {
        pm.StepPercent = 50;
    }

    if (File.Exists(fileName))
    {
        FileInfo fi = new FileInfo(Filename);

        // FIX: same 32-bit overflow hazard in the expected file size; widen before multiplying.
        long expectedFileSize = HeaderSize + (long)ByteSize * NumColumnsInFile * NumRowsInFile;

        // if the following test fails, then the target raster doesn't fit the bill for pasting into, so clear it and write a new one.
        if (fi.Length == expectedFileSize)
        {
            WriteHeader(fileName);
            WriteRaster(Data);
            return;
        }

        // If we got here, the existing file didn't match the specifications, so write a new one.
        Debug.WriteLine("The size of the file was " + fi.Length + " which didn't match the expected " + expectedFileSize);
    }

    if (File.Exists(Filename))
    {
        File.Delete(Filename);
    }

    WriteHeader(fileName);

    // Open as append and it will automatically skip the header for us.
    using (var bw = new BinaryWriter(new FileStream(Filename, FileMode.Append, FileAccess.Write, FileShare.None, ByteSize * NumColumnsInFile)))
    {
        // the row and column counters here are relative to the whole file, not just the window that is currently in memory.
        pm.EndValue = NumRowsInFile;

        // Hoisted out of the loop: one boxed conversion of the no-data value is enough.
        T[] nd = new T[1];
        nd[0] = (T)Convert.ChangeType(NoDataValue, typeof(T));

        for (int row = 0; row < NumRowsInFile; row++)
        {
            byte[] rawBytes = new byte[NumColumnsInFile * ByteSize];

            if (row < StartRow || row >= StartRow + NumRows)
            {
                // FIX: rows outside the in-memory window previously indexed
                // Data[row - StartRow] out of range; fill them entirely with no-data instead.
                for (int col = 0; col < NumColumnsInFile; col++)
                {
                    Buffer.BlockCopy(nd, 0, rawBytes, col * ByteSize, ByteSize);
                }
            }
            else
            {
                // Copy the in-memory window row into place, then pad the columns on
                // either side of the window with no-data.
                Buffer.BlockCopy(Data[row - StartRow], 0, rawBytes, StartColumn * ByteSize, NumColumns * ByteSize);
                for (int col = 0; col < StartColumn; col++)
                {
                    Buffer.BlockCopy(nd, 0, rawBytes, col * ByteSize, ByteSize);
                }

                for (int col = EndColumn + 1; col < NumColumnsInFile; col++)
                {
                    Buffer.BlockCopy(nd, 0, rawBytes, col * ByteSize, ByteSize);
                }
            }

            bw.Write(rawBytes);
            pm.CurrentValue = row;
        }
    }

    pm.Reset();
}
/// <summary>
/// Gets the values from a file based data source rather than an in memory object.
/// Small sources are read exhaustively page by page; large sources are randomly sampled
/// up to EditorSettings.MaxSampleCount values. Results are sorted and fed to Statistics.
/// </summary>
/// <param name="source">The attribute source to read rows from in pages.</param>
/// <param name="progressHandler">Progress handler that can also cancel the sampling loop; may be null.</param>
public void GetValues(IAttributeSource source, ICancelProgressHandler progressHandler)
{
    int pageSize = 100000;
    Values = new List<double>();
    string normField = EditorSettings.NormField;
    string fieldName = EditorSettings.FieldName;
    if (source.NumRows() < EditorSettings.MaxSampleCount)
    {
        // Exhaustive path: visit every page and keep every parseable value.
        int numPages = (int)Math.Ceiling((double)source.NumRows() / pageSize);
        for (int ipage = 0; ipage < numPages; ipage++)
        {
            // FIX: the original used NumRows() % pageSize for the last page, which is 0
            // when the row count is an exact multiple of the page size, silently skipping
            // a full page of rows.
            int numRows = Math.Min(pageSize, source.NumRows() - (ipage * pageSize));
            DataTable table = source.GetAttributes(ipage * pageSize, numRows);
            if (!string.IsNullOrEmpty(EditorSettings.ExcludeExpression))
            {
                DataRow[] rows = table.Select("NOT (" + EditorSettings.ExcludeExpression + ")");
                foreach (DataRow row in rows)
                {
                    double val;
                    if (!double.TryParse(row[fieldName].ToString(), out val)) continue;
                    if (double.IsNaN(val)) continue;
                    if (normField != null)
                    {
                        double norm;
                        // FIX: the original rechecked double.IsNaN(val) here (always false by
                        // this point); the normalization denominator is what must be validated.
                        if (!double.TryParse(row[normField].ToString(), out norm) || double.IsNaN(norm)) continue;
                        Values.Add(val / norm);
                        continue;
                    }

                    Values.Add(val);
                }
            }
            else
            {
                foreach (DataRow row in table.Rows)
                {
                    double val;
                    if (!double.TryParse(row[fieldName].ToString(), out val)) continue;
                    if (double.IsNaN(val)) continue;
                    if (normField != null)
                    {
                        double norm;
                        // FIX: same correction as above — validate norm, not val.
                        if (!double.TryParse(row[normField].ToString(), out norm) || double.IsNaN(norm)) continue;
                        Values.Add(val / norm);
                        continue;
                    }

                    Values.Add(val);
                }
            }
        }
    }
    else
    {
        // Sampling path: draw roughly count random, distinct rows spread evenly over pages.
        Dictionary<int, double> randomValues = new Dictionary<int, double>();
        pageSize = 10000;
        int count = EditorSettings.MaxSampleCount;
        Random rnd = new Random();
        AttributePager ap = new AttributePager(source, pageSize);
        int countPerPage = count / ap.NumPages();
        ProgressMeter pm = new ProgressMeter(progressHandler, "Sampling " + count + " random values", count);
        for (int iPage = 0; iPage < ap.NumPages(); iPage++)
        {
            for (int i = 0; i < countPerPage; i++)
            {
                double val;
                double norm = 1;
                int index;
                bool failed;
                do
                {
                    // FIX: the original declared 'failed' outside the loop and never cleared
                    // it, so a single unparseable row made the loop condition permanently
                    // true — an infinite loop. Reset it on every draw.
                    failed = false;
                    index = rnd.Next(ap.StartIndex, ap.StartIndex + pageSize);
                    DataRow dr = ap.Row(index);
                    if (!double.TryParse(dr[fieldName].ToString(), out val)) failed = true;
                    if (normField == null) continue;
                    if (!double.TryParse(dr[normField].ToString(), out norm)) failed = true;
                }
                while (randomValues.ContainsKey(index) || double.IsNaN(val) || failed);

                if (normField != null)
                {
                    Values.Add(val / norm);
                }
                else
                {
                    Values.Add(val);
                }

                randomValues.Add(index, val);
                pm.CurrentValue = i + (iPage * countPerPage);
            }

            //Application.DoEvents();
            if (progressHandler != null && progressHandler.Cancel)
            {
                break;
            }
        }

        // FIX: the original never reset the sampling progress meter.
        pm.Reset();
    }

    Values.Sort();
    Statistics.Calculate(Values);
}
/// <summary> /// Gets the statistics all the values. If the entire content is not currently in-ram, /// ReadRow will be used to read individual lines and performing the calculations. /// </summary> public override void GetStatistics() { ProgressMeter pm = new ProgressMeter(ProgressHandler, DataStrings.CalculatingStatistics, NumRows); T min = Global.MaximumValue <T>(); T max = Global.MinimumValue <T>(); double total = 0; double sqrTotal = 0; int count = 0; if (IsInRam == false || this.IsFullyWindowed() == false) { for (int row = 0; row < NumRowsInFile; row++) { T[] values = ReadRow(row); for (int col = 0; col < NumColumnsInFile; col++) { T val = values[col]; double dblVal = Global.ToDouble(val); if (dblVal == NoDataValue) { continue; } if (val.CompareTo(max) > 0) { max = val; } if (val.CompareTo(min) < 0) { min = val; } total += dblVal; sqrTotal += dblVal * dblVal; count++; } pm.CurrentValue = row; } } else { for (int row = 0; row < NumRows; row++) { for (int col = 0; col < NumColumns; col++) { T val = Data[row][col]; double dblVal = Global.ToDouble(val); if (dblVal == NoDataValue) { continue; } if (val.CompareTo(max) > 0) { max = val; } if (val.CompareTo(min) < 0) { min = val; } total += dblVal; sqrTotal += dblVal * dblVal; count++; } pm.CurrentValue = row; } } Value.Updated = false; Minimum = Global.ToDouble(min); Maximum = Global.ToDouble(max); Mean = total / count; NumValueCells = count; StdDeviation = (float)Math.Sqrt((sqrTotal / NumValueCells) - (total / NumValueCells) * (total / NumValueCells)); pm.Reset(); }
private void CreateUniqueCategories(string fieldName, IAttributeSource source, ICancelProgressHandler progressHandler) { Breaks = GetUniqueValues(fieldName, source, progressHandler); string fieldExpression = "[" + fieldName.ToUpper() + "]"; ClearCategories(); bool isStringField = CheckFieldType(fieldName, source); ProgressMeter pm = new ProgressMeter(progressHandler, "Building Feature Categories", Breaks.Count); List<double> sizeRamp = GetSizeSet(Breaks.Count); List<Color> colorRamp = GetColorSet(Breaks.Count); for (int colorIndex = 0; colorIndex < Breaks.Count; colorIndex++) { Break brk = Breaks[colorIndex]; //get the color for the category Color randomColor = colorRamp[colorIndex]; double size = sizeRamp[colorIndex]; IFeatureCategory cat = CreateNewCategory(randomColor, size) as IFeatureCategory; if (cat != null) { //cat.SelectionSymbolizer = _selectionSymbolizer.Copy(); cat.LegendText = brk.Name; if (isStringField) cat.FilterExpression = fieldExpression + "= '" + brk.Name.Replace("'", "''") + "'"; else cat.FilterExpression = fieldExpression + "=" + brk.Name; AddCategory(cat); } colorIndex++; pm.CurrentValue = colorIndex; } pm.Reset(); }
/// <summary>
/// Gets the count of members that match the expression
/// </summary>
/// <param name="expressions">The string expression to test</param>
/// <param name="progressHandler">The progress handler that can also cancel the counting</param>
/// <param name="maxSampleSize">The integer maximum sample size from which to draw counts. If this is negative, it will not be used.</param>
/// <returns>The integer count of the members that match the expression.</returns>
public override int[] GetCounts(string[] expressions, ICancelProgressHandler progressHandler, int maxSampleSize)
{
    // When attributes are already in memory, delegate to the in-memory implementation.
    if (AttributesPopulated)
    {
        return base.GetCounts(expressions, progressHandler, maxSampleSize);
    }

    int[] counts = new int[expressions.Length];

    // The most common case would be no filter expression, in which case the count is simply the number of shapes.
    bool requiresRun = false;
    for (int iex = 0; iex < expressions.Length; iex++)
    {
        if (!string.IsNullOrEmpty(expressions[iex]))
        {
            requiresRun = true;
        }
        else
        {
            counts[iex] = NumRows();
        }
    }

    if (!requiresRun)
    {
        return counts;
    }

    AttributePager ap = new AttributePager(this, 5000);
    ProgressMeter pm = new ProgressMeter(progressHandler, "Calculating Counts", ap.NumPages());

    // Don't bother to use a sampling approach if the number of rows is on the same order of magnitude as the number of samples.
    if (maxSampleSize > 0 && maxSampleSize < NumRows() / 2)
    {
        // Sampling path: build an in-memory DataTable of random distinct rows,
        // then evaluate each expression against the sample.
        DataTable sample = new DataTable();
        sample.Columns.AddRange(GetColumns());
        Dictionary<int, int> usedRows = new Dictionary<int, int>();
        int samplesPerPage = maxSampleSize / ap.NumPages();
        Random rnd = new Random(DateTime.Now.Millisecond);
        for (int page = 0; page < ap.NumPages(); page++)
        {
            for (int i = 0; i < samplesPerPage; i++)
            {
                int row;
                // Rejection sampling to avoid duplicates.
                // NOTE(review): if samplesPerPage approaches the page row count this loop
                // can spin for a long time, and on a short final page
                // rnd.Next(StartIndex, StartIndex + PageSize) can produce an index past
                // the last row — confirm AttributePager.Row tolerates that.
                do
                {
                    row = rnd.Next(ap.StartIndex, ap.StartIndex + ap.PageSize);
                }
                while (usedRows.ContainsKey(row));

                usedRows.Add(row, row);
                sample.Rows.Add(ap.Row(row).ItemArray);
            }

            ap.MoveNext();

            pm.CurrentValue = page;
            if (progressHandler.Cancel)
            {
                break;
            }

            //Application.DoEvents();
        }

        for (int i = 0; i < expressions.Length; i++)
        {
            try
            {
                DataRow[] dr = sample.Select(expressions[i]);
                counts[i] += dr.Length;
            }
            catch (Exception ex)
            {
                // A malformed expression leaves its count at 0 rather than aborting the batch.
                Debug.WriteLine(ex);
            }
        }

        pm.Reset();
        return counts;
    }

    // Exhaustive path: evaluate every expression against every page.
    for (int page = 0; page < ap.NumPages(); page++)
    {
        for (int i = 0; i < expressions.Length; i++)
        {
            DataRow[] dr = ap[page].Select(expressions[i]);
            counts[i] += dr.Length;
        }

        pm.CurrentValue = page;
        if (progressHandler.Cancel)
        {
            break;
        }

        //Application.DoEvents();
    }

    pm.Reset();
    return counts;
}
/// <summary>
/// Executes the slope generation raster.
/// </summary>
/// <param name="raster">The input altitude raster.</param>
/// <param name="inZFactor">The double precision multiplicative scaling factor for elevation values.</param>
/// <param name="slopeInPercent">A boolean parameter that clarifies the nature of the slope values. If this is true, the values represent percent slope.</param>
/// <param name="result">The output slope raster.</param>
/// <param name="cancelProgressHandler">The progress handler, may be null.</param>
/// <returns>A boolean value, true if the process was successful.</returns>
public static bool GetSlope(IRaster raster, double inZFactor, bool slopeInPercent, ref IRaster result, ICancelProgressHandler cancelProgressHandler)
{
    // Validate the input and output data; without both there is nothing to compute or to receive the result.
    if (raster == null || result == null)
    {
        return false;
    }

    int noOfCol = raster.NumColumns;
    int noOfRow = raster.NumRows;

    // Create the new raster with the appropriate dimensions, mirroring the input's
    // no-data marker, bounds and projection.
    IRaster temp = Raster.CreateRaster("SlopeRaster.bgd", string.Empty, noOfCol, noOfRow, 1, typeof(double), new[] { string.Empty });
    temp.NoDataValue = raster.NoDataValue;
    temp.Bounds = raster.Bounds;
    temp.Projection = raster.Projection;

    ProgressMeter progMeter = null;
    try
    {
        // Progress is only reported when a handler was supplied.
        if (cancelProgressHandler != null)
            progMeter = new ProgressMeter(cancelProgressHandler, "Calculating Slope", temp.NumRows);

        for (int i = 0; i < temp.NumRows; i++)
        {
            if (cancelProgressHandler != null)
            {
                progMeter.Next();
                // Only push progress to the UI every 100 rows to limit overhead.
                if ((i % 100) == 0)
                {
                    progMeter.SendProgress();

                    // HACK: DoEvents messes up the normal flow of your application.
                    System.Windows.Forms.Application.DoEvents();
                }
            }
            for (int j = 0; j < temp.NumColumns; j++)
            {
                // Interior cells have a complete 3x3 neighborhood available.
                if (i > 0 && i < temp.NumRows - 1 && j > 0 && j < temp.NumColumns - 1)
                {
                    // 3x3 window: z1..z3 are the row above, z4/z5 flank the cell, z6..z8 are the row below.
                    // NOTE(review): neighbors equal to NoDataValue are not screened out here, so slopes
                    // adjacent to no-data cells are computed from the raw marker values — confirm intended.
                    double z1 = raster.Value[i - 1, j - 1];
                    double z2 = raster.Value[i - 1, j];
                    double z3 = raster.Value[i - 1, j + 1];
                    double z4 = raster.Value[i, j - 1];
                    double z5 = raster.Value[i, j + 1];
                    double z6 = raster.Value[i + 1, j - 1];
                    double z7 = raster.Value[i + 1, j];
                    double z8 = raster.Value[i + 1, j + 1];

                    // 3rd Order Finite Difference slope algorithm.
                    double dZdX = inZFactor * ((z3 - z1) + (2 * (z5 - z4)) + (z8 - z6)) / (8 * raster.CellWidth);
                    double dZdY = inZFactor * ((z1 - z6) + (2 * (z2 - z7)) + (z3 - z8)) / (8 * raster.CellHeight);

                    // Gradient magnitude converted from radians to degrees.
                    double slope = Math.Atan(Math.Sqrt((dZdX * dZdX) + (dZdY * dZdY))) * (180 / Math.PI);

                    // Convert the slope from degrees to percent slope if requested.
                    if (slopeInPercent)
                    {
                        slope = (Math.Tan(slope * Math.PI / 180)) * 100;
                    }
                    temp.Value[i, j] = slope;

                    if (cancelProgressHandler != null && cancelProgressHandler.Cancel)
                    {
                        return false;
                    }
                }
                else
                {
                    // Edge cells lack a full neighborhood, so mark them as no-data.
                    temp.Value[i, j] = temp.NoDataValue;
                }

                if (cancelProgressHandler != null && cancelProgressHandler.Cancel)
                {
                    return false;
                }
            }
        }
        result = temp;

        // Only persist and report success when the result covers its full window.
        if (result.IsFullyWindowed())
        {
            result.Save();
            return true;
        }
        return false;
    }
    finally
    {
        // Always clear the progress meter, even on cancellation or failure.
        if (progMeter != null)
        {
            progMeter.Reset();
            System.Windows.Forms.Application.DoEvents();
        }
    }
}
/// <summary>
/// Clips a raster with a polygon feature.
/// </summary>
/// <param name="polygon">The clipping polygon feature</param>
/// <param name="input">The input raster object</param>
/// <param name="outputFileName">the output raster file name</param>
/// <param name="cancelProgressHandler">Progress handler for reporting progress status and cancelling the operation</param>
/// <remarks>We assume there is only one part in the polygon.
/// Traverses the raster with a vertical scan line from left to right, bottom to top</remarks>
/// <returns>The clipped raster, the unchanged input when the polygon lies outside it, or null when cancelled.</returns>
public static IRaster ClipRasterWithPolygon(IFeature polygon, IRaster input, string outputFileName, ICancelProgressHandler cancelProgressHandler = null)
{
    // If the polygon is completely outside the raster there is nothing to clip.
    if (!input.ContainsFeature(polygon))
        return input;

    if (cancelProgressHandler != null)
        cancelProgressHandler.Progress(null, 16, "Retrieving the borders.");

    List<Border> borders = GetBorders(polygon);

    if (cancelProgressHandler != null)
        cancelProgressHandler.Progress(null, 33, "Copying raster.");

    // Create the output raster with the same geometry as the input.
    IRaster output = Raster.CreateRaster(outputFileName, input.DriverCode, input.NumColumns, input.NumRows, 1, input.DataType, new[] { string.Empty });
    output.Bounds = input.Bounds.Copy();
    output.NoDataValue = input.NoDataValue;
    if (input.CanReproject)
    {
        output.Projection = input.Projection;
    }

    // Set all initial values of the output to NoData; cells inside the polygon are filled later.
    for (int i = 0; i < output.NumRows; i++)
    {
        for (int j = 0; j < output.NumColumns; j++)
        {
            output.Value[i, j] = output.NoDataValue;
        }
    }

    double xStart = GetXStart(polygon, output);
    int columnStart = GetStartColumn(polygon, output); // get the index of the first column

    ProgressMeter pm = new ProgressMeter(cancelProgressHandler, "Clipping Raster", output.NumColumns);
    pm.StepPercent = 5;
    pm.StartValue = 33;

    int col = 0;
    for (int columnCurrent = columnStart; columnCurrent < output.NumColumns; columnCurrent++)
    {
        // World x-coordinate of the current vertical scan line.
        double xCurrent = xStart + col * output.CellWidth;

        var intersections = GetYIntersections(borders, xCurrent);
        intersections.Sort();
        ParseIntersections(intersections, xCurrent, columnCurrent, output, input);

        // BUG FIX: the meter's maximum is NumColumns, so it must be advanced by the
        // column index; the previous code assigned the world x-coordinate xCurrent.
        pm.CurrentValue = columnCurrent;

        // update counter
        col++;

        // cancel if requested
        if (cancelProgressHandler != null && cancelProgressHandler.Cancel)
        {
            pm.Reset();
            return null;
        }
    }

    pm.Reset();
    output.Save();
    return output;
}
/// <inheritdoc/>
public void CopyFeatures(IFeatureSet source, bool copyAttributes)
{
    // NOTE(review): the meter maximum uses this feature set's ShapeIndices.Count before the
    // copy happens — presumably source.ShapeIndices.Count was intended; confirm against callers.
    ProgressMeter = new ProgressMeter(ProgressHandler, "Copying Features", ShapeIndices.Count);

    // Deep-copy the vertex array and the shape ranges from the source.
    Vertex = source.Vertex.Copy();
    _shapeIndices = new List<ShapeRange>();
    foreach (ShapeRange range in source.ShapeIndices)
    {
        _shapeIndices.Add(range.Copy());
    }

    // Replicate the source's column schema when attributes are requested.
    if (copyAttributes)
    {
        foreach (DataColumn dc in source.GetColumns())
        {
            if (dc != null)
            {
                DataColumn outCol = new DataColumn(dc.ColumnName, dc.DataType, dc.Expression, dc.ColumnMapping);
                Field fld = new Field(outCol);
                DataTable.Columns.Add(fld);
            }
        }
    }

    if (source.AttributesPopulated)
    {
        // Handle data table content directly
        if (!IndexMode)
        {
            // If not in index mode, just handle this using features
            Features.SuspendEvents();
            int i = 0;
            foreach (IFeature f in source.Features)
            {
                IFeature copy = AddFeature(f.BasicGeometry);
                copy.ShapeIndex = ShapeIndices[i];
                if (copyAttributes)
                {
                    copy.DataRow.ItemArray = f.DataRow.ItemArray.Copy();
                }
                i++;
            }
            Features.ResumeEvents();
        }
        else
        {
            // We need to copy the attributes, but just copy a datarow
            if (copyAttributes)
            {
                foreach (DataRow row in source.DataTable.Rows)
                {
                    DataRow result = DataTable.NewRow();
                    result.ItemArray = row.ItemArray.Copy();
                    DataTable.Rows.Add(result);
                }
            }
        }
    }
    else
    {
        AttributesPopulated = false;

        // Handle data table content directly
        if (!IndexMode)
        {
            // If not in index mode, just handle this using features
            Features.SuspendEvents();
            int i = 0;
            foreach (IFeature f in source.Features)
            {
                IFeature result = AddFeature(f.BasicGeometry);
                result.ShapeIndex = ShapeIndices[i];
                i++;
            }
            Features.ResumeEvents();
        }

        if (copyAttributes)
        {
            // We need to copy the attributes, but use the page system
            int maxRow = NumRows();
            const int pageSize = 10000;
            int numPages = (int)Math.Ceiling(maxRow / (double)pageSize);
            for (int i = 0; i < numPages; i++)
            {
                int numRows = pageSize;

                // BUG FIX: the final (partial) page holds the leftover rows,
                // maxRow - pageSize * i; the previous code computed numPages - pageSize * i,
                // which is negative for any realistic table.
                if (i == numPages - 1)
                {
                    numRows = maxRow - (pageSize * i);
                }
                DataTable dt = source.GetAttributes(i * pageSize, numRows);
                SetAttributes(i * pageSize, dt);
            }
        }
    }
}
/// <summary>
/// This assumes that the base image has been written to the file. This will now attempt to calculate
/// the down-sampled images.
/// </summary>
public void CreatePyramids2()
{
    // The meter maximum is the row count of the base image.
    // NOTE(review): prog increments roughly once per row at EVERY scale, so the total across
    // scales can exceed this maximum — confirm ProgressMeter tolerates overshoot.
    double count = _header.ImageHeaders[0].NumRows;
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", count);
    int prog = 0;

    // Each pass reads rows at the current scale, blurs them vertically in a 3-row window,
    // down-samples, and writes the result one level up (scale + 1).
    for (int scale = 0; scale < _header.ImageHeaders.Length - 1; scale++)
    {
        PyramidImageHeader ph = _header.ImageHeaders[scale];
        int rows = ph.NumRows;
        int cols = ph.NumColumns;

        // First output row: blur the first two rows (no row above, hence null).
        byte[] r1 = ReadWindow(0, 0, 1, cols, scale);
        byte[] r2 = ReadWindow(1, 0, 1, cols, scale);
        byte[] vals = Blur(null, r1, r2);
        vals = DownSample(vals);
        WriteWindow(vals, 0, 0, 1, cols / 2, scale + 1);
        prog++;
        pm.CurrentValue = prog;

        // Second output row uses the full 3-row window r1..r3.
        byte[] r3 = ReadWindow(2, 0, 1, cols, scale);
        vals = Blur(r1, r2, r3);
        vals = DownSample(vals);
        WriteWindow(vals, 1, 0, 1, cols / 2, scale + 1);
        prog++;
        pm.CurrentValue = prog;

        // Slide the 3-row window down the image; only even source rows produce an output row
        // (the odd rows are skipped because the output has half the height).
        for (int row = 3; row < rows - 1; row++)
        {
            r1 = r2;
            r2 = r3;
            r3 = ReadWindow(row, 0, 1, cols, scale);
            prog++;
            pm.CurrentValue = prog;
            if (row % 2 == 1) continue;
            vals = Blur(r1, r2, r3);
            vals = DownSample(vals);
            WriteWindow(vals, row / 2 - 1, 0, 1, cols / 2, scale + 1);
        }

        // Handle the last output row when the source height is odd-aligned,
        // mirroring r2 in place of the missing row below.
        if ((rows - 1) % 2 == 0)
        {
            vals = Blur(r2, r3, r2);
            vals = DownSample(vals);
            WriteWindow(vals, rows / 2 - 1, 0, 1, cols / 2, scale + 1);
        }
        prog++;
        pm.CurrentValue = prog;
    }
    pm.Reset();
}
/// <summary>
/// Reads all line or polygon shape records from the .shp file named by <paramref name="fileName"/>
/// into the given shapefile's ShapeIndices, Vertex, and (when present) M and Z arrays.
/// </summary>
/// <param name="fileName">The path of the .shp file to read. Must exist.</param>
/// <param name="progressHandler">The handler that receives read progress, may be null.</param>
/// <param name="shapefile">The shapefile object that receives the parsed geometry.</param>
/// <param name="featureType">Either FeatureType.Line or FeatureType.Polygon; anything else throws.</param>
internal static void FillLines(string fileName, IProgressHandler progressHandler, Shapefile shapefile, FeatureType featureType)
{
    // Check to ensure the fileName is not null
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }
    if (shapefile == null) throw new ArgumentNullException("shapefile");
    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }
    if (featureType != FeatureType.Line && featureType != FeatureType.Polygon)
    {
        throw new NotSupportedException();
    }

    var header = shapefile.Header;

    // Check to ensure that the fileName is the correct shape type
    switch (featureType)
    {
        case FeatureType.Line:
            if (header.ShapeType != ShapeType.PolyLine && header.ShapeType != ShapeType.PolyLineM && header.ShapeType != ShapeType.PolyLineZ)
            {
                throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
            }
            break;
        case FeatureType.Polygon:
            if (header.ShapeType != ShapeType.Polygon && header.ShapeType != ShapeType.PolygonM && header.ShapeType != ShapeType.PolygonZ)
            {
                throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
            }
            break;
    }

    // A shapefile that contains only its 100-byte header has no records.
    if (new FileInfo(fileName).Length == 100)
    {
        // the file is empty so we are done reading
        return;
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    var shapeHeaders = shapefile.ReadIndexFile(fileName);
    int numShapes = shapeHeaders.Count;

    // M-typed shapes carry measures; Z-typed shapes carry both Z values and (optionally) measures.
    bool isM = false, isZ = false;
    switch (header.ShapeType)
    {
        case ShapeType.PolyLineM:
        case ShapeType.PolygonM:
            isM = true;
            break;
        case ShapeType.PolyLineZ:
        case ShapeType.PolygonZ:
            isZ = true;
            isM = true;
            break;
    }

    int totalPointsCount = 0;
    int totalPartsCount = 0;
    var shapeIndices = new List<ShapeRange>(numShapes);
    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };
    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        var boundsBytes = new byte[4 * 8];
        var bounds = new double[4];

        // --- First pass (0..50%): read each record header to learn per-shape part/point
        // counts and the running totals needed to size the flat arrays. ---
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 50.0 / numShapes);

            // Read from the index file because some deleted records
            // might still exist in the .shp file.
            long offset = (shapeHeaders[shp].ByteOffset);
            reader.Seek(offset, SeekOrigin.Begin);
            var shape = new ShapeRange(featureType, shapefile.CoordinateType)
            {
                // Record number and content length are big-endian per the shapefile spec.
                RecordNumber = reader.ReadInt32(Endian.BigEndian),
                ContentLength = reader.ReadInt32(Endian.BigEndian),
                ShapeType = (ShapeType)reader.ReadInt32(),
                StartIndex = totalPointsCount
            };
            Debug.Assert(shape.RecordNumber == shp + 1);

            if (shape.ShapeType != ShapeType.NullShape)
            {
                // Bounds: 4 little-endian doubles (Xmin, Ymin, Xmax, Ymax).
                reader.Read(boundsBytes, 0, boundsBytes.Length);
                Buffer.BlockCopy(boundsBytes, 0, bounds, 0, boundsBytes.Length);
                shape.Extent.MinX = bounds[0];
                shape.Extent.MinY = bounds[1];
                shape.Extent.MaxX = bounds[2];
                shape.Extent.MaxY = bounds[3];

                // Num Parts
                shape.NumParts = reader.ReadInt32();
                totalPartsCount += shape.NumParts;

                // Num Points
                shape.NumPoints = reader.ReadInt32();
                totalPointsCount += shape.NumPoints;
            }
            shapeIndices.Add(shape);
        }

        // Flat storage for all shapes: interleaved x,y vertices, part start offsets,
        // and optional M/Z value arrays.
        var vert = new double[totalPointsCount * 2];
        var vertInd = 0;
        var parts = new int[totalPartsCount];
        var partsInd = 0;
        double[] mArray = null, zArray = null;
        if (isM)
        {
            mArray = new double[totalPointsCount];
        }
        int mArrayInd = 0;
        if (isZ)
        {
            zArray = new double[totalPointsCount];
        }
        int zArrayInd = 0;

        // --- Second pass (50..100%): read the actual part offsets, coordinates and M/Z data. ---
        int partsOffset = 0;
        for (int shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(50 + shp * 50.0 / numShapes);
            var shape = shapeIndices[shp];
            if (shape.ShapeType == ShapeType.NullShape) continue;
            reader.Seek(shapeHeaders[shp].ByteOffset, SeekOrigin.Begin);
            reader.Seek(3 * 4 + 32 + 2 * 4, SeekOrigin.Current); // Skip record header, shape type, bounds, and the two counts

            // Read parts (4 bytes per part index). BlockCopy offsets are in bytes.
            var partsBytes = reader.ReadBytes(4 * shape.NumParts);
            Buffer.BlockCopy(partsBytes, 0, parts, partsInd, partsBytes.Length);
            partsInd += 4 * shape.NumParts;

            // Read points (16 bytes per point: x then y as doubles).
            var pointsBytes = reader.ReadBytes(8 * 2 * shape.NumPoints);
            Buffer.BlockCopy(pointsBytes, 0, vert, vertInd, pointsBytes.Length);
            vertInd += 8 * 2 * shape.NumPoints;

            // Fill parts: each PartRange spans from its start offset to the next part's
            // start (or the end of the shape for the last part).
            shape.Parts.Capacity = shape.NumParts;
            for (int part = 0; part < shape.NumParts; part++)
            {
                int endIndex = shape.NumPoints + shape.StartIndex;
                int startIndex = parts[partsOffset + part] + shape.StartIndex;
                if (part < shape.NumParts - 1)
                {
                    endIndex = parts[partsOffset + part + 1] + shape.StartIndex;
                }
                int count = endIndex - startIndex;
                var partR = new PartRange(vert, shape.StartIndex, parts[partsOffset + part], featureType)
                {
                    NumVertices = count
                };
                shape.Parts.Add(partR);
            }
            partsOffset += shape.NumParts;

            // Fill M and Z arrays
            switch (header.ShapeType)
            {
                case ShapeType.PolyLineM:
                case ShapeType.PolygonM:
                    // M values are optional; their presence is inferred from the content length.
                    if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
                    {
                        var mExt = (IExtentM)shape.Extent;
                        mExt.MinM = reader.ReadDouble();
                        mExt.MaxM = reader.ReadDouble();
                        var mBytes = reader.ReadBytes(8 * shape.NumPoints);
                        Buffer.BlockCopy(mBytes, 0, mArray, mArrayInd, mBytes.Length);
                        mArrayInd += 8 * shape.NumPoints;
                    }
                    break;
                case ShapeType.PolyLineZ:
                case ShapeType.PolygonZ:
                    var zExt = (IExtentZ)shape.Extent;
                    zExt.MinZ = reader.ReadDouble();
                    zExt.MaxZ = reader.ReadDouble();
                    var zBytes = reader.ReadBytes(8 * shape.NumPoints);
                    Buffer.BlockCopy(zBytes, 0, zArray, zArrayInd, zBytes.Length);
                    zArrayInd += 8 * shape.NumPoints;

                    // These are listed as "optional" but there isn't a good indicator of how to
                    // determine if they were added.
                    // To handle the "optional" M values, check the contentLength for the feature.
                    // The content length does not include the 8-byte record header and is listed in 16-bit words.
                    if (shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints)
                    {
                        goto case ShapeType.PolyLineM;
                    }
                    break;
            }
        }

        // Publish the parsed data onto the shapefile.
        if (isM) shapefile.M = mArray;
        if (isZ) shapefile.Z = zArray;
        shapefile.ShapeIndices = shapeIndices;
        shapefile.Vertex = vert;
    }
    progressMeter.Reset();
}
/// <summary>
/// This assumes that the base image has been written to the file. This will now attempt to calculate
/// the down-sampled images.
/// </summary>
public void CreatePyramids()
{
    int w = _header.ImageHeaders[0].NumColumns;
    int h = _header.ImageHeaders[0].NumRows;

    // Cap each processing block at roughly 32 million pixels worth of rows to bound memory use.
    int blockHeight = 32000000 / w;
    if (blockHeight > h) blockHeight = h;
    int numBlocks = (int)Math.Ceiling(h / (double)blockHeight);
    ProgressMeter pm = new ProgressMeter(ProgressHandler, "Generating Pyramids", _header.ImageHeaders.Length * numBlocks);
    for (int block = 0; block < numBlocks; block++)
    {
        // Normally block height except for the lowest block which is usually smaller
        int bh = blockHeight;
        if (block == numBlocks - 1) bh = h - block * blockHeight;

        // Read a block of bytes into a bitmap
        byte[] vals = ReadWindow(block * blockHeight, 0, bh, w, 0);
        Bitmap bmp = new Bitmap(w, bh);
        BitmapData bd = bmp.LockBits(new Rectangle(0, 0, w, bh), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
        Marshal.Copy(vals, 0, bd.Scan0, vals.Length);
        bmp.UnlockBits(bd);

        // cycle through the scales, and write the resulting smaller bitmap in an appropriate spot
        // NOTE(review): the loop stops at Length - 2, so the smallest header appears never to be
        // written by this method (CreatePyramids2 writes to scale + 1 instead) — confirm intended.
        int sw = w; // scale width
        int sh = bh; // scale height
        int sbh = blockHeight;
        for (int scale = 1; scale < _header.ImageHeaders.Length - 1; scale++)
        {
            // Each level halves the dimensions; stop once the block vanishes.
            sw = sw / 2;
            sh = sh / 2;
            sbh = sbh / 2;
            if (sh == 0 || sw == 0)
            {
                break;
            }

            // Let GDI+ do the down-scaling by drawing into a half-size bitmap.
            Bitmap subSet = new Bitmap(sw, sh);
            Graphics g = Graphics.FromImage(subSet);
            g.DrawImage(bmp, 0, 0, sw, sh);
            bmp.Dispose(); // since we keep getting smaller, don't bother keeping the big image in memory any more.
            bmp = subSet; // keep the most recent image alive for making even smaller subsets.
            g.Dispose();

            // Copy the scaled pixels back out and write them to this scale's window.
            BitmapData bdata = bmp.LockBits(new Rectangle(0, 0, sw, sh), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
            byte[] res = new byte[sw * sh * 4];
            Marshal.Copy(bdata.Scan0, res, 0, res.Length);
            bmp.UnlockBits(bdata);
            WriteWindow(res, sbh * block, 0, sh, sw, scale);
            pm.CurrentValue = block * _header.ImageHeaders.Length + scale;
        }
        vals = null;
        bmp.Dispose();
    }
    pm.Reset();
}
/// <summary>
/// Creates a new instance of Smoother.
/// </summary>
/// <param name="stride">The byte stride of one image row.</param>
/// <param name="width">The image width in pixels.</param>
/// <param name="height">The image height in pixels.</param>
/// <param name="inRgbData">The caller-owned RGB byte buffer that is read from, and that the smoothed result is copied back into.</param>
/// <param name="progHandler">The handler that receives smoothing progress.</param>
public Smoother(int stride, int width, int height, byte[] inRgbData, IProgressHandler progHandler)
{
    _width = width;
    _height = height;
    _stride = stride;

    // Read directly from the caller's buffer; on completion the smoothed
    // bytes are copied back into that same buffer.
    _getByte = index => inRgbData[index];
    _copyResult = () => Array.Copy(_result, 0, inRgbData, 0, _result.Length);
    _result = new byte[inRgbData.Length];

    // Progress is reported per image row.
    _pm = new ProgressMeter(progHandler, "Smoothing Image", height);
}
/// <summary>
/// This writes a window of byte values (ARGB order) to the file. This assumes that the headers already exist.
/// If the headers have not been created or the bounds extend beyond the header numRows and numColumns for the
/// specified scale, this will throw an exception.
/// </summary>
/// <param name="bytes">The byte array</param>
/// <param name="startRow">The integer start row</param>
/// <param name="startColumn">The integer start column</param>
/// <param name="numRows">The integer number of rows in the window</param>
/// <param name="numColumns">The integer number of columns in the window</param>
/// <param name="scale">The integer scale. 0 is the original image.</param>
/// <exception cref="PyramidUndefinedHeaderException">Occurs when attempting to write data before the headers are defined</exception>
/// <exception cref="PyramidOutOfBoundsException">Occurs if the range specified is outside the bounds for the specified image scale</exception>
public void WriteWindow(byte[] bytes, int startRow, int startColumn, int numRows, int numColumns, int scale)
{
    // Convenience overload: supply a row-based progress meter and delegate to the full overload.
    ProgressMeter meter = new ProgressMeter(ProgressHandler, "Saving Pyramid Values", numRows);
    WriteWindow(bytes, startRow, startColumn, numRows, numColumns, scale, meter);
    meter.Reset();
}
/// <summary>
/// Reads the extent of every shape record from the .shp file into the _extents list,
/// reporting progress as it goes.
/// </summary>
/// <param name="fileName">The path of the .shp file to read extents from.</param>
/// <param name="progressHandler">The handler that receives read progress, may be null.</param>
private void FillIndexes(string fileName, IProgressHandler progressHandler)
{
    if (new FileInfo(fileName).Length == 100)
    {
        // the file is empty (only the 100-byte shapefile header) so we are done reading
        return;
    }

    var progressMeter = new ProgressMeter(progressHandler, "Reading from " + Path.GetFileName(fileName))
    {
        StepPercent = 5
    };
    var numShapes = ShapeHeaders.Count;
    _extents = new List<Extent>(numShapes);

    // CONSISTENCY FIX: open read-only with shared read access (matching the other shapefile
    // readers in this file) so concurrent readers or read-only media do not make the open fail.
    using (var reader = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        for (var shp = 0; shp < numShapes; shp++)
        {
            progressMeter.CurrentPercent = (int)(shp * 100.0 / numShapes);
            var extent = ReadExtent(shp, reader);
            _extents.Add(extent);
        }
    }
    progressMeter.Reset();
}
/// <summary>
/// This writes a window of byte values (ARGB order) to the file. This assumes that the headers already exist.
/// If the headers have not been created or the bounds extend beyond the header numRows and numColumns for the
/// specified scale, this will throw an exception.
/// </summary>
/// <param name="bytes">The byte array</param>
/// <param name="startRow">The integer start row</param>
/// <param name="startColumn">The integer start column</param>
/// <param name="numRows">The integer number of rows in the window</param>
/// <param name="numColumns">The integer number of columns in the window</param>
/// <param name="scale">The integer scale. 0 is the original image.</param>
/// <param name="pm">The progress meter to advance by row. Calls Next() for each row.</param>
/// <exception cref="PyramidUndefinedHeaderException">Occurs when attempting to write data before the headers are defined</exception>
/// <exception cref="PyramidOutOfBoundsException">Occurs if the range specified is outside the bounds for the specified image scale</exception>
public void WriteWindow(byte[] bytes, int startRow, int startColumn, int numRows, int numColumns, int scale, ProgressMeter pm)
{
    if (_header == null || _header.ImageHeaders.Length <= scale || _header.ImageHeaders[scale] == null)
    {
        throw new PyramidUndefinedHeaderException();
    }
    PyramidImageHeader ph = _header.ImageHeaders[scale];
    if (startRow < 0 || startColumn < 0 || numRows + startRow > ph.NumRows || numColumns + startColumn > ph.NumColumns)
    {
        throw new PyramidOutOfBoundsException();
    }

    if (startColumn == 0 && numColumns == ph.NumColumns)
    {
        // The window spans the full image width, so every row is contiguous on disk
        // and the whole buffer can be written in a single pass.
        // The using block guarantees the stream is closed even if the write throws.
        using (FileStream fs = new FileStream(Filename, FileMode.OpenOrCreate, FileAccess.Write))
        {
            fs.Seek(ph.Offset, SeekOrigin.Begin);

            // Cast to long before multiplying so large images don't overflow the int row offset.
            fs.Seek((long)startRow * ph.NumColumns * 4, SeekOrigin.Current);
            fs.Write(bytes, 0, bytes.Length);
        }
    }
    else
    {
        // Partial-width window: write one row at a time, skipping the bytes before and
        // after the window on each disk row.
        using (FileStream fs = new FileStream(Filename, FileMode.Open, FileAccess.Write))
        {
            fs.Seek(ph.Offset, SeekOrigin.Begin);
            fs.Seek((long)startRow * ph.NumColumns * 4, SeekOrigin.Current);
            int before = startColumn * 4;
            int after = (ph.NumColumns - (startColumn + numColumns)) * 4;
            for (int row = startRow; row < startRow + numRows; row++)
            {
                fs.Seek(before, SeekOrigin.Current);
                fs.Write(bytes, (row - startRow) * numColumns * 4, numColumns * 4);
                fs.Seek(after, SeekOrigin.Current);
                pm.Next();
            }

            // BUG FIX: removed a stray fs.Write(bytes, 0, bytes.Length) that followed the loop;
            // it re-wrote the entire buffer at the current position, corrupting the file
            // content beyond the intended window.
        }
    }
}
/// <summary>
/// This allows overriding layers to handle any memory cleanup.
/// </summary>
/// <param name="disposeManagedResources">True if managed resources should be set to null.</param>
protected virtual void Dispose(bool disposeManagedResources)
{
    // Disposal is idempotent: subsequent calls are no-ops.
    if (_isDisposed)
    {
        return;
    }
    if (disposeManagedResources)
    {
        // Drop event subscriptions so disposed layers don't keep subscribers alive (and vice versa).
        LayerSelected = null;
        ZoomToLayer = null;
        ShowProperties = null;
        FinishedLoading = null;
        SelectionChanged = null;

        // Release references held through the base class and this layer's own fields.
        base.ContextMenuItems = null;
        MyExtent = null;
        base.LegendText = null;
        _progressHandler = null;
        _progressMeter = null;
        _invalidatedRegion = null;
        _mapFrame = null;
        _propertyDialogProvider = null;
    }

    // Since the InnerDataset likely contains unmanaged memory constructs, dispose of it here.
    // UnlockDispose releases this layer's hold; the dataset is only disposed when no other
    // owner still holds a dispose lock on it.
    if (_dataSet != null)
    {
        _dataSet.UnlockDispose();
        if (!_dataSet.IsDisposeLocked)
        {
            _dataSet.Dispose();
        }
    }

    if (_editCopy != null) _editCopy.Dispose();
    _isDisposed = true;
}
/// <summary>
/// Creates a new instance of an attribute Table with no file reference.
/// </summary>
public AttributeTable()
{
    // 0x03 is the dBASE file-type marker written into the header.
    _fileType = 0x03;

    // The language driver id is derived from the platform's default encoding.
    _encoding = Encoding.Default;
    _ldid = DbaseLocaleRegistry.GetLanguageDriverId(_encoding);

    // Progress reporting goes through the application-wide default handler.
    _progressHandler = DataManager.DefaultDataManager.ProgressHandler;
    _progressMeter = new ProgressMeter(_progressHandler);

    // Start with empty row, column, and deleted-row storage.
    _dataTable = new DataTable();
    _columns = new List<Field>();
    _deletedRows = new List<int>();
}
/// <summary>
/// This populates the Table with data from the file.
/// </summary>
/// <param name="numRows">In the event that the dbf file is not found, this indicates how many blank rows should exist in the attribute Table.</param>
public void Fill(int numRows)
{
    _dataRowWatch = new Stopwatch();

    _dataTable.Rows.Clear(); // if we have already loaded data, clear the data.

    // No dbf on disk: synthesize a table with just an FID column and numRows blank rows.
    if (File.Exists(_fileName) == false)
    {
        _numRecords = numRows;
        _dataTable.BeginLoadData();
        if (!_dataTable.Columns.Contains("FID"))
        {
            _dataTable.Columns.Add("FID", typeof(int));
        }
        for (int row = 0; row < numRows; row++)
        {
            DataRow dr = _dataTable.NewRow();
            dr["FID"] = row;
            _dataTable.Rows.Add(dr);
        }
        _dataTable.EndLoadData();
        return;
    }

    // Byte offsets for each record must be known before rows can be read directly.
    if (!_loaded) GetRowOffsets();
    Stopwatch sw = new Stopwatch();
    sw.Start();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Reading from DBF Table...", _numRecords);

    // Coarser progress steps for smaller tables; the last matching condition wins.
    if (_numRecords < 10000000) ProgressMeter.StepPercent = 5;
    if (_numRecords < 5000000) ProgressMeter.StepPercent = 10;
    if (_numRecords < 100000) ProgressMeter.StepPercent = 50;
    if (_numRecords < 10000) ProgressMeter.StepPercent = 100;
    _dataTable.BeginLoadData();

    // Reading the Table elements as well as the shapes in a single progress loop.
    for (int row = 0; row < _numRecords; row++)
    {
        // --------- DATABASE --------- CurrentFeature = ReadTableRow(myReader);
        try
        {
            _dataTable.Rows.Add(ReadTableRowFromChars(row));
        }
        catch (Exception ex)
        {
            // A corrupt record is logged and replaced with a blank row so loading can continue.
            Debug.WriteLine(ex.ToString());
            _dataTable.Rows.Add(_dataTable.NewRow());
        }

        // If a progress message needs to be updated, this will handle that.
        ProgressMeter.CurrentValue = row;
    }
    ProgressMeter.Reset();
    _dataTable.EndLoadData();
    sw.Stop();
    Debug.WriteLine("Load Time:" + sw.ElapsedMilliseconds + " Milliseconds");
    Debug.WriteLine("Conversion:" + _dataRowWatch.ElapsedMilliseconds + " Milliseconds");
    _attributesPopulated = true;
    OnAttributesFilled();
}
// X Y Poly Lines: Total Length = 28 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1           Big
// Byte 4       Content Length      Integer     1           Big
// Byte 8       Shape Type 3        Integer     1           Little
// Byte 12      Xmin                Double      1           Little
// Byte 20      Ymin                Double      1           Little
// Byte 28      Xmax                Double      1           Little
// Byte 36      Ymax                Double      1           Little
// Byte 44      NumParts            Integer     1           Little
// Byte 48      NumPoints           Integer     1           Little
// Byte 52      Parts               Integer     NumParts    Little
// Byte X       Points              Point       NumPoints   Little

// X Y M Poly Lines: Total Length = 34 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1           Big
// Byte 4       Content Length      Integer     1           Big
// Byte 8       Shape Type 23       Integer     1           Little
// Byte 12      Box                 Double      4           Little
// Byte 44      NumParts            Integer     1           Little
// Byte 48      NumPoints           Integer     1           Little
// Byte 52      Parts               Integer     NumParts    Little
// Byte X       Points              Point       NumPoints   Little
// Byte Y*      Mmin                Double      1           Little
// Byte Y + 8*  Mmax                Double      1           Little
// Byte Y + 16* Marray              Double      NumPoints   Little

// X Y Z M Poly Lines: Total Length = 44 Bytes
// ---------------------------------------------------------
// Position     Value               Type        Number      Byte Order
// ---------------------------------------------------------
// Byte 0       Record Number       Integer     1           Big
// Byte 4       Content Length      Integer     1           Big
// Byte 8       Shape Type 13       Integer     1           Little
// Byte 12      Box                 Double      4           Little
// Byte 44      NumParts            Integer     1           Little
// Byte 48      NumPoints           Integer     1           Little
// Byte 52      Parts               Integer     NumParts    Little
// Byte X       Points              Point       NumPoints   Little
// Byte Y       Zmin                Double      1           Little
// Byte Y + 8   Zmax                Double      1           Little
// Byte Y + 16  Zarray              Double      NumPoints   Little
// Byte Z*      Mmin                Double      1           Little
// Byte Z+8*    Mmax                Double      1           Little
// Byte Z+16*   Marray              Double      NumPoints   Little

/// <summary>
/// Reads every polygon record from the specified shapefile, populating the shape
/// index, vertex, M and Z arrays and building the part ranges for each shape.
/// </summary>
/// <param name="fileName">The string path of the polygon shapefile to read.</param>
/// <param name="progressHandler">The progress handler used while reading the file.</param>
private void FillPolygons(string fileName, IProgressHandler progressHandler)
{
    // Check to ensure the fileName is not null.
    // FIX: the old code did Replace("%S", fileName) with a null fileName, which made
    // String.Replace throw ArgumentNullException("newValue") before the intended
    // exception could even be constructed. Substitute the parameter name instead.
    if (fileName == null)
    {
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", "fileName"));
    }
    if (File.Exists(fileName) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }

    // Get the basic header information.
    ShapefileHeader header = new ShapefileHeader(fileName);
    Extent = new Extent(new[] { header.Xmin, header.Ymin, header.Xmax, header.Ymax });

    // Check to ensure that the fileName is the correct shape type.
    // NOTE(review): FileNotLines_S reads oddly for a polygon check — presumably there is
    // no polygon-specific resource string; confirm before changing the message.
    if (header.ShapeType != ShapeType.Polygon && header.ShapeType != ShapeType.PolygonM
        && header.ShapeType != ShapeType.PolygonZ)
    {
        throw new ArgumentException(DataStrings.FileNotLines_S.Replace("%S", fileName));
    }

    // Reading the headers gives us an easier way to track the number of shapes and their overall length etc.
    List<ShapeHeader> shapeHeaders = ReadIndexFile(fileName);

    // TO DO: replace with a normal reader. We no longer need Buffered Binary reader as
    // the buffer can be set on the underlying file stream.
    BufferedBinaryReader bbReader = new BufferedBinaryReader(fileName, progressHandler);

    if (bbReader.FileLength == 100)
    {
        // The shapefile is empty so we can simply return here
        bbReader.Close();
        return;
    }

    // Skip the shapefile header by skipping the first 100 bytes in the shapefile
    bbReader.Seek(100, SeekOrigin.Begin);

    int numShapes = shapeHeaders.Count;
    int[] partOffsets = new int[numShapes];
    //byte[] allBounds = new byte[numShapes * 32];
    // probably all will be in one block, but use a byteBlock just in case.
    ByteBlock allParts = new ByteBlock(BLOCKSIZE);
    ByteBlock allCoords = new ByteBlock(BLOCKSIZE);
    bool isM = (header.ShapeType == ShapeType.PolygonM || header.ShapeType == ShapeType.PolygonZ);
    bool isZ = (header.ShapeType == ShapeType.PolygonZ);
    ByteBlock allZ = null;
    ByteBlock allM = null;
    if (isZ)
    {
        allZ = new ByteBlock(BLOCKSIZE);
    }
    if (isM)
    {
        allM = new ByteBlock(BLOCKSIZE);
    }

    int pointOffset = 0;
    for (int shp = 0; shp < numShapes; shp++)
    {
        // Read from the index file because some deleted records
        // might still exist in the .shp file.
        long offset = (shapeHeaders[shp].ByteOffset);
        bbReader.Seek(offset, SeekOrigin.Begin);

        // Position     Value           Type    Number  Byte Order
        ShapeRange shape = new ShapeRange(FeatureType.Polygon);      //-------------------------------------
        shape.RecordNumber = bbReader.ReadInt32(false);              // Byte 0   Record  Integer 1   Big
        shape.ContentLength = bbReader.ReadInt32(false);             // Byte 4   Length  Integer 1   Big
        // Setting shape type also controls extent class type.
        shape.ShapeType = (ShapeType)bbReader.ReadInt32();           // Byte 8   Type    Integer 1   Little
        shape.StartIndex = pointOffset;
        if (shape.ShapeType == ShapeType.NullShape)
        {
            continue;
        }

        shape.Extent.MinX = bbReader.ReadDouble();
        shape.Extent.MinY = bbReader.ReadDouble();
        shape.Extent.MaxX = bbReader.ReadDouble();
        shape.Extent.MaxY = bbReader.ReadDouble();
        shape.NumParts = bbReader.ReadInt32();                       // Byte 44  #Parts  Integer 1   Little
        shape.NumPoints = bbReader.ReadInt32();                      // Byte 48  #Points Integer 1   Little

        partOffsets[shp] = allParts.IntOffset();
        allParts.Read(shape.NumParts * 4, bbReader);
        allCoords.Read(shape.NumPoints * 16, bbReader);
        pointOffset += shape.NumPoints;

        if (header.ShapeType == ShapeType.PolygonM)
        {
            // These are listed as "optional" but there isn't a good indicator of
            // how to determine if they were added.
            // To handle the "optional" M values, check the contentLength for the feature.
            // The content length does not include the 8-byte record header and is listed in 16-bit words.
            if (shape.ContentLength * 2 > 44 + 4 * shape.NumParts + 16 * shape.NumPoints)
            {
                IExtentM mExt = (IExtentM)shape.Extent;
                mExt.MinM = bbReader.ReadDouble();
                mExt.MaxM = bbReader.ReadDouble();
                if (allM != null)
                {
                    allM.Read(shape.NumPoints * 8, bbReader);
                }
            }
        }
        if (header.ShapeType == ShapeType.PolygonZ)
        {
            bool hasM = shape.ContentLength * 2 > 60 + 4 * shape.NumParts + 24 * shape.NumPoints;
            IExtentZ zExt = (IExtentZ)shape.Extent;
            zExt.MinZ = bbReader.ReadDouble();
            zExt.MaxZ = bbReader.ReadDouble();

            // For Z shapefiles, the Z part is not optional.
            if (allZ != null)
            {
                allZ.Read(shape.NumPoints * 8, bbReader);
            }

            // These are listed as "optional" but there isn't a good indicator of
            // how to determine if they were added.
            // To handle the "optional" M values, check the contentLength for the feature.
            // The content length does not include the 8-byte record header and is listed in 16-bit words.
            if (hasM)
            {
                IExtentM mExt = (IExtentM)shape.Extent;
                mExt.MinM = bbReader.ReadDouble();
                mExt.MaxM = bbReader.ReadDouble();
                if (allM != null)
                {
                    allM.Read(shape.NumPoints * 8, bbReader);
                }
            }
        }

        ShapeIndices.Add(shape);
    }

    // FIX: the reader was only closed on the empty-file early return above; close it
    // here as well now that all records have been read, so the file handle is released.
    bbReader.Close();

    double[] vert = allCoords.ToDoubleArray();
    Vertex = vert;
    if (isM)
    {
        M = allM.ToDoubleArray();
    }
    if (isZ)
    {
        Z = allZ.ToDoubleArray();
    }
    List<ShapeRange> shapes = ShapeIndices;
    //double[] bounds = new double[numShapes * 4];
    //Buffer.BlockCopy(allBounds, 0, bounds, 0, allBounds.Length);
    int[] parts = allParts.ToIntArray();
    ProgressMeter = new ProgressMeter(ProgressHandler, "Testing Parts and Holes", shapes.Count);
    for (int shp = 0; shp < shapes.Count; shp++)
    {
        ShapeRange shape = shapes[shp];
        //shape.Extent = new Extent(bounds, shp * 4);
        for (int part = 0; part < shape.NumParts; part++)
        {
            int offset = partOffsets[shp];
            // The last part of a shape runs to the shape's end; every earlier part runs
            // to the start of the next part.
            int endIndex = shape.NumPoints + shape.StartIndex;
            int startIndex = parts[offset + part] + shape.StartIndex;
            if (part < shape.NumParts - 1)
            {
                endIndex = parts[offset + part + 1] + shape.StartIndex;
            }
            int count = endIndex - startIndex;
            PartRange partR = new PartRange(vert, shape.StartIndex, parts[offset + part], FeatureType.Polygon);
            partR.NumVertices = count;
            shape.Parts.Add(partR);
        }
        ProgressMeter.CurrentValue = shp;
    }
    ProgressMeter.Reset();
}