public ShapefileDataReader(IStreamProviderRegistry streamProviderRegistry, IGeometryFactory geometryFactory)
{
    if (streamProviderRegistry == null)
    {
        throw new ArgumentNullException("streamProviderRegistry");
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException("geometryFactory");
    }

    _open = true;

    _dbfReader = new DbaseFileReader(streamProviderRegistry);
    _shpReader = new ShapefileReader(streamProviderRegistry, geometryFactory);
    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // Copy the dbase fields to our own array, inserting the shape column at the first position.
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int i = 0; i < _dbfHeader.Fields.Length; i++)
    {
        _dbaseFields[i + 1] = _dbfHeader.Fields[i];
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Method to write a dummy dbase file
/// </summary>
/// <param name="dbfWriter">The dbase file writer</param>
/// <param name="recordCount">The number of records</param>
public static void WriteDummyDbf(DbaseFileWriter dbfWriter, int recordCount)
{
    // Create the dummy header
    var dbfHeader = new DbaseFileHeader { NumRecords = recordCount };

    // Add a dummy column
    dbfHeader.AddColumn("Description", 'C', 20, 0);

    // Write the header
    dbfWriter.Write(dbfHeader);

    // Write one dummy record per feature
    for (var i = 0; i < recordCount; i++)
    {
        var columnValues = new List<double> { i };
        dbfWriter.Write(columnValues);
    }

    // End of file flag (0x1A)
    dbfWriter.WriteEndOfDbf();

    dbfWriter.Close();
}
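A minimal usage sketch for this helper, pairing a geometry-only .shp with a placeholder .dbf. It assumes the DbaseFileWriter(string, Encoding) constructor used by WriteFeatures below; the file name and record count are illustrative, and the containing class of WriteDummyDbf is not shown in this excerpt, so the call is left unqualified.

// Illustrative only: give a geometry-only .shp a matching placeholder .dbf.
int recordCount = 42;   // should match the number of shapes written to the .shp (made-up value)
var dbfWriter = new DbaseFileWriter("polygons.dbf", Encoding.GetEncoding(1252));
WriteDummyDbf(dbfWriter, recordCount);   // writes the header, the dummy records and the EOF marker, then closes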
public static DbaseFileHeader GetHeader(IStreamProviderRegistry streamProviderRegistry)
{
    DbaseFileHeader header = new DbaseFileHeader();

    using (var stream = streamProviderRegistry[StreamTypes.Data].OpenRead())
    using (var reader = new BinaryReader(stream))
    {
        header.ReadHeader(reader,
            streamProviderRegistry[StreamTypes.Data] is FileStreamProvider
                ? ((FileStreamProvider)streamProviderRegistry[StreamTypes.Data]).Path
                : null);
    }

    return header;
}
/// <summary>
/// Reads the dbase file header and positions the reader at the first record.
/// </summary>
protected void ReadHeader()
{
    _header = new DbaseFileHeader();

    // read the header
    _header.ReadHeader(_dbfReader, _parent._encodingProvider);

    // position the reader just past the header, at the first record
    _readPosition = _header.HeaderLength;
}
/// <summary>
/// Method to write <paramref name="header"/> to the dbase stream
/// </summary>
/// <param name="header">The header to write</param>
public void Write(DbaseFileHeader header)
{
    if (header == null)
    {
        throw new ArgumentNullException("header");
    }

    //if (_recordsWritten)
    //    throw new InvalidOperationException("Records have already been written. Header file needs to be written first.");

    _headerWritten = true;

    // Set the encoding if not already done.
    if (header.Encoding == null)
    {
        header.Encoding = _encoding;
    }

    // Make sure the header's encoding matches the writer's encoding.
    if (header.Encoding.WindowsCodePage != _encoding.WindowsCodePage)
    {
        header.Encoding = _encoding;
    }

    // Remember the current position so it can be restored after the header is written.
    var currentPosition = (int)_writer.BaseStream.Position;

    // The header is always written at the start of the file.
    if (_writer.BaseStream.Position != 0)
    {
        _writer.Seek(0, SeekOrigin.Begin);
    }

    // Actually write the header.
    header.WriteHeader(_writer);

    // Reposition the stream.
    if (currentPosition != 0)
    {
        _writer.Seek(currentPosition, SeekOrigin.Begin);
    }

    _header = header;
}
/// <summary>
/// Write the enumeration of features to shapefile (shp, shx and dbf)
/// </summary>
/// <param name="filename">Filename to create</param>
/// <param name="features">Enumeration of features to write, features will be enumerated once</param>
/// <param name="fields">Fields that should be written, only the attributes specified here will be mapped from the feature attribute table while writing</param>
/// <param name="shapeGeometryType">Type of geometries in the shapefile</param>
/// <param name="coordinateSystem">Optional coordinate system; if provided, a .prj file is written alongside the shapefile</param>
/// <param name="dbfEncoding">Optional Encoding to be used when writing the DBF-file (default Windows-1252)</param>
public static void WriteFeatures(string filename, IEnumerable<IFeature> features, DbaseFieldDescriptor[] fields,
    ShapeGeometryType shapeGeometryType, ICoordinateSystem coordinateSystem = null, Encoding dbfEncoding = null)
{
    // Write the .prj file if a coordinate system was supplied
    if (coordinateSystem != null)
    {
        string prjFile = Path.ChangeExtension(filename, ".prj");
        PrjFileWriter.Write(prjFile, coordinateSystem, dbfEncoding == null ? Encoding.Default : dbfEncoding);
    }

    // Set default encoding if not specified
    if (dbfEncoding == null)
    {
        dbfEncoding = Encoding.GetEncoding(1252);
    }

    // Open shapefile and dbase stream writers
    using (var shpWriter = new ShapefileWriter(Path.ChangeExtension(filename, ".shp"), shapeGeometryType))
    {
        using (var dbfWriter = new DbaseFileWriter(Path.ChangeExtension(filename, ".dbf"), dbfEncoding))
        {
            // Build the dbase header from the field descriptors and write it
            var dbfHeader = new DbaseFileHeader(dbfEncoding);
            foreach (var field in fields)
            {
                dbfHeader.AddColumn(field.Name, field.DbaseType, field.Length, field.DecimalCount);
            }
            dbfWriter.Write(dbfHeader);

            // Write the features
            var numFeatures = 0;
            foreach (var feature in features)
            {
                shpWriter.Write(feature.Geometry);

                var values = new object[fields.Length];
                for (var i = 0; i < fields.Length; i++)
                {
                    values[i] = feature.Attributes[fields[i].Name];
                }
                dbfWriter.Write(values);
                numFeatures++;
            }

            // Set the number of records now that the features have been counted
            dbfHeader.NumRecords = numFeatures;

            // Update the header (it is rewritten at the start of the file)
            dbfWriter.Write(dbfHeader);

            // Write the end of dbase file marker
            dbfWriter.WriteEndOfDbf();

            // Close the dbase stream
            dbfWriter.Close();
        }
    }
}
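A sketch of calling WriteFeatures end-to-end. The containing static class is not shown in this excerpt, so the calls to WriteFeatures and to the GetHeader(IFeature, int) overload defined further below are left unqualified, and the feature data is invented for illustration.

// Illustrative only: build one point feature and write shp/shx/dbf in a single pass.
var factory = GeometryFactory.Default;

var attributes = new AttributesTable();
attributes.AddAttribute("Name", "first");
var features = new List<IFeature>
{
    new Feature(factory.CreatePoint(new Coordinate(1, 1)), attributes)
};

// Derive the dbase field layout from the first feature (see GetHeader(IFeature, int) below)
var fields = GetHeader(features[0], features.Count).Fields;

// "points" is an illustrative base file name; .shp/.shx/.dbf extensions are added by the writer
WriteFeatures("points", features, fields, ShapeGeometryType.Point);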
/// <summary>
/// Gets the header information for the dbase file.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        using (var dbfReader = new BinaryReader(_streamProvider.OpenRead()))
        {
            // read the header
            _header = new DbaseFileHeader();
            _header.ReadHeader(dbfReader, _encodingProvider);
        }
    }
    return _header;
}
public static DbaseFileHeader GetHeader(DbaseFieldDescriptor[] dbFields, int count)
{
    DbaseFileHeader header = new DbaseFileHeader();
    header.NumRecords = count;

    foreach (DbaseFieldDescriptor dbField in dbFields)
    {
        header.AddColumn(dbField.Name, dbField.DbaseType, dbField.Length, dbField.DecimalCount);
    }

    return header;
}
/// <summary>
/// Initializes a new instance of the ShapefileDataReader class.
/// </summary>
/// <param name="filename">The shapefile to read (minus the .shp extension)</param>
/// <param name="geometryFactory">The GeometryFactory to use.</param>
/// <param name="encoding">The encoding to use for reading the attribute data</param>
public ShapefileDataReader(string filename, IGeometryFactory geometryFactory, Encoding encoding)
{
    if (String.IsNullOrEmpty(filename))
    {
        throw new ArgumentNullException("filename");
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException("geometryFactory");
    }

    _open = true;

    string prjFile = Path.ChangeExtension(filename, ".prj");
    CoordinateSystem = File.Exists(prjFile) ? PrjFileReader.Read(prjFile) : null;

    string dbfFile = Path.ChangeExtension(filename, "dbf");
    _dbfReader = encoding != null
        ? new DbaseFileReader(dbfFile, encoding)
        : new DbaseFileReader(dbfFile);

    string shpFile = Path.ChangeExtension(filename, "shp");
    _shpReader = new ShapefileReader(shpFile, geometryFactory);

    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // Copy the dbase fields to our own array, inserting the shape column at the first position.
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int i = 0; i < _dbfHeader.Fields.Length; i++)
    {
        _dbaseFields[i + 1] = _dbfHeader.Fields[i];
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;

    CheckIfHasFID();
}
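A minimal sketch of consuming a reader created with this constructor. The DbaseHeader and Geometry members and the IDataReader-style Read() loop used below belong to the reader's wider public surface and are assumed here; only the constructor itself appears in this excerpt, and the file name and encoding are illustrative.

// Illustrative only: iterate a shapefile's records.
using (var reader = new ShapefileDataReader("points", GeometryFactory.Default, Encoding.GetEncoding(1252)))
{
    var header = reader.DbaseHeader;          // assumed property exposing the dbase header read above
    int recordCount = header.NumRecords;

    while (reader.Read())                     // IDataReader-style advance, assumed
    {
        var geometry = reader.Geometry;       // the shape column (ordinal 0 in _dbaseFields above)
        // attribute values are available through the IDataRecord members (GetValue, GetOrdinal, ...)
    }
}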
/// <summary>
/// Gets a stub header derived from the attributes of the given feature.
/// </summary>
/// <param name="feature">The feature whose attributes define the dbase columns.</param>
/// <param name="count">The number of records.</param>
/// <returns>A DbaseFileHeader with one column per attribute of <paramref name="feature"/>.</returns>
public static DbaseFileHeader GetHeader(IFeature feature, int count)
{
    IAttributesTable attribs = feature.Attributes;
    string[] names = attribs.GetNames();

    DbaseFileHeader header = new DbaseFileHeader();
    header.NumRecords = count;

    foreach (string name in names)
    {
        Type type = attribs.GetType(name);
        if (type == typeof(double) || type == typeof(float))
        {
            header.AddColumn(name, 'N', DoubleLength, DoubleDecimals);
        }
        else if (type == typeof(short) || type == typeof(ushort) || type == typeof(int) || type == typeof(uint))
        {
            header.AddColumn(name, 'N', IntLength, IntDecimals);
        }
        else if (type == typeof(long) || type == typeof(ulong))
        {
            header.AddColumn(name, 'N', LongLength, IntDecimals);
        }
        else if (type == typeof(string))
        {
            header.AddColumn(name, 'C', StringLength, StringDecimals);
        }
        else if (type == typeof(bool))
        {
            header.AddColumn(name, 'L', BoolLength, BoolDecimals);
        }
        else if (type == typeof(DateTime))
        {
            header.AddColumn(name, 'D', DateLength, DateDecimals);
        }
        else
        {
            throw new ArgumentException("Type " + type.Name + " not supported");
        }
    }

    return header;
}
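A small sketch of how this overload maps CLR attribute types to dbase columns. The attribute names and values are invented for illustration, and the call is unqualified because the containing class is not shown in this excerpt.

// Illustrative only: derive a header from a feature and inspect the generated columns.
var attributes = new AttributesTable();
attributes.AddAttribute("Label", "sample");   // string  -> 'C' column
attributes.AddAttribute("Area", 12.5);        // double  -> 'N' column

var feature = new Feature(GeometryFactory.Default.CreatePoint(new Coordinate(0, 0)), attributes);
DbaseFileHeader header = GetHeader(feature, 1);

foreach (DbaseFieldDescriptor field in header.Fields)
{
    Console.WriteLine("{0}: {1} (length {2}, decimals {3})",
        field.Name, field.DbaseType, field.Length, field.DecimalCount);
}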
/// <summary>
/// Initializes a new instance of the DbaseFileWriter class using the provided <paramref name="streamProviderRegistry"/> and the default encoding
/// </summary>
/// <param name="streamProviderRegistry">The stream provider registry</param>
public DbaseFileWriter(IStreamProviderRegistry streamProviderRegistry)
    : this(streamProviderRegistry, DbaseFileHeader.GetEncoding(streamProviderRegistry[StreamTypes.DataEncoding]))
{
}