/// <summary>
/// Initializes a new instance of the <see cref="ShapefileDataReader"/> class
/// from a stream provider registry.
/// </summary>
/// <param name="streamProviderRegistry">The registry supplying the shapefile and dbase streams.</param>
/// <param name="geometryFactory">The geometry factory used to build geometries.</param>
public ShapefileDataReader(IStreamProviderRegistry streamProviderRegistry, IGeometryFactory geometryFactory)
{
    if (streamProviderRegistry == null)
    {
        throw new ArgumentNullException("streamProviderRegistry");
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException("geometryFactory");
    }

    _open = true;
    _dbfReader = new DbaseFileReader(streamProviderRegistry);
    _shpReader = new ShapefileReader(streamProviderRegistry, geometryFactory);
    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // Copy dbase fields to our own array, reserving index 0 for the
    // synthetic shape column.
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int fieldIndex = 0; fieldIndex < _dbfHeader.Fields.Length; fieldIndex++)
    {
        _dbaseFields[fieldIndex + 1] = _dbfHeader.Fields[fieldIndex];
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Writes a dummy dbase file containing a single character column and
/// <paramref name="recordCount"/> rows, then terminates and closes the writer.
/// </summary>
/// <param name="dbfWriter">The dbase file writer</param>
/// <param name="recordCount">The number of records</param>
public static void WriteDummyDbf(DbaseFileWriter dbfWriter, int recordCount)
{
    // Build the dummy header with one placeholder column.
    var dbfHeader = new DbaseFileHeader();
    dbfHeader.NumRecords = recordCount;
    dbfHeader.AddColumn("Description", 'C', 20, 0);
    dbfWriter.Write(dbfHeader);

    // One record per index value.
    for (int row = 0; row < recordCount; row++)
    {
        dbfWriter.Write(new List<double> { row });
    }

    // End of file flag (0x1A)
    dbfWriter.WriteEndOfDbf();
    dbfWriter.Close();
}
/// <summary>
/// Creates a reader over the shapefile and dbase data supplied by
/// <paramref name="streamProviderRegistry"/>.
/// </summary>
/// <param name="streamProviderRegistry">The registry supplying the shapefile and dbase streams.</param>
/// <param name="geometryFactory">The geometry factory used to build geometries.</param>
public ShapefileDataReader(IStreamProviderRegistry streamProviderRegistry, IGeometryFactory geometryFactory)
{
    if (streamProviderRegistry == null)
        throw new ArgumentNullException("streamProviderRegistry");
    if (geometryFactory == null)
        throw new ArgumentNullException("geometryFactory");

    _open = true;
    _dbfReader = new DbaseFileReader(streamProviderRegistry);
    _shpReader = new ShapefileReader(streamProviderRegistry, geometryFactory);
    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // Slot 0 holds the synthetic shape column; the dbase columns follow it.
    var sourceFields = _dbfHeader.Fields;
    _dbaseFields = new DbaseFieldDescriptor[sourceFields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    int slot = 1;
    foreach (var sourceField in sourceFields)
    {
        _dbaseFields[slot++] = sourceField;
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Initializes a new instance of the ShapefileDataReader class.
/// </summary>
/// <param name="filename">The shapefile to read (minus the .shp extension)</param>
/// <param name="geometryFactory">The GeometryFactory to use.</param>
public ShapefileDataReader(string filename, IGeometryFactory geometryFactory)
{
    if (String.IsNullOrEmpty(filename))
        throw new ArgumentNullException("filename");
    if (geometryFactory == null)
        throw new ArgumentNullException("geometryFactory");

    _open = true;

    // Derive the sibling .dbf and .shp paths from the supplied name.
    _dbfReader = new DbaseFileReader(Path.ChangeExtension(filename, "dbf"));
    _shpReader = new ShapefileReader(Path.ChangeExtension(filename, "shp"), geometryFactory);

    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // Copy the dbase fields, reserving the first slot for the shape column.
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int idx = 0; idx < _dbfHeader.Fields.Length; idx++)
        _dbaseFields[idx + 1] = _dbfHeader.Fields[idx];

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Gets a stub dbase header whose columns are derived from the attribute
/// names and types of <paramref name="feature"/>.
/// </summary>
/// <param name="feature">The feature.</param>
/// <param name="count">The count.</param>
/// <returns>A header describing one column per attribute.</returns>
public static DbaseFileHeader GetHeader(IFeature feature, int count)
{
    IAttributesTable attribs = feature.Attributes;
    DbaseFileHeader header = new DbaseFileHeader { NumRecords = count };
    foreach (string name in attribs.GetNames())
    {
        Type type = attribs.GetType(name);
        if (type == typeof(double) || type == typeof(float))
        {
            header.AddColumn(name, 'N', DoubleLength, DoubleDecimals);
        }
        else if (type == typeof(short) || type == typeof(ushort) ||
                 type == typeof(int) || type == typeof(uint) ||
                 type == typeof(long) || type == typeof(ulong))
        {
            // NOTE(review): 64-bit integers share IntLength here; values wider
            // than IntLength digits would not fit the column — confirm intended.
            header.AddColumn(name, 'N', IntLength, IntDecimals);
        }
        else if (type == typeof(string))
        {
            header.AddColumn(name, 'C', StringLength, StringDecimals);
        }
        else if (type == typeof(bool))
        {
            header.AddColumn(name, 'L', BoolLength, BoolDecimals);
        }
        else if (type == typeof(DateTime))
        {
            header.AddColumn(name, 'D', DateLength, DateDecimals);
        }
        else
        {
            throw new ArgumentException("Type " + type.Name + " not supported");
        }
    }
    return header;
}
/// <summary>
/// Initializes a new instance of the ShapefileDataReader class.
/// </summary>
/// <param name="filename">The shapefile to read (minus the .shp extension)</param>
/// <param name="geometryFactory">The GeometryFactory to use.</param>
public ShapefileDataReader(string filename, IGeometryFactory geometryFactory)
{
    if (String.IsNullOrEmpty(filename))
    {
        throw new ArgumentNullException("filename");
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException("geometryFactory");
    }

    _open = true;

    // Open the companion .dbf and .shp files derived from the base name.
    string dbfPath = Path.ChangeExtension(filename, "dbf");
    string shpPath = Path.ChangeExtension(filename, "shp");
    _dbfReader = new DbaseFileReader(dbfPath);
    _shpReader = new ShapefileReader(shpPath, geometryFactory);

    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // Prepend a synthetic shape column to the dbase field list.
    var fields = _dbfHeader.Fields;
    _dbaseFields = new DbaseFieldDescriptor[fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    int position = 1;
    foreach (var field in fields)
    {
        _dbaseFields[position++] = field;
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Gets the header from a dbf file.
/// </summary>
/// <param name="dbfFile">The path of the DBF file.</param>
/// <returns>The header read from the file.</returns>
/// <exception cref="FileNotFoundException">If <paramref name="dbfFile"/> does not exist.</exception>
public static DbaseFileHeader GetHeader(string dbfFile)
{
    if (!File.Exists(dbfFile))
        throw new FileNotFoundException(dbfFile + " not found");
    DbaseFileHeader header = new DbaseFileHeader();
    // Fix: the original never disposed the BinaryReader/FileStream, leaking
    // the file handle (and keeping the file locked) until finalization.
    using (var stream = new FileStream(dbfFile, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var reader = new BinaryReader(stream))
    {
        header.ReadHeader(reader, dbfFile);
    }
    return header;
}
/// <summary>
/// Reads the dbase header from the data stream supplied by
/// <paramref name="streamProviderRegistry"/>.
/// </summary>
/// <param name="streamProviderRegistry">The stream provider registry.</param>
/// <returns>The dbase file header.</returns>
public static DbaseFileHeader GetHeader(IStreamProviderRegistry streamProviderRegistry)
{
    var provider = streamProviderRegistry[StreamTypes.Data];
    // Only a file-backed provider can supply a path for diagnostics.
    var fileProvider = provider as FileStreamProvider;
    string sourcePath = fileProvider != null ? fileProvider.Path : null;

    var header = new DbaseFileHeader();
    using (var stream = provider.OpenRead())
    using (var reader = new BinaryReader(stream))
    {
        header.ReadHeader(reader, sourcePath);
    }
    return header;
}
/// <summary>
/// Reads the dbase header and positions the read cursor at the first data
/// record (immediately after the header).
/// </summary>
protected void ReadHeader()
{
    _header = new DbaseFileHeader();
    // read the header, decoding text with the parent's encoding provider
    _header.ReadHeader(_dbfReader, _parent._encodingProvider);
    // record data starts right after the header
    _readPosition = _header.HeaderLength;
}
/// <summary>
/// Reads the dbase header and positions the read cursor at the first data
/// record (immediately after the header).
/// </summary>
protected void ReadHeader()
{
    _header = new DbaseFileHeader();
    // read the header
    _header.ReadHeader(_dbfStream);
    // record data starts right after the header
    _readPosition = _header.HeaderLength;
}
/// <summary>
/// Regression test: writing a shapefile with an empty feature list must succeed.
/// </summary>
public void ok_when_writing_shapefile_with_no_features()
{
    var header = new DbaseFileHeader();
    header.AddColumn("X", 'C', 10, 0);

    var writer = new ShapefileDataWriter(@"issue36");
    writer.Header = header;

    IList<IFeature> features = new List<IFeature>();
    writer.Write(features);
}
/// <summary>
/// Method to write <paramref name="header"/> to the dbase stream.
/// The header is always written at position 0; the stream position is
/// restored afterwards so record writing can continue.
/// </summary>
/// <param name="header">The header to write</param>
/// <exception cref="ArgumentNullException">If <paramref name="header"/> is null.</exception>
public void Write(DbaseFileHeader header)
{
    if (header == null)
    {
        throw new ArgumentNullException("header");
    }

    _headerWritten = true;

    // Force the writer's encoding onto the header: assign it when unset, or
    // when it disagrees with the writer's code page. (Collapses the original
    // two sequential checks — same observable result.)
    if (header.Encoding == null || header.Encoding.WindowsCodePage != _encoding.WindowsCodePage)
    {
        header.Encoding = _encoding;
    }

    // Remember where we are so we can come back after (re)writing the header.
    var currentPosition = (int)_writer.BaseStream.Position;

    // Header should always be written first in the file.
    if (_writer.BaseStream.Position != 0)
    {
        _writer.Seek(0, SeekOrigin.Begin);
    }

    header.WriteHeader(_writer);

    // Reposition the stream where record writing left off.
    if (currentPosition != 0)
    {
        _writer.Seek(currentPosition, SeekOrigin.Begin);
    }

    _header = header;
}
/// <summary>
/// Reads the dbase header from the data stream supplied by
/// <paramref name="streamProviderRegistry"/>.
/// </summary>
/// <param name="streamProviderRegistry">The stream provider registry.</param>
/// <returns>The dbase file header.</returns>
public static DbaseFileHeader GetHeader(IStreamProviderRegistry streamProviderRegistry)
{
    var dataProvider = streamProviderRegistry[StreamTypes.Data];
    DbaseFileHeader header = new DbaseFileHeader();
    using (var stream = dataProvider.OpenRead())
    using (var reader = new BinaryReader(stream))
    {
        // Pass the backing file path (if any) along for diagnostics.
        string path = dataProvider is FileStreamProvider
            ? ((FileStreamProvider)dataProvider).Path
            : null;
        header.ReadHeader(reader, path);
    }
    return header;
}
/// <summary>
/// Builds a dbase header describing <paramref name="count"/> records with the
/// given field layout.
/// </summary>
/// <param name="dbFields">The field descriptors to copy into the header.</param>
/// <param name="count">The number of records.</param>
/// <returns>The constructed header.</returns>
public static DbaseFileHeader GetHeader(DbaseFieldDescriptor[] dbFields, int count)
{
    var header = new DbaseFileHeader { NumRecords = count };
    foreach (var descriptor in dbFields)
    {
        header.AddColumn(descriptor.Name, descriptor.DbaseType, descriptor.Length, descriptor.DecimalCount);
    }
    return header;
}
/// <summary>
/// Gets the header from a dbf file.
/// </summary>
/// <param name="dbfFile">The path of the DBF file.</param>
/// <returns>The header read from the file.</returns>
/// <exception cref="FileNotFoundException">If <paramref name="dbfFile"/> does not exist.</exception>
public static DbaseFileHeader GetHeader(string dbfFile)
{
    if (!File.Exists(dbfFile))
    {
        throw new FileNotFoundException(dbfFile + " not found");
    }

    DbaseFileHeader header = new DbaseFileHeader();
    // Fix: the original never disposed the BinaryReader/FileStream, leaking
    // the file handle (and keeping the file locked) until finalization.
    using (var stream = new FileStream(dbfFile, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var reader = new BinaryReader(stream))
    {
        header.ReadHeader(reader, dbfFile);
    }
    return header;
}
/// <summary>
/// Regression test: writing a shapefile with an empty feature list must not throw.
/// </summary>
public void ok_when_writing_shapefile_with_no_features()
{
    var header = new DbaseFileHeader();
    header.AddColumn("X", 'C', 10, 0);

    var writer = new ShapefileDataWriter(@"issue36");
    writer.Header = header;

    IList<IFeature> features = new List<IFeature>();
    Assert.DoesNotThrow(() => writer.Write(features));
    _numPassed++;
}
/// <summary>
/// Reads the dbase header and positions the read cursor at the first data
/// record (immediately after the header).
/// </summary>
protected void ReadHeader()
{
    _header = new DbaseFileHeader();
    // read the header; a file-backed provider also supplies its path for diagnostics
    _header.ReadHeader(_dbfStream, _parent._streamProvider is FileStreamProvider ? ((FileStreamProvider)_parent._streamProvider).Path : null);
    // record data starts right after the header
    _readPosition = _header.HeaderLength;
}
/// <summary>
/// Writes <paramref name="header"/> to the dbase stream and records it as the
/// current header. Must be called before any data records are written.
/// </summary>
/// <param name="header">The header to write.</param>
/// <exception cref="ArgumentNullException">If <paramref name="header"/> is null.</exception>
public void Write(DbaseFileHeader header)
{
    if (header == null)
        throw new ArgumentNullException("header");

    // NOTE: a guard against rewriting after records exist was once planned
    // here (checking _recordsWritten) but is intentionally not enforced.
    _headerWritten = true;
    header.WriteHeader(_writer);
    _header = header;
}
/// <summary>
/// Builds a dbase header describing <paramref name="count"/> records with the
/// given field layout.
/// </summary>
/// <param name="dbFields">The field descriptors to copy into the header.</param>
/// <param name="count">The number of records.</param>
/// <returns>The constructed header.</returns>
public static DbaseFileHeader GetHeader(DbaseFieldDescriptor[] dbFields, int count)
{
    DbaseFileHeader header = new DbaseFileHeader();
    header.NumRecords = count;
    for (int i = 0; i < dbFields.Length; i++)
    {
        DbaseFieldDescriptor field = dbFields[i];
        header.AddColumn(field.Name, field.DbaseType, field.Length, field.DecimalCount);
    }
    return header;
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header != null)
        return _header;

    using (var dbfReader = new BinaryReader(_streamProvider.OpenRead()))
    {
        var header = new DbaseFileHeader();
        header.ReadHeader(dbfReader, _encodingProvider);
        _header = header;
    }
    return _header;
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header != null)
        return _header;

    // A file-backed provider additionally supplies its path for diagnostics.
    var asFileProvider = _streamProvider as FileStreamProvider;
    string path = asFileProvider != null ? asFileProvider.Path : null;

    using (var stream = _streamProvider.OpenRead())
    using (var dbfStream = new BinaryReader(stream))
    {
        _header = new DbaseFileHeader();
        _header.ReadHeader(dbfStream, path);
    }
    return _header;
}
/// <summary>
/// Regression test: writing a shapefile containing a single feature must succeed.
/// </summary>
public void ok_when_writing_shapefile_with_features()
{
    var header = new DbaseFileHeader();
    header.AddColumn("X", 'C', 10, 0);

    var writer = new ShapefileDataWriter(@"issue36");
    writer.Header = header;

    var attributesTable = new AttributesTable();
    attributesTable.AddAttribute("X", "y");

    IList<IFeature> features = new List<IFeature>
    {
        new Feature(new Point(1, 2), attributesTable)
    };
    writer.Write(features);
}
/// <summary>
/// Writes <paramref name="header"/> to the dbase stream and records it as the
/// current header. Must be called before any data records are written.
/// </summary>
/// <param name="header">The header to write.</param>
/// <exception cref="ArgumentNullException">If <paramref name="header"/> is null.</exception>
public void Write(DbaseFileHeader header)
{
    if (header == null)
        throw new ArgumentNullException("header");

    _headerWritten = true;

    // Force the writer's encoding onto the header. Fix: also guard against a
    // header whose Encoding has not been assigned yet — dereferencing
    // header.Encoding.WindowsCodePage would otherwise throw a
    // NullReferenceException.
    if (header.Encoding == null || header.Encoding.WindowsCodePage != _encoding.WindowsCodePage)
    {
        header.Encoding = _encoding;
    }

    header.WriteHeader(_writer);
    _header = header;
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        // Fix: use using-blocks so the file is released even when ReadHeader
        // throws (the original only closed the stream on the success path).
        using (var stream = new FileStream(_filename, FileMode.Open, FileAccess.Read))
        using (var dbfStream = new BinaryReader(stream, PlatformUtilityEx.GetDefaultEncoding()))
        {
            _header = new DbaseFileHeader();
            // read the header
            _header.ReadHeader(dbfStream);
        }
    }
    return _header;
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        // Fix: use using-blocks so the file is released even when ReadHeader
        // throws (the original only closed the stream on the success path).
        using (var stream = new FileStream(_filename, FileMode.Open, FileAccess.Read))
        using (var dbfStream = new BinaryReader(stream))
        {
            _header = new DbaseFileHeader();
            // read the header
            _header.ReadHeader(dbfStream, _filename);
        }
    }
    return _header;
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        // Fix: use using-blocks so the file is released even when ReadHeader
        // throws (the original only closed the stream on the success path).
        using (var stream = IoManager.File.CreateFileStream(_filename, FileMode.Open, FileAccess.Read))
        using (var dbfStream = new BinaryReader(stream))
        {
            _header = new DbaseFileHeader();
            // read the header
            _header.ReadHeader(dbfStream, _filename);
        }
    }
    return _header;
}
/// <summary>
/// Write the enumeration of features to shapefile (shp, shx and dbf)
/// </summary>
/// <param name="filename">Filename to create</param>
/// <param name="features">Enumeration of features to write, features will be enumerated once</param>
/// <param name="fields">Fields that should be written, only those attributes specified here will be mapped from the feature attributetable while writing</param>
/// <param name="shapeGeometryType">Type of geometries shapefile</param>
/// <param name="dbfEncoding">Optional Encoding to be used when writing the DBF-file (default Windows-1252)</param>
public static void WriteFeatures(string filename, IEnumerable<IFeature> features, DbaseFieldDescriptor[] fields,
    ShapeGeometryType shapeGeometryType, Encoding dbfEncoding = null)
{
    // Default to the classic dbase code page when no encoding was given.
    var encoding = dbfEncoding ?? DbaseEncodingUtility.GetEncodingForCodePageIdentifier(1252);

    using (var shpWriter = new ShapefileWriter(Path.ChangeExtension(filename, ".shp"), shapeGeometryType))
    {
        using (var dbfWriter = new DbaseFileWriter(Path.ChangeExtension(filename, ".dbf"), encoding))
        {
            // Build and write the dbase header (record count fixed up below).
            var dbfHeader = new DbaseFileHeader(encoding);
            foreach (var field in fields)
            {
                dbfHeader.AddColumn(field.Name, field.DbaseType, field.Length, field.DecimalCount);
            }
            dbfWriter.Write(dbfHeader);

            // Reusable buffers for the attribute values of each feature.
            string[] attributeNames = Array.ConvertAll(fields, field => field.Name);
            var rowBuffer = new object[attributeNames.Length];

            int written = 0;
            foreach (var feature in features)
            {
                shpWriter.Write(feature.Geometry);
                for (int column = 0; column < attributeNames.Length; column++)
                {
                    rowBuffer[column] = feature.Attributes[attributeNames[column]];
                }
                dbfWriter.Write(rowBuffer);
                written++;
            }

            // Now that the true record count is known, rewrite the header.
            dbfHeader.NumRecords = written;
            dbfWriter.Write(dbfHeader);

            // write the end of dbase file marker and close
            dbfWriter.WriteEndOfDbf();
            dbfWriter.Close();
        }
    }
}
/// <summary>
/// Method to write <paramref name="header"/> to the dbase stream.
/// The header is always written at position 0; the stream position is
/// restored afterwards so record writing can continue.
/// </summary>
/// <param name="header">The header to write</param>
/// <exception cref="ArgumentNullException">If <paramref name="header"/> is null.</exception>
public void Write(DbaseFileHeader header)
{
    if (header == null)
        throw new ArgumentNullException("header");

    _headerWritten = true;

    // Force the writer's encoding onto the header. Fix: also guard against a
    // header whose Encoding has not been assigned yet (would otherwise NRE).
    if (header.Encoding == null || header.Encoding.WindowsCodePage != _encoding.WindowsCodePage)
    {
        header.Encoding = _encoding;
    }

    // Remember where we are so we can come back after (re)writing the header.
    var currentPosition = (int)_writer.BaseStream.Position;

    // Header should always be written first in the file.
    if (_writer.BaseStream.Position != 0)
        _writer.Seek(0, SeekOrigin.Begin);

    // actually write the header
    header.WriteHeader(_writer);

    // reposition the stream where record writing left off
    if (currentPosition != 0)
        _writer.Seek(currentPosition, SeekOrigin.Begin);

    _header = header;
}
/// <summary>
/// Gets a stub dbase header whose columns are derived from the attribute
/// names and types of <paramref name="feature"/>.
/// </summary>
/// <param name="feature">The feature.</param>
/// <param name="count">The count.</param>
/// <returns>A header describing one column per attribute.</returns>
public static DbaseFileHeader GetHeader(IFeature feature, int count)
{
    IAttributesTable attribs = feature.Attributes;
    DbaseFileHeader header = new DbaseFileHeader();
    header.NumRecords = count;

    foreach (string name in attribs.GetNames())
    {
        Type type = attribs.GetType(name);

        // Resolve the dbase column type and width for the CLR type.
        char columnType;
        int length;
        int decimals;
        if (type == typeof(double) || type == typeof(float))
        {
            columnType = 'N'; length = DoubleLength; decimals = DoubleDecimals;
        }
        else if (type == typeof(short) || type == typeof(ushort) ||
                 type == typeof(int) || type == typeof(uint))
        {
            columnType = 'N'; length = IntLength; decimals = IntDecimals;
        }
        else if (type == typeof(long) || type == typeof(ulong))
        {
            // 64-bit integers need the wider numeric column.
            columnType = 'N'; length = LongLength; decimals = IntDecimals;
        }
        else if (type == typeof(string))
        {
            columnType = 'C'; length = StringLength; decimals = StringDecimals;
        }
        else if (type == typeof(bool))
        {
            columnType = 'L'; length = BoolLength; decimals = BoolDecimals;
        }
        else if (type == typeof(DateTime))
        {
            columnType = 'D'; length = DateLength; decimals = DateDecimals;
        }
        else
        {
            throw new ArgumentException("Type " + type.Name + " not supported");
        }

        header.AddColumn(name, columnType, length, decimals);
    }
    return header;
}
/// <summary>
/// Initializes a new instance of the ShapeMemoryStreamDataReader class.
/// (Fix: the previous XML doc described a "filename" parameter this
/// stream-based constructor does not have.)
/// </summary>
/// <param name="shpStream">Stream containing the shapefile (.shp) data.</param>
/// <param name="dbfStream">Stream containing the dbase (.dbf) data.</param>
/// <param name="geometryFactory">The GeometryFactory to use.</param>
public ShapeMemoryStreamDataReader(Stream shpStream, Stream dbfStream, IGeometryFactory geometryFactory)
{
    if (shpStream == null)
    {
        throw new ArgumentNullException("shpStream");
    }
    if (dbfStream == null)
    {
        throw new ArgumentNullException("dbfStream");
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException("geometryFactory");
    }

    _open = true;

    _shpReader = new ShapeMemoryStreamReader(shpStream, geometryFactory);
    _dbfReader = new DbaseMemoryStreamReader(dbfStream);

    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // copy dbase fields to our own array, reserving index 0 for the shape column
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int i = 0; i < _dbfHeader.Fields.Length; i++)
    {
        _dbaseFields[i + 1] = _dbfHeader.Fields[i];
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Test fixture setup: writes a one-record shapefile whose dbase columns and
/// attribute values contain non-ASCII (Swedish) characters.
/// </summary>
public void Setup()
{
    var writer = new ShapefileDataWriter("encoding_sample");

    // Column order matters for the dbf layout — keep it as declared.
    var header = new DbaseFileHeader();
    header.AddColumn("id", 'n', 8, 0);
    header.AddColumn("Test", 'C', 15, 0);
    header.AddColumn("Ålder", 'N', 8, 0);
    header.AddColumn("Ödestext", 'C', 255, 0);
    header.NumRecords = 1;
    writer.Header = header;

    var attributes = new AttributesTable();
    attributes.AddAttribute("id", "0");
    attributes.AddAttribute("Test", "Testar");
    attributes.AddAttribute("Ålder", 10);
    attributes.AddAttribute("Ödestext", "Lång text med åäö etc");

    var featureList = new List<IFeature>
    {
        new Feature(new Point(0, 0), attributes)
    };
    writer.Write(featureList);
}
/// <summary>
/// Writes a dummy dbase file at <paramref name="filename"/> containing a
/// single character column and <paramref name="recordCount"/> rows.
/// </summary>
/// <param name="filename">Path of the dbf file to create.</param>
/// <param name="recordCount">The number of records to write.</param>
public static void WriteDummyDbf(string filename, int recordCount)
{
    var dbfHeader = new DbaseFileHeader { NumRecords = recordCount };
    dbfHeader.AddColumn("Description", 'C', 20, 0);

    var dbfWriter = new DbaseFileWriter(filename);
    dbfWriter.Write(dbfHeader);
    for (int row = 0; row < recordCount; row++)
    {
        dbfWriter.Write(new List<double> { row });
    }

    // End of file flag (0x1A)
    dbfWriter.Write(0x1A);
    dbfWriter.Close();
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access. The backing input stream is rewound afterwards so subsequent
/// reads start at the beginning.
/// </summary>
/// <returns>DbaseFileHeader contain header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    // Lazily read and cache the header on first access.
    if (_header == null)
    {
        // NOTE(review): the BinaryReader is deliberately not disposed here —
        // disposing it would close the caller-owned _inputStream. Confirm the
        // stream's lifetime is managed by the caller.
        //FileStream stream = new FileStream(_filename, FileMode.Open, FileAccess.Read);
        BinaryReader dbfStream = new BinaryReader(_inputStream, PlatformUtilityEx.GetDefaultEncoding());
        _header = new DbaseFileHeader();
        // read the header
        _header.ReadHeader(dbfStream);
        //TODO: SEEK?
        // rewind so later consumers of _inputStream see it from the start
        _inputStream.Seek(0, SeekOrigin.Begin);
        //dbfStream.Close();
        //stream.Close();
    }
    return(_header);
}
/// <summary>
/// Writes a dummy dbase file containing a single character column and
/// <paramref name="recordCount"/> rows. The filename's extension is forced
/// to ".dbf".
/// </summary>
/// <param name="filename">Base path of the dbf file to create.</param>
/// <param name="recordCount">The number of records to write.</param>
public static void WriteDummyDbf(string filename, int recordCount)
{
    // Always write to the .dbf sibling of the supplied path.
    string dbfPath = Path.ChangeExtension(filename, "dbf");

    var dbfHeader = new DbaseFileHeader();
    dbfHeader.NumRecords = recordCount;
    dbfHeader.AddColumn("Description", 'C', 20, 0);

    var dbfWriter = new DbaseFileWriter(dbfPath);
    dbfWriter.Write(dbfHeader);
    for (int row = 0; row < recordCount; row++)
    {
        var rowValues = new List<double> { row };
        dbfWriter.Write(rowValues);
    }

    // End of file flag (0x1A)
    dbfWriter.Write(0x1A);
    dbfWriter.Close();
}
/// <summary>
/// Gets the header information for the dbase file stored in isolated storage,
/// reading and caching it on first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        // Fix: removed redundant explicit Close() calls — the using-blocks
        // already dispose (and thereby close) the reader and stream.
        using (IsolatedStorageFile isf = IsolatedStorageFile.GetUserStoreForApplication())
        using (IsolatedStorageFileStream stream = new IsolatedStorageFileStream(_filename, FileMode.Open, FileAccess.Read, isf))
        using (BinaryReader dbfStream = new BinaryReader(stream))
        {
            _header = new DbaseFileHeader();
            // read the header
            _header.ReadHeader(dbfStream, string.Empty);
        }
    }
    return _header;
}
/// <summary>
/// Gets the header information for the dbase file stored in isolated storage,
/// reading and caching it on first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        // Fix: removed redundant explicit Close() calls — the using-blocks
        // already dispose (and thereby close) the reader and stream.
        using (IsolatedStorageFile isf = IsolatedStorageFile.GetUserStoreForApplication())
        using (IsolatedStorageFileStream stream = new IsolatedStorageFileStream(_filename, FileMode.Open, FileAccess.Read, isf))
        using (BinaryReader dbfStream = new BinaryReader(stream))
        {
            _header = new DbaseFileHeader();
            // read the header
            _header.ReadHeader(dbfStream, string.Empty);
        }
    }
    return _header;
}
/// <summary>
/// Write the enumeration of features to shapefile (shp, shx and dbf)
/// </summary>
/// <param name="filename">Filename to create</param>
/// <param name="features">Enumeration of features to write, features will be enumerated once</param>
/// <param name="fields">Fields that should be written, only those attributes specified here will be mapped from the feature attributetable while writing</param>
/// <param name="shapeGeometryType">Type of geometries shapefile</param>
/// <param name="dbfEncoding">Optional Encoding to be used when writing the DBF-file (default Windows-1252)</param>
public static void WriteFeatures(string filename, IEnumerable<IFeature> features, DbaseFieldDescriptor[] fields, ShapeGeometryType shapeGeometryType, Encoding dbfEncoding = null)
{
    // Set default encoding if not specified
    if (dbfEncoding == null)
        dbfEncoding = Encoding.GetEncoding(1252);

    // Open shapefile and dbase stream writers
    using (var shpWriter = new ShapefileWriter(Path.ChangeExtension(filename, ".shp"), shapeGeometryType))
    {
        using (var dbfWriter = new DbaseFileWriter(Path.ChangeExtension(filename, ".dbf"), dbfEncoding))
        {
            var dbfHeader = new DbaseFileHeader(dbfEncoding);
            foreach (var field in fields)
            {
                dbfHeader.AddColumn(field.Name, field.DbaseType, field.Length, field.DecimalCount);
            }
            dbfWriter.Write(dbfHeader);

            var numFeatures = 0;
            // Fix: hoist the attribute buffer out of the loop — it was
            // reallocated for every feature (its size never changes).
            var values = new object[fields.Length];
            foreach (var feature in features)
            {
                shpWriter.Write(feature.Geometry);
                for (var i = 0; i < fields.Length; i++)
                {
                    values[i] = feature.Attributes[fields[i].Name];
                }
                dbfWriter.Write(values);
                numFeatures++;
            }

            // set the number of records and update the header at position 0
            dbfHeader.NumRecords = numFeatures;
            dbfWriter.Write(dbfHeader);

            // write the end of dbase file marker
            dbfWriter.WriteEndOfDbf();
            // close the dbase stream
            dbfWriter.Close();
        }
    }
}
/// <summary>
/// Initializes a new instance of the DbaseFileWriter class using the provided
/// <paramref name="streamProviderRegistry"/> and the default encoding.
/// Delegates to the (registry, encoding) constructor, resolving the encoding
/// from the registry's DataEncoding stream.
/// </summary>
/// <param name="streamProviderRegistry">The stream provider registry</param>
public DbaseFileWriter(IStreamProviderRegistry streamProviderRegistry)
    : this(streamProviderRegistry, DbaseFileHeader.GetEncoding(streamProviderRegistry[StreamTypes.DataEncoding]))
{
}
/// <summary>
/// Writes a dummy dbase file (one character column, <paramref name="recordCount"/>
/// rows) through the supplied writer, then closes it.
/// </summary>
/// <param name="dbfWriter">The dbase file writer.</param>
/// <param name="recordCount">The number of records to write.</param>
public static void WriteDummyDbf(DbaseFileWriter dbfWriter, int recordCount)
{
    var dbfHeader = new DbaseFileHeader();
    dbfHeader.NumRecords = recordCount;
    dbfHeader.AddColumn("Description", 'C', 20, 0);
    dbfWriter.Write(dbfHeader);

    for (int row = 0; row < recordCount; row++)
    {
        dbfWriter.Write(new List<double> { row });
    }

    // End of file flag (0x1A)
    dbfWriter.Write(0x1A);
    dbfWriter.Close();
}
/// <summary>
/// Writes <paramref name="path"/> to the "graphresult" shapefile with a single
/// OBJECTID attribute, asserting that stale output is removed first and that
/// all three files (.shp/.shx/.dbf) exist afterwards.
/// </summary>
/// <param name="path">The geometry to persist.</param>
private void SaveGraphResult(IGeometry path)
{
    if (path == null)
        throw new ArgumentNullException("path");

    const string shapepath = "graphresult";

    // Remove any stale output from a previous run and verify deletion.
    string[] extensions = { shp, shx, dbf };
    foreach (var extension in extensions)
    {
        var staleFile = shapepath + extension;
        if (File.Exists(staleFile))
            File.Delete(staleFile);
        Assert.IsFalse(File.Exists(staleFile));
    }

    const string field1 = "OBJECTID";
    var feature = new Feature(path, new AttributesTable());
    feature.Attributes.AddAttribute(field1, 0);

    var header = new DbaseFileHeader { NumRecords = 1, NumFields = 1 };
    header.AddColumn(field1, 'N', 5, 0);

    var writer = new ShapefileDataWriter(shapepath, factory) { Header = header };
    writer.Write(new List<IFeature> { feature });

    // All three shapefile components must now exist.
    Assert.IsTrue(File.Exists(shapepath + shp));
    Assert.IsTrue(File.Exists(shapepath + shx));
    Assert.IsTrue(File.Exists(shapepath + dbf));
}
/// <summary>
/// Writes a dummy dbase file (one character column, <paramref name="recordCount"/>
/// rows) through the supplied writer, terminates the file and closes the writer.
/// </summary>
/// <param name="dbfWriter">The dbase file writer</param>
/// <param name="recordCount">The number of records</param>
public static void WriteDummyDbf(DbaseFileWriter dbfWriter, int recordCount)
{
    // Dummy header: a single placeholder character column.
    var header = new DbaseFileHeader();
    header.NumRecords = recordCount;
    header.AddColumn("Description", 'C', 20, 0);
    dbfWriter.Write(header);

    // One record per index value.
    for (int row = 0; row < recordCount; row++)
    {
        var rowValues = new List<double> { row };
        dbfWriter.Write(rowValues);
    }

    // End of file flag (0x1A)
    dbfWriter.WriteEndOfDbf();
    dbfWriter.Close();
}
/// <summary>
/// Gets the header information for the dbase file, reading and caching it on
/// first access.
/// </summary>
/// <returns>DbaseFileHeader containing header and field information.</returns>
public DbaseFileHeader GetHeader()
{
    if (_header == null)
    {
        using (var stream = _streamProvider.OpenRead())
        using (var reader = new BinaryReader(stream))
        {
            // A file-backed provider also supplies its path for diagnostics.
            string sourcePath = _streamProvider is FileStreamProvider
                ? ((FileStreamProvider)_streamProvider).Path
                : null;
            var header = new DbaseFileHeader();
            header.ReadHeader(reader, sourcePath);
            _header = header;
        }
    }
    return _header;
}
/// <summary>
/// Reads the dbase header and positions the read cursor at the first data
/// record (immediately after the header).
/// </summary>
protected void ReadHeader()
{
    _header = new DbaseFileHeader();
    // read the header; the parent's filename is passed for diagnostics
    _header.ReadHeader(_dbfStream, _parent._filename);
    // record data starts right after the header
    _readPosition = _header.HeaderLength;
}
/// <summary>
/// Writes a dummy dbase file containing a single character column and
/// <paramref name="recordCount"/> rows. The filename's extension is forced
/// to ".dbf".
/// </summary>
/// <param name="filename">Base path of the dbf file to create.</param>
/// <param name="recordCount">The number of records to write.</param>
public static void WriteDummyDbf(string filename, int recordCount)
{
    // Always target the .dbf sibling of the supplied path.
    filename = Path.ChangeExtension(filename, "dbf");

    var header = new DbaseFileHeader();
    header.NumRecords = recordCount;
    header.AddColumn("Description", 'C', 20, 0);

    var writer = new DbaseFileWriter(filename);
    writer.Write(header);
    for (int row = 0; row < recordCount; row++)
    {
        writer.Write(new List<double> { row });
    }

    // End of file flag (0x1A)
    writer.Write(0x1A);
    writer.Close();
}
/// <summary>
/// Writes <paramref name="header"/> to the dbase stream and records it as the
/// current header; the header's encoding is forced to the writer's encoding
/// when they disagree.
/// </summary>
/// <param name="header">The header to write.</param>
public void Write(DbaseFileHeader header)
{
    if (header == null)
        throw new ArgumentNullException("header");
    //if (_recordsWritten)
    //    throw new InvalidOperationException("Records have already been written. Header file needs to be written first.");
    _headerWritten = true;
    // The encoding comparison differs per platform: full framework compares
    // Windows code pages, Silverlight/PCL compare IANA web names.
#if !SILVERLIGHT && !PCL
    if (header.Encoding.WindowsCodePage != _encoding.WindowsCodePage)
    {
#else
    if (header.Encoding.WebName != _encoding.WebName)
    {
#endif
        header.Encoding = _encoding;
    }
    header.WriteHeader(_writer);
    _header = header;
}

/// <summary>
/// Writes one data record: the deleted-flag byte (0x20 = not deleted)
/// followed by each column value formatted according to the corresponding
/// header field descriptor. Values are matched to fields positionally.
/// </summary>
/// <param name="columnValues">The values for this record, one per header field.</param>
public void Write(IList columnValues)
{
    if (columnValues == null)
        throw new ArgumentNullException("columnValues");
    if (!_headerWritten)
        throw new InvalidOperationException("Header records need to be written first.");
    int i = 0;
    _writer.Write((byte)0x20); // the deleted flag
    foreach (object columnValue in columnValues)
    {
        DbaseFieldDescriptor headerField = _header.Fields[i];
        if (columnValue == null)
            // Don't corrupt the file by not writing if the value is null.
            // Instead, treat it like an empty string.
            Write(string.Empty, headerField.Length);
        else if (headerField.Type == typeof(string))
            // If the column is a character column, the values in that
            // column should be treated as text, even if the column value
            // is not a string.
            Write(columnValue.ToString(), headerField.Length);
        else if (IsRealType(columnValue.GetType()))
            Write(Convert.ToDecimal(columnValue), headerField.Length, headerField.DecimalCount);
        else if (IsIntegerType(columnValue.GetType()))
            Write(Convert.ToDecimal(columnValue), headerField.Length, headerField.DecimalCount);
        else if (columnValue is Decimal)
            Write((decimal)columnValue, headerField.Length, headerField.DecimalCount);
        else if (columnValue is Boolean)
            Write((bool)columnValue);
        else if (columnValue is string)
            Write((string)columnValue, headerField.Length);
        else if (columnValue is DateTime)
            Write((DateTime)columnValue);
        else if (columnValue is Char)
            Write((Char)columnValue, headerField.Length);
        // NOTE(review): a value whose type matches none of the branches above
        // is silently skipped (nothing is written for that field), which would
        // misalign the rest of the record — confirm all callers only pass the
        // supported types.
        i++;
    }
}