/// <summary>
/// Loads every record of a shapefile as a feature (geometry plus dbase attributes).
/// </summary>
/// <param name="filename">Path to the shapefile.</param>
/// <returns>The features read, or an empty list when the file does not exist.</returns>
public List<Feature> LoadFeatureList(string filename)
{
    var features = new List<Feature>();
    if (!File.Exists(filename))
    {
        return features;
    }

    using (var sdr = new ShapefileDataReader(filename, GeometryFactory.Default))
    {
        DbaseFileHeader header = sdr.DbaseHeader;
        while (sdr.Read())
        {
            var feature = new Feature();
            var attributes = new AttributesTable();
            Geometry geometry = (Geometry)sdr.Geometry;

            // Copy every dbase field of the current record into the attribute table.
            for (int fieldIndex = 0; fieldIndex < header.NumFields; fieldIndex++)
            {
                DbaseFieldDescriptor descriptor = header.Fields[fieldIndex];
                attributes.AddAttribute(descriptor.Name, sdr.GetValue(fieldIndex));
            }

            feature.Geometry = geometry;
            feature.Attributes = attributes;
            features.Add(feature);
        }
    }

    return features;
}
/// <summary>
/// Method to write a dummy dbase file
/// </summary>
/// <param name="dbfWriter">The dbase file writer</param>
/// <param name="recordCount">The number of records</param>
public static void WriteDummyDbf(DbaseFileWriter dbfWriter, int recordCount)
{
    // Build a minimal header carrying a single dummy character column.
    var header = new DbaseFileHeader { NumRecords = recordCount };
    header.AddColumn("Description", 'C', 20, 0);
    dbfWriter.Write(header);

    // One record per requested row; the record index doubles as the value.
    for (var recordIndex = 0; recordIndex < recordCount; recordIndex++)
    {
        dbfWriter.Write(new List<double> { recordIndex });
    }

    // Terminate with the end-of-file flag (0x1A) and release the writer.
    dbfWriter.WriteEndOfDbf();
    dbfWriter.Close();
}
/// <summary>
/// Writes the dbase records as a new entry into the given zip archive.
/// </summary>
/// <param name="archive">The archive to add the entry to.</param>
/// <param name="context">The editor context (required but not read here).</param>
/// <param name="cancellationToken">Token observed while flushing the entry stream.</param>
public async Task WriteAsync(ZipArchive archive, EditorContext context, CancellationToken cancellationToken)
{
    if (archive == null)
    {
        throw new ArgumentNullException(nameof(archive));
    }

    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }

    var entry = archive.CreateEntry(_filename);
    var header = new DbaseFileHeader(
        DateTime.Now,
        DbaseCodePage.Western_European_ANSI,
        new DbaseRecordCount(_records.Count),
        _schema);

    using (var entryStream = entry.Open())
    using (var writer = new DbaseBinaryWriter(
        header,
        new BinaryWriter(entryStream, _encoding, true)))
    {
        // Emit every record, then flush both the binary writer and the entry stream.
        foreach (var record in _records)
        {
            writer.Write(record);
        }

        writer.Writer.Flush();
        await entryStream.FlushAsync(cancellationToken);
    }
}
/// <summary>
/// Reads every record of a shapefile into an array of annotated, normalized geometries,
/// copying each readable dbase attribute into the annotation data.
/// </summary>
/// <param name="fileName">Path to the shapefile.</param>
private static AnnotatedObject<Geometry>[] getGeomsFromShpFile(string fileName)
{
    var results = new List<AnnotatedObject<Geometry>>();
    using (var reader = new ShapefileDataReader(fileName, GeometryFactory.Default))
    {
        while (reader.Read())
        {
            DbaseFileHeader header = reader.DbaseHeader;
            int fieldCount = header.NumFields;

            var geometry = reader.Geometry;
            geometry.Normalize();

            var annotated = new AnnotatedObject<Geometry>(geometry);
            for (int field = 0; field < fieldCount; field++)
            {
                try
                {
                    // Reader value index 0 is the geometry column, so field i lives at i + 1.
                    annotated.Data[header.Fields[field].Name] = reader.GetValue(field + 1);
                }
                catch
                {
                    // Best-effort: an unreadable attribute value is skipped, not fatal.
                }
            }

            results.Add(annotated);
        }
    }

    return results.ToArray();
}
/// <summary>
/// Reads a shapefile into a arraylist of features that need converting from x,y coordinates to Long and Lat coordinates
/// </summary>
/// <param name="filename">name of the shapefile (the file that has all the polygons for the footpaths)</param>
/// <param name="fact">the class that generates the structure of the points</param>
/// <returns>An ArrayList of Feature objects, one per shapefile record.</returns>
public ArrayList ReadSHP(string filename, GeometryFactory fact)
{
    ArrayList features = new ArrayList(); //Array list for all the coordinates from the shapefile

    // 'using' guarantees the reader is disposed even when reading throws;
    // the original only closed it on the success path, leaking the file handle on error.
    using (ShapefileDataReader sfDataReader = new ShapefileDataReader(filename, fact))
    {
        DbaseFileHeader DHeader = sfDataReader.DbaseHeader;
        while (sfDataReader.Read())
        {
            Feature feature = new Feature();
            AttributesTable atTable = new AttributesTable();
            Geometry geometry = sfDataReader.Geometry;

            // Copy every dbase attribute of the current record.
            for (int i = 0; i < DHeader.NumFields; i++)
            {
                DbaseFieldDescriptor fldDescriptor = DHeader.Fields[i];
                atTable.Add(fldDescriptor.Name, sfDataReader.GetValue(i));
            }

            feature.Geometry = geometry;
            feature.Attributes = atTable;
            features.Add(feature);
        }

        sfDataReader.Close();
    }

    return features;
}
/// <summary>
/// Reads up to <paramref name="max"/> features from a shapefile.
/// </summary>
/// <param name="shpFilename">Path to the .shp file.</param>
/// <param name="max">Upper bound on the number of features to read.</param>
/// <returns>The features read.</returns>
public static List<Feature> ReadShapefile(string shpFilename, int max = int.MaxValue)
{
    var features = new List<Feature>();
    using (var reader = new ShapefileDataReader(shpFilename, new GeometryFactory()))
    {
        DbaseFileHeader header = reader.DbaseHeader;
        int readSoFar = 0;
        while (reader.Read())
        {
            // Stop once the caller's limit is exceeded.
            readSoFar++;
            if (readSoFar > max)
            {
                break;
            }

            var attributes = new AttributesTable();
            for (int field = 0; field < header.NumFields; field++)
            {
                attributes.AddAttribute(header.Fields[field].Name, reader.GetValue(field));
            }

            features.Add(new Feature(reader.Geometry, attributes));
        }
    }

    return features;
}
/// <summary>
/// Verifies that a dbase date column is read back as the expected DateTime (2006-03-10).
/// </summary>
public void ReadDbfDate()
{
    string file = Path.Combine(
        AppDomain.CurrentDomain.BaseDirectory,
        string.Format("..{0}..{0}..{0}NetTopologySuite.Samples.Shapefiles{0}date.dbf", Path.DirectorySeparatorChar));
    if (!File.Exists(file))
    {
        throw new FileNotFoundException("file not found at " + Path.GetDirectoryName(file));
    }

    var reader = new DbaseFileReader(file);
    DbaseFileHeader header = reader.GetHeader();

    // Pull the first record off the enumerator.
    IEnumerator enumerator = reader.GetEnumerator();
    enumerator.MoveNext();
    var items = enumerator.Current as ArrayList;

    Assert.IsNotNull(items);
    Assert.AreEqual(2, items.Count);
    foreach (object item in items)
    {
        Assert.IsNotNull(item);
    }

    // The second column holds the date value.
    DateTime date = (DateTime)items[1];
    Assert.AreEqual(10, date.Day);
    Assert.AreEqual(3, date.Month);
    Assert.AreEqual(2006, date.Year);
}
/// <summary>
/// Reads all remaining records of the reader into a feature collection.
/// </summary>
/// <param name="shapefileDataReader">An open shapefile data reader.</param>
/// <returns>The features read (possibly empty).</returns>
public static IReadOnlyCollection<Feature> ReadFeatures(this ShapefileDataReader shapefileDataReader)
{
    List<Feature> features = new List<Feature>();

    // The dbase header does not change between records; read it once
    // instead of once per row as the original did.
    DbaseFileHeader header = shapefileDataReader.DbaseHeader;

    while (shapefileDataReader.Read())
    {
        Feature feature = new Feature();
        AttributesTable attributesTable = new AttributesTable();
        var geometry = shapefileDataReader.Geometry;

        for (int i = 0; i < header.NumFields; i++)
        {
            DbaseFieldDescriptor fldDescriptor = header.Fields[i];
            // First Field Geometry: value index 0 is the geometry column,
            // so dbase field i lives at value index i + 1.
            var value = shapefileDataReader.GetValue(i + 1);
            attributesTable.Add(fldDescriptor.Name, value);
        }

        feature.Geometry = geometry;
        feature.Attributes = attributesTable;
        features.Add(feature);
    }

    return features;
}
/// <summary>
/// Builds the dbase header describing the Strassenabschnitt (STRAB) export schema.
/// Column order must stay exactly as listed; consumers address fields positionally.
/// </summary>
private DbaseFileHeader getStrabsHeader()
{
    DbaseFileHeader header = new DbaseFileHeader();

    // Local helpers so each column reads as a single line: text = 'C', number = 'N'.
    Action<string> addText = name => header.AddColumn(name, 'C', StringLength, StringDecimals);
    Action<string> addNumber = name => header.AddColumn(name, 'N', DoubleLength, DoubleDecimals);

    addText(StrabShapeFileConstants.ID);
    addText(StrabShapeFileConstants.Strassenname);
    addText(StrabShapeFileConstants.Bezeichnungbis);
    addText(StrabShapeFileConstants.Bezeichnungvon);
    addText(StrabShapeFileConstants.Eigentuemer);
    addText(StrabShapeFileConstants.Ortsbezeichnung);
    addText(StrabShapeFileConstants.Belastungskategorie);
    addText(StrabShapeFileConstants.Belag);
    addNumber(StrabShapeFileConstants.BreiteFahrbahn);
    addNumber(StrabShapeFileConstants.Laenge);
    addNumber(StrabShapeFileConstants.FlaecheFahrbahn);
    addText(StrabShapeFileConstants.Trottoir);
    addNumber(StrabShapeFileConstants.BreiteTrottoirlinks);
    addNumber(StrabShapeFileConstants.BreiteTrottoirrechts);
    addNumber(StrabShapeFileConstants.FlaecheTrottoirlinks);
    addNumber(StrabShapeFileConstants.FlaecheTrottoirrechts);
    addNumber(StrabShapeFileConstants.FlaecheTrottoir);
    addNumber(StrabShapeFileConstants.Wiederbeschaffungswert);
    addNumber(StrabShapeFileConstants.AlterungsbeiwertI);
    addNumber(StrabShapeFileConstants.WertverlustI);
    addNumber(StrabShapeFileConstants.AlterungsbeiwertII);
    addNumber(StrabShapeFileConstants.WertverlustII);

    return header;
}
/// <summary>
/// Test getting and setting the properties
/// </summary>
public void Test_TestProperties()
{
    // Read the header of the statepop sample and verify its metadata.
    DbaseFileReader dbfReader = new DbaseFileReader(Global.GetUnitTestRootDirectory() + @"\IO\Shapefile\Testfiles\statepop.dbf");
    DbaseFileHeader dbfHeader = dbfReader.GetHeader();
    Assertion.AssertEquals("Dbase header: Num records", 49, dbfHeader.NumRecords);
    Assertion.AssertEquals("Dbase header: Num fields", 252, dbfHeader.NumFields);
    // Spot-check the first and the last field descriptor.
    Assertion.AssertEquals("Field 0: name", "STATE_NAME", dbfHeader.Fields[0].Name);
    Assertion.AssertEquals("Field 0: name", 'C', dbfHeader.Fields[0].DbaseType);
    Assertion.AssertEquals("Field 0: name", typeof(string), dbfHeader.Fields[0].Type);
    Assertion.AssertEquals("Field 0: name", 25, dbfHeader.Fields[0].Length);
    Assertion.AssertEquals("Field 251: name", "SAMP_POP", dbfHeader.Fields[251].Name);
    Assertion.AssertEquals("Field 251: name", 'N', dbfHeader.Fields[251].DbaseType);
    Assertion.AssertEquals("Field 251: name", typeof(double), dbfHeader.Fields[251].Type);
    Assertion.AssertEquals("Field 251: name", 19, dbfHeader.Fields[251].Length);
    // note alaska and hawaii are missing - hence 48 states not 50.
    // First enumeration: spot-check row 0 (Illinois) and row 48 (Washington).
    int i = 0;
    foreach (ArrayList columnValues in dbfReader)
    {
        if (i == 0)
        {
            Assertion.AssertEquals("Row1: STATE_NAME:", "Illinois", columnValues[0]);
            Assertion.AssertEquals("Row1: STATE_FIPS:", "17", columnValues[1].ToString());
            Assertion.AssertEquals("Row1: SAMP_POP", 1747776.0, columnValues[251]);
        }
        if (i == 48)
        {
            Assertion.AssertEquals("Row1: STATE_NAME:", "Washington", columnValues[0]);
            Assertion.AssertEquals("Row1: STATE_FIPS:", "53", columnValues[1].ToString());
            Assertion.AssertEquals("Row1: SAMP_POP", 736744.0, columnValues[251]);
        }
        i++;
    }
    // NOTE(review): with 49 records, i ends at 49, so this asserts i - 1 == 48.
    // The message reads like a record-count check — confirm the "- 1" is intended.
    Assertion.AssertEquals("48 Records", 48, i - 1);
    // Second pass over a fresh reader proves the file can be enumerated repeatably.
    DbaseFileReader dbfReader2 = new DbaseFileReader(Global.GetUnitTestRootDirectory() + @"\IO\Shapefile\Testfiles\statepop.dbf");
    i = 0;
    foreach (ArrayList columnValues in dbfReader2)
    {
        if (i == 0)
        {
            Assertion.AssertEquals("Row1: STATE_NAME:", "Illinois", columnValues[0]);
            Assertion.AssertEquals("Row1: STATE_FIPS:", "17", columnValues[1].ToString());
            Assertion.AssertEquals("Row1: SAMP_POP", 1747776.0, columnValues[251]);
        }
        if (i == 48)
        {
            Assertion.AssertEquals("Row1: STATE_NAME:", "Washington", columnValues[0]);
            Assertion.AssertEquals("Row1: STATE_FIPS:", "53", columnValues[1].ToString());
            Assertion.AssertEquals("Row1: SAMP_POP", 736744.0, columnValues[251]);
        }
        i++;
    }
    Assertion.AssertEquals("48 Records", 48, i - 1);
}
/// <summary>
/// Builds the dbase header describing the Trottoir (sidewalk) export schema.
/// Column order must stay exactly as listed; consumers address fields positionally.
/// </summary>
private DbaseFileHeader getTrottoirHeader()
{
    DbaseFileHeader header = new DbaseFileHeader();

    // Local helpers: text = 'C', number = 'N', date = 'D'.
    Action<string> addText = name => header.AddColumn(name, 'C', StringLength, StringDecimals);
    Action<string> addNumber = name => header.AddColumn(name, 'N', DoubleLength, DoubleDecimals);
    Action<string> addDate = name => header.AddColumn(name, 'D', DateLength, DateDecimals);

    addText(TrottoirShapeFileConstants.StrassenabschnittID);
    addText(TrottoirShapeFileConstants.Strassenname);
    addText(TrottoirShapeFileConstants.StrassenabschnittBezeichnungvon);
    addText(TrottoirShapeFileConstants.StrassenabschnittBezeichnungbis);
    addText(TrottoirShapeFileConstants.Eigentuemer);
    addText(TrottoirShapeFileConstants.Ortsbezeichnung);
    addText(TrottoirShapeFileConstants.ID);
    addText(TrottoirShapeFileConstants.ZustandsAbschnittID);
    addText(TrottoirShapeFileConstants.Bezeichnungvon);
    addText(TrottoirShapeFileConstants.Bezeichnungbis);
    addNumber(TrottoirShapeFileConstants.Laenge);
    addNumber(TrottoirShapeFileConstants.Breite);
    addNumber(TrottoirShapeFileConstants.FlaecheTrottoir);
    addText(TrottoirShapeFileConstants.Lage);
    addDate(TrottoirShapeFileConstants.Aufnahmedatum);
    addText(TrottoirShapeFileConstants.Aufnahmeteam);
    addText(TrottoirShapeFileConstants.Zustandsindex);
    addText(TrottoirShapeFileConstants.Massnahmenvorschlag);
    addNumber(TrottoirShapeFileConstants.Kosten);
    addText(TrottoirShapeFileConstants.Dringlichkeit);
    addNumber(TrottoirShapeFileConstants.Gesamtkosten);

    return header;
}
/// <summary>
/// Verifies that a dbase date column is read back as the expected DateTime (2006-03-10).
/// </summary>
public void ReadDbfDate()
{
    string file = Path.Combine(CommonHelpers.TestShapefilesDirectory, "date.dbf");
    if (!File.Exists(file))
    {
        throw new FileNotFoundException("file not found at " + Path.GetDirectoryName(file));
    }

    var reader = new DbaseFileReader(file);
    DbaseFileHeader header = reader.GetHeader();

    // Pull the first record off the enumerator.
    IEnumerator enumerator = reader.GetEnumerator();
    enumerator.MoveNext();
    var items = enumerator.Current as ArrayList;

    Assert.IsNotNull(items);
    Assert.AreEqual(2, items.Count);
    foreach (object item in items)
    {
        Assert.IsNotNull(item);
    }

    // The second column holds the date value.
    DateTime date = (DateTime)items[1];
    Assert.AreEqual(10, date.Day);
    Assert.AreEqual(3, date.Month);
    Assert.AreEqual(2006, date.Year);
}
/// <summary>
/// Initializes a new instance of the ShapefileDataReader class from a stream provider registry.
/// </summary>
/// <param name="streamProviderRegistry">Supplies the shp and dbf data streams.</param>
/// <param name="geometryFactory">The GeometryFactory used to build geometries.</param>
/// <exception cref="ArgumentNullException">When either argument is null.</exception>
public ShapefileDataReader(IStreamProviderRegistry streamProviderRegistry, IGeometryFactory geometryFactory)
{
    // nameof keeps the exception parameter names refactor-safe (was a string literal).
    if (streamProviderRegistry == null)
    {
        throw new ArgumentNullException(nameof(streamProviderRegistry));
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException(nameof(geometryFactory));
    }

    _open = true;
    _dbfReader = new DbaseFileReader(streamProviderRegistry);
    _shpReader = new ShapefileReader(streamProviderRegistry, geometryFactory);
    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // copy dbase fields to our own array. Insert into the first position, the shape column
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int i = 0; i < _dbfHeader.Fields.Length; i++)
    {
        _dbaseFields[i + 1] = _dbfHeader.Fields[i];
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Initializes a new instance of the ShapefileDataReader class.
/// </summary>
/// <param name="filename">The shapefile to read (minus the .shp extension)</param>
/// <param name="geometryFactory">The GeometryFactory to use.</param>
/// <exception cref="ArgumentNullException">When <paramref name="filename"/> is null/empty or <paramref name="geometryFactory"/> is null.</exception>
public ShapefileDataReader(string filename, IGeometryFactory geometryFactory)
{
    // nameof keeps the exception parameter names refactor-safe (was a string literal).
    if (String.IsNullOrEmpty(filename))
    {
        throw new ArgumentNullException(nameof(filename));
    }
    if (geometryFactory == null)
    {
        throw new ArgumentNullException(nameof(geometryFactory));
    }

    _open = true;

    // The companion .dbf and .shp files share the same base name.
    string dbfFile = Path.ChangeExtension(filename, "dbf");
    _dbfReader = new DbaseFileReader(dbfFile);
    string shpFile = Path.ChangeExtension(filename, "shp");
    _shpReader = new ShapefileReader(shpFile, geometryFactory);

    _dbfHeader = _dbfReader.GetHeader();
    _recordCount = _dbfHeader.NumRecords;

    // copy dbase fields to our own array. Insert into the first position, the shape column
    _dbaseFields = new DbaseFieldDescriptor[_dbfHeader.Fields.Length + 1];
    _dbaseFields[0] = DbaseFieldDescriptor.ShapeField();
    for (int i = 0; i < _dbfHeader.Fields.Length; i++)
    {
        _dbaseFields[i + 1] = _dbfHeader.Fields[i];
    }

    _shpHeader = _shpReader.Header;
    _dbfEnumerator = _dbfReader.GetEnumerator();
    _shpEnumerator = _shpReader.GetEnumerator();
    _moreRecords = true;
}
/// <summary>
/// Helper function for SetupHeader, correctly maps C# data types to DbaseFileHeader types
/// </summary>
/// <param name="header">Header that receives the new column.</param>
/// <param name="columnName">Proposed column name; truncated to the 10-character dbase limit.</param>
/// <param name="t">CLR type of the source column.</param>
public static void AddColumn(DbaseFileHeader header, string columnName, Type t)
{
    // dbase field names are limited to 10 characters.
    columnName = Utilities.EnsureMaxLength(columnName, 10);
    if (t == typeof(bool))
    {
        header.AddColumn(columnName, 'L', 1, 0); // logical, 1 byte
    }
    else if (t == typeof(string))
    {
        header.AddColumn(columnName, 'C', 254, 0); // character, maximum dbase width
    }
    else if (t == typeof(DateTime))
    {
        // NOTE(review): dates are stored as 22-char text ('C') rather than the dbase
        // 'D' date type — presumably to preserve time-of-day; confirm against the reader.
        header.AddColumn(columnName, 'C', 22, 0);
    }
    else if (t == typeof(float) || t == typeof(double) || t == typeof(decimal))
    {
        header.AddColumn(columnName, 'N', 18, 10); // numeric with 10 decimal places
    }
    else if (t == typeof(short) || t == typeof(int) || t == typeof(long) || t == typeof(ushort) || t == typeof(uint) || t == typeof(ulong))
    {
        header.AddColumn(columnName, 'N', 18, 0); // integral numeric
    }
    // NOTE(review): any other type falls through silently — no column is added,
    // which can misalign header fields against source columns; consider surfacing this.
}
/// <summary>
/// Reads a shapefile into a arraylist of features that need converting from x,y coordinates to Long and Lat coordinates
/// </summary>
/// <param name="filename">name of the shapefile (the file that has all the polygons for the footpaths)</param>
/// <param name="fact">the class that generates the structure of the points</param>
/// <returns>An ArrayList of Feature objects, one per shapefile record.</returns>
public ArrayList ReadSHP(string filename, GeometryFactory fact)
{
    ArrayList features = new ArrayList(); //Array list for all the coordinates from the shapefile

    // 'using' guarantees the reader is disposed even when reading throws;
    // the original only closed it on the success path, leaking the file handle on error.
    using (ShapefileDataReader sfDataReader = new ShapefileDataReader(filename, fact)) //takes a file and a factory to build the geometries
    {
        DbaseFileHeader DHeader = sfDataReader.DbaseHeader; //field metadata for the attribute table
        while (sfDataReader.Read()) //reading through all the data in the shapefile
        {
            Feature feature = new Feature(); //setting up a feature for each set of points
            AttributesTable atTable = new AttributesTable(); //table for the set of points
            Geometry geometry = sfDataReader.Geometry;

            for (int i = 0; i < DHeader.NumFields; i++)
            {
                DbaseFieldDescriptor fldDescriptor = DHeader.Fields[i];
                atTable.Add(fldDescriptor.Name, sfDataReader.GetValue(i));
            }

            feature.Geometry = geometry;
            feature.Attributes = atTable; //setting the variables for the feature
            features.Add(feature);
        }

        sfDataReader.Close(); //closing the reader
    }

    return features;
}
/// <summary>
/// Dumps the field count, record count and each field's dbase type and name to the console.
/// </summary>
static void DumpDbaseHeader(DbaseFileHeader header)
{
    Console.WriteLine("DBF: Fields.Length = {0} ; NumRecords = {1}", header.Fields.Length, header.NumRecords);
    for (int fieldIndex = 0; fieldIndex < header.NumFields; ++fieldIndex)
    {
        var descriptor = header.Fields[fieldIndex];
        Console.WriteLine("  [{0}] {1}", descriptor.DbaseType, descriptor.Name);
    }
}
/// <summary>
/// Reads a dbase header from the registry's data stream.
/// </summary>
/// <param name="streamProviderRegistry">Registry supplying the dbase data stream.</param>
/// <returns>The parsed header.</returns>
public static DbaseFileHeader GetHeader(IStreamProviderRegistry streamProviderRegistry)
{
    DbaseFileHeader header = new DbaseFileHeader();

    // Resolve the data provider once instead of indexing the registry three times.
    var dataProvider = streamProviderRegistry[StreamTypes.Data];
    using (var stream = dataProvider.OpenRead())
    using (var reader = new BinaryReader(stream))
    {
        // Pass the backing file path (when file-based) so errors can reference it.
        var fileProvider = dataProvider as FileStreamProvider;
        header.ReadHeader(reader, fileProvider != null ? fileProvider.Path : null);
    }

    return header;
}
/// <summary>
/// Dependency-property change callback: mirrors every dbase field descriptor
/// of the new header into the layer's attribute collection.
/// </summary>
private static void HeaderChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var layer = (Layer)d;
    var header = (DbaseFileHeader)e.NewValue;
    for (int fieldIndex = 0; fieldIndex < header.NumFields; fieldIndex++)
    {
        layer.Attributes.Add(header.Fields[fieldIndex]);
    }
}
/// <summary>
/// Writes the dbase header and remembers it for subsequent record writes.
/// </summary>
/// <param name="header">The header to write.</param>
/// <exception cref="ArgumentNullException">When <paramref name="header"/> is null.</exception>
public void Write(DbaseFileHeader header)
{
    if (header == null)
        throw new ArgumentNullException(nameof(header));
    //if (_recordsWritten)
    //    throw new InvalidOperationException("Records have already been written. Header file needs to be written first.");
    header.WriteHeader(_writer);
    // Only mark the header as written after WriteHeader succeeded; the original set
    // the flag first, leaving the writer in a bogus "header done" state on failure.
    _headerWritten = true;
    _header = header;
}
// Verifies that the zip-archive dbase entry validator hands every record of a
// well-formed entry to the inner record validator and reports no problems.
public void ValidatePassesExpectedDbaseRecordsToDbaseRecordValidator()
{
    // Arrange: a validator that simply records every dbase record it is handed.
    var schema = new FakeDbaseSchema();
    var validator = new CollectDbaseRecordValidator();
    var sut = new ZipArchiveDbaseEntryValidator<FakeDbaseRecord>(
        Encoding.UTF8, schema, validator);
    var records = new[]
    {
        new FakeDbaseRecord { Field = { Value = 1 } },
        new FakeDbaseRecord { Field = { Value = 2 } }
    };
    var date = DateTime.Today;
    var header = new DbaseFileHeader(
        date,
        DbaseCodePage.Western_European_ANSI,
        new DbaseRecordCount(records.Length),
        schema);
    using (var stream = new MemoryStream())
    {
        // Build an in-memory zip archive containing one well-formed dbase entry.
        using (var archive = new ZipArchive(stream, ZipArchiveMode.Create, true))
        {
            var entry = archive.CreateEntry("entry");
            using (var entryStream = entry.Open())
            using (var writer = new BinaryWriter(entryStream, Encoding.UTF8))
            {
                header.Write(writer);
                foreach (var record in records)
                {
                    record.Write(writer);
                }
                // Terminate the dbase payload with the end-of-file marker.
                writer.Write(DbaseRecord.EndOfFile);
                entryStream.Flush();
            }
        }
        stream.Flush();
        stream.Position = 0;
        // Act + Assert: reopen the archive and validate the entry just written.
        using (var archive = new ZipArchive(stream, ZipArchiveMode.Read, true))
        {
            var entry = archive.GetEntry("entry");
            var result = sut.Validate(entry);
            Assert.Equal(ZipArchiveProblems.None, result);
            // The inner validator must have seen exactly the written records.
            Assert.Equal(records, validator.Collected);
        }
    }
}
/// <summary>
/// Gets the header from a dbf file.
/// </summary>
/// <param name="dbfFile">The DBF file.</param>
/// <returns>The parsed dbase header.</returns>
/// <exception cref="FileNotFoundException">When the file does not exist.</exception>
public static DbaseFileHeader GetHeader(string dbfFile)
{
    if (!File.Exists(dbfFile))
    {
        throw new FileNotFoundException(dbfFile + " not found");
    }

    DbaseFileHeader header = new DbaseFileHeader();

    // Dispose the stream and reader — the original leaked the open file handle.
    using (var stream = new FileStream(dbfFile, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var reader = new BinaryReader(stream))
    {
        header.ReadHeader(reader, dbfFile);
    }

    return header;
}
/// <summary>
/// Builds a dbase header for the given field descriptors and record count.
/// </summary>
/// <param name="dbFields">Descriptors defining the columns, in order.</param>
/// <param name="count">The number of records the file will contain.</param>
/// <returns>The populated header.</returns>
public static DbaseFileHeader GetHeader(DbaseFieldDescriptor[] dbFields, int count)
{
    var header = new DbaseFileHeader { NumRecords = count };
    foreach (DbaseFieldDescriptor descriptor in dbFields)
    {
        header.AddColumn(descriptor.Name, descriptor.DbaseType, descriptor.Length, descriptor.DecimalCount);
    }
    return header;
}
/// <summary>
/// Writes the given points to a new shapefile in a data subfolder named after
/// the polygon's "name" attribute; the dbase schema is derived from the first point.
/// </summary>
/// <param name="polygone">Feature whose "name" attribute names the output.</param>
/// <param name="points">Point features to write; must be non-empty.</param>
public static void CreateShpFile(Feature polygone, List<Feature> points)
{
    var geometryFactory = new GeometryFactory();
    string folderPath = Path.Combine("..\\..\\..\\data\\", polygone.Attributes["name"].ToString());
    System.IO.Directory.CreateDirectory(folderPath);

    var writer = new ShapefileDataWriter($"{folderPath}/{polygone.Attributes["name"]}", geometryFactory);
    // Derive the dbase column layout from the first point's attribute table.
    writer.Header = ShapefileDataWriter.GetHeader((Feature)points[0], points.Count);
    writer.Write(points);
}
/// <summary>
/// Gets the header information for the dbase file.
/// </summary>
/// <returns>DbaseFileHeader contain header and field information.</returns>
private DbaseFileHeader ReadHeader()
{
    // Parse lazily on first access; the reader stays open for later record reads.
    if (m_Header == null)
    {
        m_FileReader = new BinaryReader(m_StreamProvider.OpenRead());
        m_Header = new DbaseFileHeader();
        // Supply the file path (when the provider is file-backed) for richer errors.
        var fileProvider = m_StreamProvider as FileStreamProvider;
        m_Header.ReadHeader(m_FileReader, fileProvider == null ? null : fileProvider.Path);
    }
    return m_Header;
}
/// <summary>
/// A header with a single character column and zero features must still
/// produce a valid shapefile (regression check for issue 36).
/// </summary>
public void ok_when_writing_shapefile_with_no_features()
{
    var header = new DbaseFileHeader();
    header.AddColumn("X", 'C', 10, 0);

    var writer = new ShapefileDataWriter(@"issue36") { Header = header };
    IList<IFeature> emptyFeatures = new List<IFeature>();
    writer.Write(emptyFeatures);
}
/// <summary>
/// Consumes an ADO.net datatable and correctly initializes a Shapefile header object
/// </summary>
/// <param name="table">Source table whose columns define the dbase schema.</param>
/// <returns>A header with one dbase column per data column.</returns>
public static DbaseFileHeader SetupHeader(DataTable table)
{
    DbaseFileHeader header = new DbaseFileHeader();
    foreach (DataColumn col in table.Columns)
    {
        // AddColumn itself enforces the 10-character dbase name limit,
        // so the pre-truncation the original performed here was redundant.
        AddColumn(header, col.ColumnName, col.DataType);
    }
    return header;
}
/// <summary>
/// Loads features from a shapefile, snapping coordinates to a 0.1 precision grid and
/// simplifying each geometry with Douglas-Peucker (tolerance 0.5). Records whose
/// simplified geometry is empty are skipped; records without a "NAME" attribute
/// receive a sequential numeric fallback name.
/// </summary>
/// <param name="v">Path of the shapefile to read.</param>
/// <returns>The loaded features.</returns>
internal static IEnumerable<Feature> Load(string v)
{
    List<Feature> features = new List<Feature>();

    // 'using' disposes the reader even when an exception interrupts the loop;
    // the original only closed it on the success path.
    using (var shapeFileDataReader = Shapefile.CreateDataReader(v, new GeometryFactory()))
    {
        DbaseFileHeader header = shapeFileDataReader.DbaseHeader;
        shapeFileDataReader.Reset();

        // Precision-reduction helpers are loop-invariant; build them once.
        var precisionModel = new PrecisionModel(10.0);
        var reducer = new NetTopologySuite.Precision.GeometryPrecisionReducer(precisionModel);

        //Read through all records of the shapefile (geometry and attributes) into a feature collection
        int j = 1;
        while (shapeFileDataReader.Read())
        {
            Geometry geometry = NetTopologySuite.Simplify.DouglasPeuckerSimplifier.Simplify(
                reducer.Reduce((Geometry)shapeFileDataReader.Geometry), 0.5);
            if (geometry.IsEmpty)
            {
                continue;
            }

            Feature feature = new Feature();
            AttributesTable attributesTable = new AttributesTable();
            for (int i = 0; i < header.NumFields; i++)
            {
                DbaseFieldDescriptor fldDescriptor = header.Fields[i];
                // Value index 0 is the geometry column, so dbase field i is at i + 1.
                attributesTable.Add(fldDescriptor.Name, shapeFileDataReader.GetValue(i + 1));
            }

            // Records without a NAME get a sequential fallback name.
            if (!attributesTable.GetNames().Contains("NAME", StringComparer.InvariantCulture))
            {
                attributesTable.Add("NAME", j);
            }

            feature.Geometry = geometry;
            feature.Attributes = attributesTable;
            features.Add(feature);
            j++;
        }

        //Close and free up any resources
        shapeFileDataReader.Close();
    }

    return features;
}
/// <summary>
/// Gets the header information for the dbase file.
/// </summary>
/// <returns>DbaseFileHeader contain header and field information.</returns>
private DbaseFileHeader ReadHeader()
{
    // Parse lazily on first access; the reader stays open for later record reads.
    if (m_Header == null)
    {
        m_FileReader = new BinaryReader(
            new FileStream(m_Filename, FileMode.Open, FileAccess.Read, FileShare.Read));
        m_Header = new DbaseFileHeader();
        // Pass the filename so parse errors can reference the source file.
        m_Header.ReadHeader(m_FileReader, m_Filename);
    }
    return m_Header;
}
/// <summary>
/// Verifies that a shapefile whose dbase file uses the Windows-1252 code page
/// is decoded correctly (field names and values containing å/ä/ö).
/// </summary>
public void TestLoadShapeFileWithEncoding()
{
    // Dispose the reader (the original leaked it), and pass arguments in the
    // NUnit (expected, actual) order so failure messages read correctly —
    // the original had them swapped.
    using (ShapefileDataReader reader = new ShapefileDataReader("encoding_sample.shp", GeometryFactory.Default))
    {
        DbaseFileHeader header = reader.DbaseHeader;
        Assert.AreEqual(1252, header.Encoding.WindowsCodePage, "Invalid encoding!");
        Assert.AreEqual("Test", header.Fields[1].Name);
        Assert.AreEqual("Ålder", header.Fields[2].Name);
        Assert.AreEqual("Ödestext", header.Fields[3].Name);
        Assert.IsTrue(reader.Read(), "Error reading file");
        Assert.AreEqual("Testar", reader["Test"]);
        Assert.AreEqual("Lång text med åäö etc", reader["Ödestext"]);
    }
}