/// <summary>
/// Releases resources held by this instance. Safe to call more than once.
/// </summary>
/// <param name="disposing">
/// True when called from a deterministic Dispose (managed resources may be
/// touched); false when called from a finalizer.
/// </param>
private void dispose(bool disposing) // `bool` keyword preferred over the `Boolean` alias
{
    if (IsDisposed)
    {
        return;
    }

    if (disposing) // Do deterministic finalization of managed resources
    {
        if (_baseTable != null)
        {
            _baseTable.Dispose();
        }
        _baseTable = null;

        if (_reader != null)
        {
            _reader.Dispose();
        }
        _reader = null;

        if (_writer != null)
        {
            _writer.Dispose();
        }
        _writer = null;
    }

    _isOpen = false;
}
/// <summary>
/// Constructing a DbaseReader from an empty path must raise an ArgumentException.
/// </summary>
public void Ctor_SendEmptyString_ShouldThrowException()
{
    // Act & Assert.
    Assert.Catch<ArgumentException>(() => m_Reader = new DbaseReader(string.Empty));
}
/// <summary>
/// Constructing a DbaseReader from a whitespace-only path must raise an ArgumentException.
/// </summary>
public void Ctor_SendWhitespaceString_ShouldThrowException()
{
    // Act & Assert.
    Assert.Catch<ArgumentException>(() => m_Reader = new DbaseReader(" \t "));
}
/// <summary>
/// Constructing a DbaseReader from a path that does not exist must raise a FileNotFoundException.
/// </summary>
public void Ctor_SendNonExistantPath_ShouldThrowException()
{
    // Act & Assert.
    Assert.Catch<FileNotFoundException>(
        () => m_Reader = new DbaseReader(@"C:\this\is\sheker\path\should\never\exist\on\ur\pc"));
}
/// <summary>
/// Constructing a DbaseReader from a null path must raise an ArgumentNullException.
/// </summary>
public void Ctor_SendNullPath_ShouldThrowException()
{
    // Act & Assert.
    Assert.Catch<ArgumentNullException>(() => m_Reader = new DbaseReader((string)null));
}
/// <summary>
/// Reading an entry at a negative index must throw.
/// </summary>
public void ReadEntry_SendNegativeIndex_ShouldThrowException()
{
    // Arrange.
    m_TmpFile = new TempFileCloudUploader("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(GetProvider(m_TmpFile.Path));

    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentOutOfRangeException>(() => m_Reader.ReadEntry(-1));
}
/// <summary>
/// Reading an entry past the last record must throw.
/// </summary>
public void ReadEntry_SendOutOfBoundIndex_ShouldThrowException()
{
    // Arrange.
    m_TmpFile = new TempFileWriter("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);

    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentOutOfRangeException>(() => m_Reader.ReadEntry(3));
}
/// <summary>
/// Builds a feature whose geometry and attribute table are materialized lazily,
/// in a thread-safe manner, on first access.
/// </summary>
public ShapefileFeature(ShapeReader shapeReader, DbaseReader dbfReader, ShapeLocationInFileInfo shapeLocation, IGeometryFactory geoFactory)
{
    m_ShapeReader = shapeReader;
    m_DbaseReader = dbfReader;
    m_GeoFactory = geoFactory;
    m_ShapeLocationInfo = shapeLocation;

    // Defer the (potentially expensive) file reads until the values are requested.
    m_LazyGeometry = new Lazy<IGeometry>(
        () => m_ShapeReader.ReadShapeAtOffset(m_ShapeLocationInfo.OffsetFromStartOfFile, m_GeoFactory),
        LazyThreadSafetyMode.ExecutionAndPublication);
    m_LazyAttributeTable = new Lazy<IAttributesTable>(
        () => m_DbaseReader.ReadEntry(m_ShapeLocationInfo.ShapeIndex),
        LazyThreadSafetyMode.ExecutionAndPublication);
}
/// <summary>
/// Captures the readers and shape location; both the geometry and the
/// attribute table are read lazily and at most once (thread-safe publication).
/// </summary>
public ShapefileFeature(ShapeReader shapeReader, DbaseReader dbfReader, ShapeLocationInFileInfo shapeLocation, IGeometryFactory geoFactory)
{
    m_ShapeReader = shapeReader;
    m_GeoFactory = geoFactory;
    m_ShapeLocationInfo = shapeLocation;
    m_DbaseReader = dbfReader;

    // Lazily resolve the geometry from its file offset.
    m_LazyGeometry = new Lazy<IGeometry>(
        () => m_ShapeReader.ReadShapeAtOffset(m_ShapeLocationInfo.OffsetFromStartOfFile, m_GeoFactory),
        LazyThreadSafetyMode.ExecutionAndPublication);

    // Lazily resolve the attribute row from its record index.
    m_LazyAttributeTable = new Lazy<IAttributesTable>(
        () => m_DbaseReader.ReadEntry(m_ShapeLocationInfo.ShapeIndex),
        LazyThreadSafetyMode.ExecutionAndPublication);
}
/// <summary>
/// Indexing an attribute table with a key that is not in the DBF schema must throw.
/// </summary>
public void ReadEntry_ReadNonExistantKeyFromEntry_ShoudReturnCorrectValues()
{
    // Arrange.
    m_TmpFile = new TempFileCloudUploader("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(GetProvider(m_TmpFile.Path));
    var results = m_Reader.ReadEntry(0);

    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentException>(() => { var a = results["a"]; });
}
/// <summary>
/// A DbaseReader built over a valid temporary DBF file must construct successfully.
/// </summary>
public void Ctor_SendValidParameters_ShouldReturnNotNull()
{
    // Arrange.
    m_TmpFile = new TempFileWriter("data.dbf", DbfFiles.Read("line_ed50_geo"));

    // Act.
    m_Reader = new DbaseReader(m_TmpFile.Path);

    // Assert.
    Assert.IsNotNull(m_Reader);
}
/// <summary>
/// Reading from a disposed reader must throw.
/// </summary>
public void ReadEntry_TryReadAfterDisposed_ShouldThrowException()
{
    // Arrange.
    m_TmpFile = new TempFileWriter("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);
    m_Reader.Dispose();

    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<InvalidOperationException>(() => m_Reader.ReadEntry(1));
}
/// <summary>
/// Indexing an attribute table with a key that is not in the DBF schema must throw.
/// </summary>
public void ReadEntry_ReadNonExistantKeyFromEntry_ShoudReturnCorrectValues()
{
    // Arrange.
    m_TmpFile = new TempFileWriter("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);
    IAttributesTable results = m_Reader.ReadEntry(0);

    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentException>(() => { object a = results["a"]; });
}
/// <summary>
/// Reading an entry past the last record must raise an ArgumentOutOfRangeException.
/// </summary>
public void ReadEntry_SendOutOfBoundIndex_ShouldThrowException()
{
    // Arrange.
    m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);

    // Act & Assert.
    Assert.Catch<ArgumentOutOfRangeException>(() => m_Reader.ReadEntry(3));
}
/// <summary>
/// Reading from a disposed reader must raise an InvalidOperationException.
/// </summary>
public void ReadEntry_TryReadAfterDisposed_ShouldThrowException()
{
    // Arrange.
    m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);
    m_Reader.Dispose();

    // Act & Assert.
    Assert.Catch<InvalidOperationException>(() => m_Reader.ReadEntry(1));
}
/// <summary>
/// Per-test teardown: release the reader and the temporary file the test created.
/// </summary>
public void TestCleanup()
{
    if (m_Reader != null)
    {
        m_Reader.Dispose();
    }
    m_Reader = null;

    if (m_TmpFile != null)
    {
        m_TmpFile.Dispose();
    }
    m_TmpFile = null;
}
/// <summary>
/// Indexing an attribute table with a key absent from the DBF schema must
/// raise an ArgumentException.
/// </summary>
public void ReadEntry_ReadNonExistantKeyFromEntry_ShoudReturnCorrectValues()
{
    // Arrange.
    m_TmpFile = new TempFileWriter(".dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);
    var results = m_Reader.ReadEntry(0);

    // Act & Assert.
    Assert.Catch<ArgumentException>(() =>
    {
        object a = results["a"];
    });
}
/// <summary>
/// Loads every feature whose bounding box intersects <paramref name="bbox"/>
/// into <paramref name="ds"/>, skipping records with unreadable geometry.
/// </summary>
/// <param name="shapeFile">Open shapefile provider used for the spatial query.</param>
/// <param name="bbox">Query envelope.</param>
/// <param name="ds">Data set receiving the resulting feature table.</param>
public void ExecuteRobustIntersectionQuery(ShapeFile shapeFile, Envelope bbox, FeatureDataSet ds)
{
    // Use the spatial index to get a list of features whose boundingbox intersects bbox.
    var objectlist = shapeFile.GetObjectIDsInView(bbox);

    string dbfFile = Path.ChangeExtension(shapeFile.Filename, ".dbf");
    if (!File.Exists(dbfFile))
    {
        // The original dereferenced a null DbaseReader here; without the
        // attribute file there is no schema to build the table from.
        return;
    }

    // `using` guarantees the streams are released even when a record throws.
    using (Stream stream = new FileStream(shapeFile.Filename, FileMode.Open, FileAccess.Read))
    using (BinaryReader br = new BinaryReader(stream))
    using (DbaseReader dbaseFile = new DbaseReader(dbfFile))
    {
        dbaseFile.Open();

        var dt = dbaseFile.NewTable;
        dt.BeginLoadData();
        for (var i = 0; i < objectlist.Count; i++)
        {
            try
            {
                var geometry = shapeFile.GetFeature(objectlist[i]).Geometry;
                //fdr.Geometry = ReadGeometry(objectlist[i], br, dbaseFile);
                var fdr = (FeatureDataRow)dt.LoadDataRow(dbaseFile.GetValues(objectlist[i]), true);
                fdr.Geometry = geometry;
            }
            catch (Exception)
            {
                // Best-effort load: a single corrupt record must not abort the query.
                Debug.WriteLine("ERROR: Bad geometry for feature at index " + objectlist[i]);
            }
        }
        dt.EndLoadData();
        dt.AcceptChanges();
        ds.Tables.Add(dt);

        dbaseFile.Close();
    }
}
/// <summary>
/// Smoke test: every record of the sample DBF can be read without throwing.
/// </summary>
public void TestDbaseReader()
{
    using (DbaseReader reader = new DbaseReader(TestUtility.GetPathToTestFile("SPATIAL_F_SKARVMUFF.dbf")))
    {
        reader.Open();
        int numberOfRecords = GetNumberOfRecords(reader);

        // Walk the whole file, record by record.
        uint rowid = 0;
        while (rowid < numberOfRecords)
        {
            var values = reader.GetValues(rowid);
            rowid++;
        }
    }
}
/// <summary>
/// Smoke test: every record of the test DBF file can be read without throwing.
/// </summary>
public void TestDbaseReader()
{
    using (DbaseReader reader = new DbaseReader(GetTestFile()))
    {
        reader.Open();
        int total = GetNumberOfRecords(reader);

        // Touch every record once; any read failure fails the test.
        for (uint row = 0; row < total; row++)
        {
            var record = reader.GetValues(row);
        }
    }
}
/// <summary>
/// Opens the <see cref="DbaseFile"/> on the file name specified in the
/// constructor, with a value determining if the file is locked for
/// exclusive read access or not.
/// </summary>
/// <param name="writeAccess">Requested access mode for the underlying file.</param>
/// <exception cref="ObjectDisposedException">
/// Thrown when the method is called and Object has been disposed.
/// </exception>
internal void Open(WriteAccess writeAccess)
{
    checkState();

    // TODO: implement asynchronous access
    _dbaseFileStream = openDbfFileStream(writeAccess);
    _isOpen = true;

    syncReadHeader(DataStream);

    _reader = new DbaseReader(this);

    // A writer is only wired up when the file may actually be modified.
    bool writable = writeAccess != WriteAccess.ReadOnly;
    if (writable)
    {
        _writer = new DbaseWriter(this);
    }

    // TODO: NullBinaryWriter
}
/// <summary>
/// Creates a reader over the shapefile at <paramref name="shapeFilePath"/>,
/// optionally populating the spatial index on a background task.
/// </summary>
public ShapeDataReader(string shapeFilePath, ISpatialIndex<ShapeLocationInFileInfo> index, IGeometryFactory geoFactory, bool buildIndexAsync)
{
    m_SpatialIndex = index;
    m_GeoFactory = geoFactory;

    ValidateParameters(shapeFilePath);

    m_ShapeReader = new ShapeReader(shapeFilePath);

    if (!buildIndexAsync)
    {
        // Synchronous path: the index is ready when the constructor returns.
        FillSpatialIndex();
    }
    else
    {
        // Asynchronous path: the index fills in the background and can be cancelled.
        m_CancellationTokenSrc = new CancellationTokenSource();
        m_IndexCreationTask = Task.Factory.StartNew(FillSpatialIndex, m_CancellationTokenSrc.Token);
    }

    // The DBF attribute file lives next to the .shp with a different extension.
    m_DbfReader = new DbaseReader(Path.ChangeExtension(shapeFilePath, DBF_EXT));
}
/// <summary>
/// Builds a DBF binary-tree index over the first column and verifies that it
/// contains exactly one entry per record.
/// </summary>
public void TestDbaseBinaryTree()
{
    using (DbaseReader reader = new DbaseReader(GetTestFile()))
    {
        reader.Open();
        int numberOfRecords = GetNumberOfRecords(reader);

        // Create index on OBJECTNO column.
        var indexOBJECTNO = reader.CreateDbfIndex<int>(0);

        // The in-order traversal must visit every record exactly once.
        Assert.AreEqual(numberOfRecords, indexOBJECTNO.InOrder.Count());

        // Each record's value must be findable, and unique, in the index.
        // NOTE(review): the index is built on column 0 while the value is read
        // from values[1] - presumably GetValues prepends the row id; confirm.
        for (uint rowid = 0; rowid < numberOfRecords; rowid++)
        {
            var value = (int)reader.GetValues(rowid)[1];
            Assert.AreEqual(1, indexOBJECTNO.Find(value).Count());
        }
    }
}
/// <summary>
/// Builds a DBF binary-tree index over the first column and checks it is
/// complete (one node per record) and searchable.
/// </summary>
public void TestDbaseBinaryTree()
{
    using (DbaseReader reader = new DbaseReader(GetTestFile()))
    {
        reader.Open();
        int total = GetNumberOfRecords(reader);

        // Create index on OBJECTNO column.
        var objectNoIndex = reader.CreateDbfIndex<int>(0);

        // Completeness: every record appears in the in-order traversal.
        Assert.AreEqual(total, objectNoIndex.InOrder.Count());

        // Searchability: each record's key resolves to exactly one hit.
        // NOTE(review): index is built on column 0 but the key is values[1] -
        // presumably GetValues prepends the row id; confirm against the reader.
        for (uint row = 0; row < total; row++)
        {
            int key = (int)reader.GetValues(row)[1];
            Assert.AreEqual(1, objectNoIndex.Find(key).Count());
        }
    }
}
/// <summary>
/// Creates a reader over the streams supplied by
/// <paramref name="streamProviderRegistry"/>, optionally populating the
/// spatial index on a background task.
/// </summary>
public ShapeDataReader(IStreamProviderRegistry streamProviderRegistry , ISpatialIndex<ShapeLocationInFileInfo> index, IGeometryFactory geoFactory, bool buildIndexAsync)
{
    m_SpatialIndex = index;
    m_GeoFactory = geoFactory;

    ValidateParameters();

    m_ShapeReader = new ShapeReader(streamProviderRegistry);

    if (!buildIndexAsync)
    {
        // Synchronous path: the index is complete before the constructor returns.
        FillSpatialIndex();
    }
    else
    {
        // Asynchronous path: fill the index in the background, cancellable.
        m_CancellationTokenSrc = new CancellationTokenSource();
        m_IndexCreationTask = Task.Factory.StartNew(FillSpatialIndex, m_CancellationTokenSrc.Token);
    }

    m_DbfReader = new DbaseReader(streamProviderRegistry[StreamTypes.Data]);
}
/// <summary>
/// Reads the reader's private "_numberOfRecords" field via reflection,
/// because DbaseReader does not expose the record count publicly.
/// </summary>
private int GetNumberOfRecords(DbaseReader reader)
{
    const System.Reflection.BindingFlags flags =
        System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance;
    var field = typeof(DbaseReader).GetField("_numberOfRecords", flags);
    return (int)field.GetValue(reader);
}
/// <summary>
/// Constructing a DbaseReader from an empty path must throw.
/// </summary>
public void Ctor_SendEmptyString_ShouldThrowException()
{
    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentException>(() => m_Reader = new DbaseReader(string.Empty));
}
/// <summary>
/// Opens the datasource
/// </summary>
/// <param name="path">Path to the .shp file to open.</param>
public virtual void Open(string path)
{
    // Get a Connector. The connector returned is guaranteed to be connected and ready to go.
    // Pooling.Connector connector = Pooling.ConnectorPool.ConnectorPoolManager.RequestConnector(this,true);
    if (!File.Exists(path))
    {
        log.Error("Could not find " + path);
        return;
    }

    if (!_IsOpen)
    {
        try
        {
            this.path = path;

            // Initialize DBF. Path.ChangeExtension replaces the fragile
            // Substring/LastIndexOf arithmetic and also copes with paths
            // that contain no extension at all.
            string dbffile = Path.ChangeExtension(this.path, ".dbf");
            if (File.Exists(dbffile))
                dbaseFile = new DbaseReader(dbffile);

            //Parse shape header
            ParseHeader();
            //Read projection file
            ParseProjection();

            fsShapeIndex = new FileStream(Path.ChangeExtension(this.path, ".shx"), FileMode.Open, FileAccess.Read);
            brShapeIndex = new BinaryReader(fsShapeIndex, Encoding.Unicode);
            fsShapeFile = new FileStream(this.path, FileMode.Open, FileAccess.Read);
            brShapeFile = new BinaryReader(fsShapeFile);

            InitializeShape(this.path, _FileBasedIndex);
            if (dbaseFile != null)
                dbaseFile.Open();
            _IsOpen = true;
        }
        catch (IOException e)
        {
            log.Error(e.Message);
            _IsOpen = false;
        }
    }
}
/// <summary>
/// Constructing a DbaseReader from a provider built over a null path must throw.
/// </summary>
public void Ctor_SendNullPath_ShouldThrowException()
{
    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentNullException>(() => m_Reader = new DbaseReader(GetProvider(null)));
}
/// <summary>
/// Constructing a DbaseReader from a provider over a path that does not exist must throw.
/// </summary>
public void Ctor_SendNonExistantPath_ShouldThrowException()
{
    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<FileNotFoundException>(
        () => m_Reader = new DbaseReader(GetProvider(@"this/is/sheker/path/should/never/exist/on/ur/pc")));
}
/// <summary>
/// Constructing a DbaseReader from a path that does not exist must throw.
/// </summary>
public void Ctor_SendNonExistantPath_ShouldThrowException()
{
    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<FileNotFoundException>(
        () => m_Reader = new DbaseReader(@"C:\this\is\sheker\path\should\never\exist\on\ur\pc"));
}
/// <summary>
/// Constructing a DbaseReader from a null path must throw.
/// </summary>
public void Ctor_SendNullPath_ShouldThrowException()
{
    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentNullException>(() => m_Reader = new DbaseReader((string)null));
}
/// <summary>
/// Initializes a ShapeFile DataProvider.
/// </summary>
/// <remarks>
/// <para>If FileBasedIndex is true, the spatial index will be read from a local copy. If it doesn't exist,
/// it will be generated and saved to [filename] + '.sidx'.</para>
/// <para>Using a file-based index is especially recommended for ASP.NET applications which will speed up
/// start-up time when the cache has been emptied.
/// </para>
/// </remarks>
/// <param name="filename">Path to shape file</param>
/// <param name="fileBasedIndex">Use file-based spatial index</param>
public ShapeFile(string filename, bool fileBasedIndex)
{
    _Filename = filename;

    // A file-based index is only usable when the .shx companion file exists.
    bool shxExists = File.Exists(Path.ChangeExtension(filename, ".shx"));
    _FileBasedIndex = fileBasedIndex && shxExists;

    // Initialize the DBF attribute reader when the companion .dbf exists.
    string dbffile = Path.ChangeExtension(filename, ".dbf");
    if (File.Exists(dbffile))
    {
        dbaseFile = new DbaseReader(dbffile);
    }

    // Parse shape header.
    ParseHeader();
    // Read projection file.
    ParseProjection();
}
/// <summary>
/// Reads the three records of the sample DBF and checks every column's type
/// and value against the expected table.
/// </summary>
public void ReadEntry_ReadEntryValues_ShoudReturnCorrectValues()
{
    // Arrange.
    m_TmpFile = new TempFileCloudUploader("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(GetProvider(m_TmpFile.Path));
    var expectedTable = new
    {
        Ids = new double[] { 3, 2, 1 },
        Strings = new[] { "str3", "str2", "str1" },
        WholeNums = new double[] { 3, 2, 1 },
        DecNums = new double[] { 3, 2, 1 }
    };

    // Act.
    IAttributesTable[] results = { m_Reader.ReadEntry(0), m_Reader.ReadEntry(1), m_Reader.ReadEntry(2) };

    // Assert. Note: Assert.AreEqual takes the EXPECTED value first - the
    // original had the arguments swapped, producing misleading failure messages.
    for (int i = 0; i < results.Length; i++)
    {
        var res = results[i];
        var id = res["id"];
        var str = res["str"];
        var wholeNum = res["wholeNum"];
        var decNum = res["decNum"];
        var date = res["dt"];

        Assert.IsNotNull(id);
        Assert.IsNotNull(str);
        Assert.IsNotNull(wholeNum);
        Assert.IsNotNull(decNum);
        Assert.IsNotNull(date);

        Assert.IsInstanceOf<double>(id);
        Assert.IsInstanceOf<string>(str);
        Assert.IsInstanceOf<double>(wholeNum);
        Assert.IsInstanceOf<double>(decNum);
        Assert.IsInstanceOf<DateTime>(date);

        Assert.AreEqual(expectedTable.Ids[i], id);
        Assert.AreEqual(expectedTable.Strings[i], str);
        Assert.AreEqual(expectedTable.WholeNums[i], wholeNum);
        Assert.AreEqual(expectedTable.DecNums[i], decNum);
        Assert.AreEqual(DATE_SAVED_IN_DBF, date);
    }
}
/// <summary>
/// Builds a feature identified by its shape index; geometry and attributes are
/// resolved lazily (thread-safe, at most once) from the captured readers.
/// </summary>
public ShapefileFeature(ShapeReader shapeReader, DbaseReader dbfReader, ShapeLocationInFileInfo shapeLocation, GeometryFactory geoFactory)
{
    FeatureId = shapeLocation.ShapeIndex;

    // Both lazies capture the constructor arguments directly; no fields needed.
    _lazyGeometry = new Lazy<Geometry>(
        () => shapeReader.ReadShapeAtOffset(shapeLocation.OffsetFromStartOfFile, geoFactory),
        LazyThreadSafetyMode.ExecutionAndPublication);
    _lazyAttributeTable = new Lazy<IAttributesTable>(
        () => dbfReader.ReadEntry(shapeLocation.ShapeIndex),
        LazyThreadSafetyMode.ExecutionAndPublication);
}
/// <summary>
/// Constructing a DbaseReader from a whitespace-only path must throw.
/// </summary>
public void Ctor_SendWhitespaceString_ShouldThrowException()
{
    // Act & Assert - explicit catch so the test asserts the exception itself
    // rather than relying on an attribute-based expectation.
    Assert.Catch<ArgumentException>(() => m_Reader = new DbaseReader(" \t "));
}
/// <summary>
/// Enumerates the reader, materializes all records, and checks every column's
/// type and value against the expected table.
/// </summary>
public void ForEachIteration_ReadEntryValues_ShoudReturnCorrectValues()
{
    // Arrange.
    m_TmpFile = new TempFileWriter("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);
    var expectedTable = new
    {
        Ids = new double[] { 3, 2, 1 },
        Strings = new string[] { "str3", "str2", "str1" },
        WholeNums = new double[] { 3, 2, 1 },
        DecNums = new double[] { 3, 2, 1 },
    };

    // Act.
    IAttributesTable[] results = m_Reader.ToArray();

    // Assert. Note: Assert.AreEqual takes the EXPECTED value first - the
    // original had the arguments swapped, producing misleading failure messages.
    Assert.AreEqual(3, results.Length);
    for (int i = 0; i < results.Length; i++)
    {
        IAttributesTable res = results[i];
        object id = res["id"];
        object str = res["str"];
        object wholeNum = res["wholeNum"];
        object decNum = res["decNum"];
        object date = res["dt"];

        Assert.IsNotNull(id);
        Assert.IsNotNull(str);
        Assert.IsNotNull(wholeNum);
        Assert.IsNotNull(decNum);
        Assert.IsNotNull(date);

        Assert.IsInstanceOf<double>(id);
        Assert.IsInstanceOf<string>(str);
        Assert.IsInstanceOf<double>(wholeNum);
        Assert.IsInstanceOf<double>(decNum);
        Assert.IsInstanceOf<DateTime>(date);

        Assert.AreEqual(expectedTable.Ids[i], id);
        Assert.AreEqual(expectedTable.Strings[i], str);
        Assert.AreEqual(expectedTable.WholeNums[i], wholeNum);
        Assert.AreEqual(expectedTable.DecNums[i], decNum);
        Assert.AreEqual(DATE_SAVED_IN_DBF, date);
    }
}
/// <summary>
/// Enumerates the reader and verifies each record's columns by type and value.
/// </summary>
public void ForEachIteration_ReadEntryValues_ShoudReturnCorrectValues()
{
    // Arrange.
    m_TmpFile = new TempFileWriter("data.dbf", DbfFiles.Read("point_ed50_geo"));
    m_Reader = new DbaseReader(m_TmpFile.Path);
    var expectedTable = new
    {
        Ids = new double[] { 3, 2, 1 },
        Strings = new string[] { "str3", "str2", "str1" },
        WholeNums = new double[] { 3, 2, 1 },
        DecNums = new double[] { 3, 2, 1 },
    };

    // Act.
    IAttributesTable[] results = m_Reader.ToArray();

    // Assert. Assert.AreEqual(expected, actual): the original passed the
    // arguments in the opposite order, which garbles failure messages.
    Assert.AreEqual(3, results.Length);
    int currResIndex = 0;
    foreach (IAttributesTable res in results)
    {
        object id = res["id"];
        object str = res["str"];
        object wholeNum = res["wholeNum"];
        object decNum = res["decNum"];
        object date = res["dt"];

        Assert.IsNotNull(id);
        Assert.IsNotNull(str);
        Assert.IsNotNull(wholeNum);
        Assert.IsNotNull(decNum);
        Assert.IsNotNull(date);

        Assert.IsInstanceOf<double>(id);
        Assert.IsInstanceOf<string>(str);
        Assert.IsInstanceOf<double>(wholeNum);
        Assert.IsInstanceOf<double>(decNum);
        Assert.IsInstanceOf<DateTime>(date);

        Assert.AreEqual(expectedTable.Ids[currResIndex], id);
        Assert.AreEqual(expectedTable.Strings[currResIndex], str);
        Assert.AreEqual(expectedTable.WholeNums[currResIndex], wholeNum);
        Assert.AreEqual(expectedTable.DecNums[currResIndex], decNum);
        Assert.AreEqual(DATE_SAVED_IN_DBF, date);

        currResIndex++;
    }
}
/// <summary>
/// Reads (name, code) pairs for the first <paramref name="count"/> regions
/// from the dBase attribute file.
/// </summary>
/// <param name="count">Number of regions (records) to read, starting at 0.</param>
/// <returns>One tuple per region: (values[1], values[5]) of the record.</returns>
private List<Tuple<string, string>> GetNameAndCode(int count)
{
    // Generate Tuple (ideally struct), containing names and codes.
    List<Tuple<string, string>> nameAndCode;
    using (DbaseReader db = new DbaseReader(DbfFileName))
    {
        db.Open();

        int start = 0;
        //int count = stateBorders.Count ();
        var regionIdx = Enumerable.Range(start, count);

        // Read each record exactly once; the original called GetValues twice
        // per region (once for the name and again for the code).
        nameAndCode = regionIdx.Select(iRegion =>
        {
            object[] values = (object[])db.GetValues((uint)iRegion);
            return Tuple.Create((string)values[1], (string)values[5]);
        }).ToList();

        db.Close();
    }
    return nameAndCode;
}
/// <summary>
/// Initializes a ShapeFile DataProvider.
/// </summary>
/// <remarks>
/// <para>If FileBasedIndex is true, the spatial index will be read from a local copy. If it doesn't exist,
/// it will be generated and saved to [filename] + '.sidx'.</para>
/// <para>Using a file-based index is especially recommended for ASP.NET applications which will speed up
/// start-up time when the cache has been emptied.
/// </para>
/// </remarks>
/// <param name="filename">Path to shape file</param>
/// <param name="fileBasedIndex">Use file-based spatial index</param>
public ShapeFile(string filename, bool fileBasedIndex)
{
    _Filename = filename;
    _FileBasedIndex = fileBasedIndex;

    // Initialize DBF. Path.ChangeExtension replaces the Substring/LastIndexOf
    // arithmetic, which threw when the filename contained no '.' at all.
    string dbffile = Path.ChangeExtension(_Filename, ".dbf");
    if (File.Exists(dbffile))
        dbaseFile = new DbaseReader(dbffile);

    //Parse shape header
    ParseHeader();
    //Read projection file
    ParseProjection();
}
/// <summary>
/// Opens the <see cref="DbaseFile"/> on the file name specified in the
/// constructor, with a value determining if the file is locked for
/// exclusive read access or not.
/// </summary>
/// <param name="writeAccess">Requested access mode for the underlying file.</param>
/// <exception cref="ObjectDisposedException">
/// Thrown when the method is called and Object has been disposed.
/// </exception>
internal void Open(WriteAccess writeAccess)
{
    checkState();

    // TODO: implement asynchronous access
    _dbaseFileStream = openDbfFileStream(writeAccess);
    _isOpen = true;

    syncReadHeader(DataStream);

    _reader = new DbaseReader(this);

    // Only writable modes need a writer.
    if (writeAccess != WriteAccess.ReadOnly)
    {
        _writer = new DbaseWriter(this);
    }

    // TODO: NullBinaryWriter
}
/// <summary>
/// Extracts the private "_numberOfRecords" field from a DbaseReader via
/// reflection; the reader has no public record-count accessor.
/// </summary>
private int GetNumberOfRecords(DbaseReader reader)
{
    var recordCountField = typeof(DbaseReader).GetField(
        "_numberOfRecords",
        System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
    object count = recordCountField.GetValue(reader);
    return (int)count;
}
/// <summary>
/// Releases resources held by this instance. Safe to call more than once.
/// </summary>
/// <param name="disposing">
/// True when called from a deterministic Dispose (managed resources may be
/// touched); false when called from a finalizer.
/// </param>
private void dispose(bool disposing)
{
    if (IsDisposed)
    {
        return;
    }

    if (disposing) // Do deterministic finalization of managed resources
    {
        // Braces added throughout: brace-less ifs were inconsistent with the
        // rest of the dispose logic and invite dangling-statement bugs.
        if (_baseTable != null)
        {
            _baseTable.Dispose();
        }
        _baseTable = null;

        if (_reader != null)
        {
            _reader.Dispose();
        }
        _reader = null;

        if (_writer != null)
        {
            _writer.Dispose();
        }
        _writer = null;
    }

    _isOpen = false;
}
/// <summary>
/// Opens the <see cref="DbaseFile"/> on the file name
/// specified in the constructor,
/// with a value determining if the file is locked for
/// exclusive read access or not.
/// </summary>
/// <param name="exclusive">
/// True to deny other processes access to the file while it is open
/// (FileShare.None); false to allow shared reads (FileShare.Read).
/// </param>
/// <exception cref="ObjectDisposedException">
/// Thrown when the method is called
/// and object has been disposed.
/// </exception>
internal void Open(bool exclusive)
{
    checkState();

    // TODO: implement asynchronous access
#if !CFBuild
    _dbaseFileStream = new FileStream(_filename, FileMode.OpenOrCreate,
                                      FileAccess.ReadWrite,
                                      exclusive ? FileShare.None : FileShare.Read,
                                      4096, FileOptions.None);
#else
    // Compact Framework build: FileStream exposes no FileOptions overload.
    _dbaseFileStream = new FileStream(_filename, FileMode.OpenOrCreate,
                                      FileAccess.ReadWrite,
                                      exclusive ? FileShare.None : FileShare.Read,
                                      4096);
#endif

    _isOpen = true;

    if (!_headerIsParsed) //Don't read the header if it's already parsed
    {
        // Parse the DBF header through a buffering wrapper, then derive the
        // in-memory table schema from the header's column definitions.
        _header = DbaseHeader.ParseDbfHeader(new BufferingStream(DataStream));
        _baseTable = DbaseSchema.GetFeatureTableForFields(_header.Columns);
        _headerIsParsed = true;
    }

    // Reader and writer share this instance's open stream.
    _writer = new DbaseWriter(this);
    _reader = new DbaseReader(this);
}