/// <summary>
/// Reads just the contents requested. Faster than returning the entire record
/// if you have lots of attributes but only want a few.
/// </summary>
/// <param name="lowerPageBoundary">starting row</param>
/// <param name="rowsPerPage">number of rows to return</param>
/// <param name="fieldNames">fields for which data is to be returned</param>
/// <returns>A [row, field] array of parsed values, trimmed if the file ends
/// before the page is full, or null if the file contains no records.</returns>
public object[,] SupplyPageOfData(int lowerPageBoundary, int rowsPerPage, IEnumerable<string> fieldNames)
{
    var myStream = new FileStream(_fileName, FileMode.Open, FileAccess.Read, FileShare.Read, 100000);
    try
    {
        var fi = new FileInfo(_fileName);

        // Encoding appears to be ASCII, not Unicode.
        // Compare as long: the previous (int) cast of fi.Length would
        // overflow for files larger than 2GB.
        if (fi.Length == _headerLength)
        {
            // The file is empty, so we are done here
            return null;
        }

        var maxRawRow = (int)((fi.Length - (HeaderLength + 1)) / _recordLength);

        // Set up before looping over the rows.
        var fieldNamesArray = fieldNames.ToArray();
        var numFields = fieldNamesArray.Length;
        var fields = new Field[numFields];
        var result = new object[rowsPerPage, numFields];
        var outRow = 0;
        var byteContent = new byte[numFields][];
        var characterContent = new char[numFields][];
        var columnOffsets = new int[numFields];
        var columnList = new List<KeyValuePair<int, int>>();
        for (var fieldNumber = 0; fieldNumber < numFields; fieldNumber++)
        {
            var field = _columns[_dataTable.Columns[fieldNamesArray[fieldNumber]].Ordinal];
            fields[fieldNumber] = field;
            byteContent[fieldNumber] = new byte[field.Length];
            characterContent[fieldNumber] = new char[field.Length];
            var column = _dataTable.Columns[fieldNamesArray[fieldNumber]].Ordinal;
            columnList.Add(new KeyValuePair<int, int>(column, fieldNumber));
            columnOffsets[fieldNumber] = GetColumnOffset(column);
        }

        // We want to read the attributes in order for each row because it is faster.
        columnList.Sort(CompareKvpByKey);

        for (var row = lowerPageBoundary; row < lowerPageBoundary + rowsPerPage; row++)
        {
            if (row > maxRawRow)
            {
                // Ran out of rows before the page was filled: return a trimmed
                // array. outRow is always < rowsPerPage inside the loop, so the
                // partial copy is always taken here.
                if (outRow < rowsPerPage)
                {
                    var partialResult = new object[outRow, numFields];
                    // Array.Copy treats the rectangular arrays linearly; both
                    // have numFields columns, so the first outRow rows copy intact.
                    Array.Copy(result, partialResult, outRow * numFields);
                    return partialResult;
                }
            }

            var fileIndex = GetFileIndex(row);
            foreach (KeyValuePair<int, int> columnFieldNumberPair in columnList)
            {
                int fieldNumber = columnFieldNumberPair.Value;

                // Widen before multiplying so the record offset cannot overflow
                // int arithmetic on very large files.
                long offset = _headerLength + 1 + _recordLength * (long)fileIndex + columnOffsets[fieldNumber];
                myStream.Seek(offset, SeekOrigin.Begin);
                var field = fields[fieldNumber];

                // NOTE(review): the return value of Read is ignored; a short read
                // would leave stale bytes in the buffer — TODO confirm record
                // lengths guarantee full reads here.
                myStream.Read(byteContent[fieldNumber], 0, field.Length);
                Encoding.Default.GetChars(byteContent[fieldNumber], 0, field.Length, characterContent[fieldNumber], 0);

                // Table is null: a parse error should throw rather than upgrade the column.
                result[outRow, fieldNumber] = ParseColumn(field, row, characterContent[fieldNumber], null);
            }

            outRow++;
        }

        return result;
    }
    finally
    {
        myStream.Close();
    }
}
/// <summary>
/// Read the header data from the DBF file.
/// </summary>
/// <param name="reader">BinaryReader containing the header.</param>
private void ReadTableHeader(BinaryReader reader)
{
    // type of reader. Only plain dBASE III (0x03) files are supported.
    _fileType = reader.ReadByte();
    if (_fileType != 0x03) throw new NotSupportedException("Unsupported DBF reader Type " + _fileType);

    // parse the update date information. The header stores the year as a
    // single byte offset from 1900, followed by month and day bytes.
    int year = reader.ReadByte();
    int month = reader.ReadByte();
    int day = reader.ReadByte();

    try
    {
        _updateDate = new DateTime(year + 1900, month, day);
    }
    catch
    {
        // If the Update Date in the header is not in correct format, just use the modify time of the file
        _updateDate = new FileInfo(_fileName).LastWriteTime;
    }

    // read the number of records.
    _numRecords = reader.ReadInt32();

    // read the length of the header structure.
    _headerLength = reader.ReadInt16();

    // read the length of a record
    _recordLength = reader.ReadInt16();

    // skip the reserved bytes in the header.
    reader.ReadBytes(20);

    // calculate the number of Fields in the header
    _numFields = (_headerLength - FILE_DESCRIPTOR_SIZE - 1) / FILE_DESCRIPTOR_SIZE;

    _columns = new List<Field>();

    for (int i = 0; i < _numFields; i++)
    {
        // read the field name: 11 bytes, NUL-terminated/padded.
        char[] buffer = reader.ReadChars(11);
        string name = new string(buffer);
        int nullPoint = name.IndexOf((char)0);
        if (nullPoint != -1) name = name.Substring(0, nullPoint);

        // read the field type
        char code = (char)reader.ReadByte();

        // read the field data address, offset from the start of the record.
        int dataAddress = reader.ReadInt32();

        // read the field length in bytes
        byte tempLength = reader.ReadByte();

        // read the field decimal count in bytes
        byte decimalcount = reader.ReadByte();

        // read the reserved bytes.
        //reader.skipBytes(14);
        reader.ReadBytes(14);

        // De-duplicate the column name by appending an increasing numeric
        // suffix until it no longer collides with an existing column.
        int j = 1;
        string tempName = name;
        while (_dataTable.Columns.Contains(tempName))
        {
            tempName = name + j;
            j++;
        }

        name = tempName;
        Field myField = new Field(name, code, tempLength, decimalcount) { DataAddress = dataAddress };
        _columns.Add(myField);

        // Store fields accessible by an index
        _dataTable.Columns.Add(myField);
    }

    // Last byte is a marker for the end of the field definitions.
    reader.ReadBytes(1);
}
/// <summary>
/// This systematically copies all the existing values to a new data column with the same properties,
/// but with a new data type. Values that cannot convert will be set to null.
/// </summary>
/// <param name="oldDataColumn">The old data column to update</param>
/// <param name="newDataType">The new data type that the column should become</param>
/// <param name="currentRow">The row up to which values should be changed for</param>
/// <param name="columnIndex">The column index of the field being changed</param>
/// <param name="table">The Table to apply this strategy to.</param>
/// <returns>An integer list showing the index values of the rows where the conversion failed.</returns>
public List<int> UpgradeColumn(Field oldDataColumn, Type newDataType, int currentRow, int columnIndex, DataTable table)
{
    List<int> failureList = new List<int>();
    object[] newValues = new object[table.Rows.Count];
    string name = oldDataColumn.ColumnName;
    Field dc = new Field(oldDataColumn.ColumnName, newDataType)
    {
        Length = oldDataColumn.Length,
        DecimalCount = oldDataColumn.DecimalCount
    };

    // First pass: convert every existing value to the new type, remembering
    // which rows fail so the caller can report them.
    for (int row = 0; row < currentRow; row++)
    {
        try
        {
            // Read consistently from the supplied table. The original mixed a
            // DBNull check on 'table' with a value read from '_dataTable',
            // which silently breaks when the two are different tables.
            object obj = table.Rows[row][name];
            if (obj is DBNull)
            {
                newValues[row] = null;
            }
            else
            {
                newValues[row] = Convert.ChangeType(obj, newDataType);
            }
        }
        catch
        {
            failureList.Add(row);
        }
    }

    // Swap in the new column at the old column's ordinal position.
    int ord = oldDataColumn.Ordinal;
    table.Columns.Remove(oldDataColumn);
    table.Columns.Add(dc);
    dc.SetOrdinal(ord);
    _columns[columnIndex] = dc;

    // Second pass: write the converted values back; failed/null rows become DBNull.
    for (int row = 0; row < currentRow; row++)
    {
        if (newValues[row] == null)
            table.Rows[row][name] = DBNull.Value;
        else
            table.Rows[row][name] = newValues[row];
    }

    return failureList;
}
/// <summary>
/// Parse the character data for one column into an object ready for insertion into a data row
/// </summary>
/// <param name="field">Column metadata describing the type and width of the value.</param>
/// <param name="currentRow">The row being parsed, used in error reporting and column upgrades.</param>
/// <param name="cBuffer">The raw characters for this column.</param>
/// <param name="table">Table used to upgrade the column type on numeric parse failure; may be null.</param>
/// <returns>The parsed value, or DBNull.Value when the content is not parseable.</returns>
private object ParseColumn(Field field, int currentRow, char[] cBuffer, DataTable table)
{
    // If table is null, an exception will be thrown rather than attempting to upgrade the column when a parse error occurs.
    const string parseErrString = "Cannot parse {0} at row {1:D}, column {2:D} ({3}) in file {4} using field type {5}, and no DataTable to upgrade column";

    // find the field type
    char tempFieldType = field.TypeCharacter;
    object tempObject = DBNull.Value;
    switch (tempFieldType)
    {
        case 'L': // logical data type, one character (T, t, F, f, Y, y, N, n)
            char tempChar = cBuffer[0];
            // Anything that is not an affirmative marker is treated as false.
            tempObject = (tempChar == 'T') || (tempChar == 't') || (tempChar == 'Y') || (tempChar == 'y');
            break;
        case 'C': // character record.
            tempObject = new string(cBuffer).Trim().Replace("\0", string.Empty);
            break;
        case 'T':
            throw new NotSupportedException();
        case 'D': // date data type, stored as yyyyMMdd characters.
            int year;
            if (int.TryParse(new string(cBuffer, 0, 4), out year) == false) break;
            int month;
            if (int.TryParse(new string(cBuffer, 4, 2), out month) == false) break;
            int day;
            if (int.TryParse(new string(cBuffer, 6, 2), out day) == false) break;

            // Validate the components before constructing the date. Files
            // commonly use "00000000" for an empty date; previously that made
            // the DateTime constructor throw ArgumentOutOfRangeException.
            if (year < 1 || year > 9999 || month < 1 || month > 12 || day < 1 || day > DateTime.DaysInMonth(year, month)) break;

            tempObject = new DateTime(year, month, day);
            break;
        case 'F':
        case 'B':
        case 'N': // number - ESRI uses N for doubles and floats
            tempObject = ParseNumericColumn(field, currentRow, cBuffer, table, parseErrString);
            break;
        default:
            throw new NotSupportedException("Do not know how to parse Field type " + tempFieldType);
    }

    return tempObject;
}
// Parses a numeric column value, walking up a ladder of numeric types.
// If the value does not fit the field's current type, the entire column is
// upgraded (byte -> short -> int -> long -> string; float -> double ->
// decimal -> string) via UpgradeColumn, provided a table was supplied.
// With a null table, a parse failure throws InvalidDataException instead.
private object ParseNumericColumn(Field field, int currentRow, char[] cBuffer, DataTable table, string parseErrString)
{
    object tempObject;
    string tempStr = new string(cBuffer);
    tempObject = DBNull.Value;
    Type t = field.DataType;

    // Built lazily: the message is only needed on a parse failure with no table.
    Lazy<string> errorMessage = new Lazy<string>(() =>
    {
        return String.Format(parseErrString, tempStr, currentRow, field.Ordinal, field.ColumnName, _fileName, t);
    });

    if (t == typeof(byte))
    {
        byte temp;
        if (byte.TryParse(tempStr.Trim(), out temp))
            tempObject = temp;
        else
        {
            // It is possible to store values larger than 255 with
            // three characters. Therefore, we may have to upgrade the
            // numeric type for the entire field to short.
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            short upTest;
            if (short.TryParse(tempStr.Trim(), out upTest))
            {
                // Since we were successful, we should upgrade the field to storing short values instead of byte values.
                UpgradeColumn(field, typeof(short), currentRow, field.Ordinal, table);
                tempObject = upTest;
            }
            else
            {
                // Not numeric at all: fall back to storing the raw string.
                UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
                tempObject = tempStr;
            }
        }
    }
    else if (t == typeof(short))
    {
        short temp;
        if (short.TryParse(tempStr.Trim(), out temp))
            tempObject = temp;
        else
        {
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            int upTest;
            if (int.TryParse(tempStr.Trim(), out upTest))
            {
                // Value overflows short: widen the column to int.
                UpgradeColumn(field, typeof(int), currentRow, field.Ordinal, table);
                tempObject = upTest;
            }
            else
            {
                UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
                tempObject = tempStr;
            }
        }
    }
    else if (t == typeof(int))
    {
        int temp;
        if (int.TryParse(tempStr.Trim(), out temp))
            tempObject = temp;
        else
        {
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            long upTest;
            if (long.TryParse(tempStr.Trim(), out upTest))
            {
                // Value overflows int: widen the column to long.
                UpgradeColumn(field, typeof(long), currentRow, field.Ordinal, table);
                tempObject = upTest;
            }
            else
            {
                UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
                tempObject = tempStr;
            }
        }
    }
    else if (t == typeof(long))
    {
        long temp;
        if (long.TryParse(tempStr.Trim(), out temp))
            tempObject = temp;
        else
        {
            // long is the widest integral type used; the only remaining
            // upgrade is to string.
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
            tempObject = tempStr;
        }
    }
    else if (t == typeof(float))
    {
        float temp;
        // Floating-point parses use the shared conversion format provider
        // (not tempStr.Trim()) for culture-stable parsing.
        if (float.TryParse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider, out temp))
            tempObject = temp;
        else
        {
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            double upTest;
            if (double.TryParse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider, out upTest))
            {
                UpgradeColumn(field, typeof(double), currentRow, field.Ordinal, table);
                tempObject = upTest;
            }
            else
            {
                UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
                tempObject = tempStr;
            }
        }
    }
    else if (t == typeof(double))
    {
        double temp;
        if (double.TryParse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider, out temp))
            tempObject = temp;
        else if (String.IsNullOrWhiteSpace(tempStr)) //handle case when value is NULL
            tempObject = DBNull.Value;
        else
        {
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            decimal upTest;
            if (decimal.TryParse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider, out upTest))
            {
                UpgradeColumn(field, typeof(decimal), currentRow, field.Ordinal, table);
                tempObject = upTest;
            }
            else
            {
                UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
                tempObject = tempStr;
            }
        }
    }
    else if (t == typeof(decimal))
    {
        decimal temp;
        if (decimal.TryParse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider, out temp))
            tempObject = temp;
        else
        {
            if (null == table) throw new InvalidDataException(errorMessage.Value);
            UpgradeColumn(field, typeof(string), currentRow, field.Ordinal, table);
            tempObject = tempStr;
        }
    }

    return tempObject;
}
/// <inheritdoc/>
public void CopyTableSchema(DataTable sourceTable)
{
    DataTable.Columns.Clear();
    foreach (DataColumn sourceColumn in sourceTable.Columns)
    {
        if (sourceColumn is null) continue;

        // Clone the column definition and wrap it in a Field before adding.
        var clone = new DataColumn(sourceColumn.ColumnName, sourceColumn.DataType, sourceColumn.Expression, sourceColumn.ColumnMapping);
        DataTable.Columns.Add(new Field(clone));
    }
}
/// <summary>
/// This gets a copy of the actual internal list of columns.
/// This should never be used to make changes to the column collection.
/// </summary>
public override DataColumn[] GetColumns()
{
    var count = _attributeTable.Columns.Count;
    var copies = new DataColumn[count];
    for (var index = 0; index < count; index++)
    {
        var source = _attributeTable.Columns[index];
        // Return fresh Field instances so callers cannot mutate internal state.
        copies[index] = new Field(source.ColumnName, source.TypeCharacter, source.Length, source.DecimalCount);
    }

    return copies;
}
/// <summary>
/// Add a field, registering it in both lookup tables so it can be found
/// either by ordinal position or by column name.
/// </summary>
/// <param name="field">The field to register.</param>
public void Add(Field field)
{
    var ordinal = field.Ordinal;
    var columnName = field.ColumnName;
    _posLookup.Add(ordinal, field);
    _nameLookup.Add(columnName, field);
}
/// <inheritdoc/>
public void CopyFeatures(IFeatureSet source, bool copyAttributes)
{
    ProgressMeter = new ProgressMeter(ProgressHandler, "Copying Features", ShapeIndices.Count);
    Vertex = source.Vertex.Copy();
    _shapeIndices = new List<ShapeRange>();
    foreach (ShapeRange range in source.ShapeIndices)
    {
        _shapeIndices.Add(range.Copy());
    }

    if (copyAttributes)
    {
        // Mirror the source schema as Field columns on the local DataTable.
        foreach (DataColumn dc in source.GetColumns())
        {
            if (dc != null)
            {
                DataColumn outCol = new DataColumn(dc.ColumnName, dc.DataType, dc.Expression, dc.ColumnMapping);
                Field fld = new Field(outCol);
                DataTable.Columns.Add(fld);
            }
        }
    }

    if (source.AttributesPopulated)
    {
        // Handle data table content directly
        if (!IndexMode)
        {
            // If not in index mode, just handle this using features
            Features.SuspendEvents();
            int i = 0;
            foreach (IFeature f in source.Features)
            {
                IFeature copy = AddFeature(f.BasicGeometry);
                copy.ShapeIndex = ShapeIndices[i];
                if (copyAttributes)
                {
                    copy.DataRow.ItemArray = f.DataRow.ItemArray.Copy();
                }

                i++;
            }

            Features.ResumeEvents();
        }
        else
        {
            // We need to copy the attributes, but just copy a datarow
            if (copyAttributes)
            {
                foreach (DataRow row in source.DataTable.Rows)
                {
                    DataRow result = DataTable.NewRow();
                    result.ItemArray = row.ItemArray.Copy();
                    DataTable.Rows.Add(result);
                }
            }
        }
    }
    else
    {
        AttributesPopulated = false;

        // Handle data table content directly
        if (!IndexMode)
        {
            // If not in index mode, just handle this using features
            Features.SuspendEvents();
            int i = 0;
            foreach (IFeature f in source.Features)
            {
                IFeature result = AddFeature(f.BasicGeometry);
                result.ShapeIndex = ShapeIndices[i];
                i++;
            }

            Features.ResumeEvents();
        }

        if (copyAttributes)
        {
            // We need to copy the attributes, but use the page system
            int maxRow = NumRows();
            const int pageSize = 10000;
            int numPages = (int)Math.Ceiling(maxRow / (double)pageSize);
            for (int i = 0; i < numPages; i++)
            {
                int numRows = pageSize;
                if (i == numPages - 1)
                {
                    // BUGFIX: the last page holds the rows remaining after the
                    // full pages. The original computed "numPages - (pageSize * i)",
                    // which requested the wrong number of rows on the final page.
                    numRows = maxRow - (pageSize * i);
                }

                DataTable dt = source.GetAttributes(i * pageSize, numRows);
                SetAttributes(i * pageSize, dt);
            }
        }
    }
}
/// <summary>
/// Convert value to bytes and place in ByteContent at correct location
/// </summary>
/// <param name="field">Column information for the conversion</param>
/// <param name="value">The date to store; written as an 8-character yyyyMMdd string.</param>
public void SetColumn(Field field, DateTime value)
{
    // Format with the invariant culture so the output is always Gregorian
    // yyyyMMdd with ASCII digits; the culture-sensitive overload could emit
    // a different calendar or digit set under some thread cultures.
    SetColumn(field, value.ToString("yyyyMMdd", CultureInfo.InvariantCulture));
}
/// <summary>
/// Convert the byte data for a column into the appropriate data value
/// </summary>
/// <param name="field">Column information for data value being parsed</param>
/// <returns>The parsed value; DBNull.Value for an unparseable date, null only
/// if the type character matches no case before assignment.</returns>
public object ParseColumn(Field field)
{
    // Decode the column's slice of ByteContent into characters.
    var cBuffer = new char[field.Length];
    Encoding.Default.GetChars(ByteContent, field.DataAddress, field.Length, cBuffer, 0);

    object tempObject = null;
    switch (field.TypeCharacter)
    {
        case 'L': // logical data type, one character (T, t, F, f, Y, y, N, n)
            char tempChar = cBuffer[0];
            // Anything other than an affirmative marker is treated as false.
            if ((tempChar == 'T') || (tempChar == 't') || (tempChar == 'Y') || (tempChar == 'y'))
                tempObject = true;
            else
                tempObject = false;
            break;
        case 'C': // character record.
            tempObject = new string(cBuffer).Trim(new[] { '\0' });
            break;
        case 'T':
            throw new NotSupportedException();
        case 'D': // date data type, stored as yyyyMMdd characters.
            var tempString = new string(cBuffer, 0, 4);
            int year;
            if (int.TryParse(tempString, out year) == false) break;
            int month;
            tempString = new string(cBuffer, 4, 2);
            if (int.TryParse(tempString, out month) == false) break;
            int day;
            tempString = new string(cBuffer, 6, 2);
            if (int.TryParse(tempString, out day) == false) break;

            tempObject = new DateTime(year, month, day);
            break;
        case 'F':
        case 'B':
        case 'N': // number - Esri uses N for doubles and floats
            // Strip NUL padding then whitespace before parsing.
            string tempStr = new string(cBuffer).Trim(new[] { '\0' }).Trim();
            tempObject = DBNull.Value;

            // Parse according to the column's declared CLR type. Unlike the
            // char[]-based overload, these Parse calls throw on malformed input.
            switch (Type.GetTypeCode(field.DataType))
            {
                case TypeCode.Double:
                    tempObject = double.Parse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider);
                    break;
                case TypeCode.Byte:
                    tempObject = byte.Parse(tempStr);
                    break;
                case TypeCode.Int16:
                    tempObject = short.Parse(tempStr);
                    break;
                case TypeCode.Int32:
                    tempObject = int.Parse(tempStr);
                    break;
                case TypeCode.Int64:
                    tempObject = long.Parse(tempStr);
                    break;
                case TypeCode.Single:
                    tempObject = float.Parse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider);
                    break;
                case TypeCode.Decimal:
                    tempObject = decimal.Parse(tempStr, NumberStyles.Number, NumberConverter.NumberConversionFormatProvider);
                    break;
            }

            break;
        default:
            throw new NotSupportedException("Do not know how to parse Field type " + field.DataType);
    }

    return tempObject;
}
/// <summary>
/// Convert value to bytes and place in ByteContent at correct location
/// </summary>
/// <param name="field">Column information for the conversion</param>
/// <param name="value">The logical value; written as a single 'T' or 'F' character.</param>
public void SetColumn(Field field, bool value)
{
    var flag = value ? "T" : "F";
    Encoding.Default.GetBytes(flag, 0, 1, ByteContent, field.DataAddress);
    Modified = true;
}
/// <summary>
/// Convert value to bytes and place in ByteContent at correct location
/// </summary>
/// <param name="field">Column information for the conversion</param>
/// <param name="value">The integer value to store, right-aligned in the field width.</param>
public void SetColumn(Field field, Int64 value)
{
    // Format with the invariant culture: the plain ToString() would use the
    // thread culture's negative sign / digits, which other cultures can vary.
    string str = value.ToString(CultureInfo.InvariantCulture);
    string text = str.PadLeft(field.Length, ' ');

    // NOTE(review): if the formatted value is wider than the field, this keeps
    // the leftmost characters, silently dropping the least significant digits —
    // preserved from the original; TODO confirm overflow handling is intended.
    string dbaseString = text.Substring(0, field.Length);
    char[] chars = dbaseString.ToCharArray();
    Encoding.Default.GetBytes(chars, 0, chars.Length, ByteContent, field.DataAddress);
    Modified = true;
}
/// <summary>
/// Convert value to bytes and place in ByteContent at correct location
/// </summary>
/// <param name="field">Column information for the conversion</param>
/// <param name="value">The decimal value, encoded via the field's NumberConverter.</param>
public void SetColumn(Field field, decimal value)
{
    var encoded = field.NumberConverter.ToChar(value);
    Encoding.Default.GetBytes(encoded, 0, encoded.Length, ByteContent, field.DataAddress);
    Modified = true;
}
/// <summary>
/// Convert value to bytes and place in ByteContent at correct location
/// </summary>
/// <param name="field">Column information for the conversion</param>
/// <param name="dbNull">Ignored; the field is blanked with space characters.</param>
public void SetColumn(Field field, DBNull dbNull)
{
    // A null value is represented as a field-width run of spaces.
    var blanks = new string(' ', field.Length).ToCharArray();
    Encoding.Default.GetBytes(blanks, 0, blanks.Length, ByteContent, field.DataAddress);
    Modified = true;
}
/// <summary>
/// Convert value to bytes and place in ByteContent at correct location
/// </summary>
/// <param name="field">Column information for the conversion</param>
/// <param name="value">The float value to store.</param>
public void SetColumn(Field field, float value)
{
    if (field.TypeCharacter == 'F')
    {
        // Format with the same provider the reader uses to parse 'F' columns,
        // so round-tripping is culture-independent; the plain ToString() would
        // emit a culture-specific decimal separator that the reader rejects.
        SetColumn(field, value.ToString(NumberConverter.NumberConversionFormatProvider));
    }
    else
    {
        char[] test = field.NumberConverter.ToChar(value);
        Encoding.Default.GetBytes(test, 0, test.Length, ByteContent, field.DataAddress);
        Modified = true;
    }
}