/// <summary>
/// Copies the Table schema (column names/data types)
/// from a DataTable, but doesn't copy any values.
/// </summary>
/// <param name="sourceTable">The Table to obtain schema from.</param>
public void CopyTableSchema(DataTable sourceTable)
{
    DataTable.Columns.Clear();
    foreach (DataColumn dc in sourceTable.Columns)
    {
        if (dc == null) continue;
        DataColumn outCol = new DataColumn(dc.ColumnName, dc.DataType, dc.Expression, dc.ColumnMapping);
        DataTable.Columns.Add(new Field(outCol));
    }
}
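// Illustrative usage sketch (not part of the original source; the 'source'
// parameter name is hypothetical): clone only the attribute schema from an
// existing feature set into this one.
private void ExampleCopySchemaOnly(IFeatureSet source)
{
    // After this call the DataTable has the source's columns but zero rows.
    CopyTableSchema(source.DataTable);
}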
/// <summary>
/// Copies all the features from the specified featureset.
/// </summary>
/// <param name="source">The source IFeatureSet to copy features from.</param>
/// <param name="copyAttributes">Boolean, true if the attributes should be copied as well. If this is true
/// and the attributes are not loaded, they are copied page by page through the attribute paging system.</param>
public void CopyFeatures(IFeatureSet source, bool copyAttributes)
{
    // Measure progress against the source's shape count; this featureset's own
    // indices haven't been populated yet at this point.
    ProgressMeter = new ProgressMeter(ProgressHandler, "Copying Features", source.ShapeIndices.Count);
    Vertex = source.Vertex.Copy();
    _shapeIndices = new List<ShapeRange>();
    foreach (ShapeRange range in source.ShapeIndices)
    {
        _shapeIndices.Add(range.Copy());
    }

    // Copy the schema so attribute values have matching columns to land in.
    foreach (DataColumn dc in source.GetColumns())
    {
        if (dc == null) continue;
        DataColumn outCol = new DataColumn(dc.ColumnName, dc.DataType, dc.Expression, dc.ColumnMapping);
        DataTable.Columns.Add(new Field(outCol));
    }

    if (source.AttributesPopulated)
    {
        // The attributes are already in memory, so handle the data table content directly.
        if (!IndexMode)
        {
            // Not in index mode, so build this up using features.
            Features.SuspendEvents();
            foreach (IFeature f in source.Features)
            {
                IFeature copy = AddFeature(f.BasicGeometry);
                if (copyAttributes)
                {
                    for (int col = 0; col < source.DataTable.Columns.Count; col++)
                    {
                        copy.DataRow[col] = f.DataRow[col];
                    }
                }
            }

            Features.ResumeEvents();
        }
        else
        {
            // In index mode we only need the rows, so just copy the data table.
            DataTable = source.DataTable.Copy();
        }
    }
    else
    {
        AttributesPopulated = false;
        if (!IndexMode)
        {
            // Not in index mode, so build this up using features.
            Features.SuspendEvents();
            foreach (IFeature f in source.Features)
            {
                AddFeature(f.BasicGeometry);
            }

            Features.ResumeEvents();
        }

        if (copyAttributes)
        {
            // We need to copy the attributes, but use the page system.
            int maxRow = NumRows();
            const int pageSize = 10000;
            int numPages = (int)Math.Ceiling(maxRow / (double)pageSize);
            for (int i = 0; i < numPages; i++)
            {
                int numRows = pageSize;

                // The last page holds only the remaining rows, not a full page.
                if (i == numPages - 1) numRows = maxRow - (pageSize * i);
                DataTable dt = source.GetAttributes(i * pageSize, numRows);
                SetAttributes(i * pageSize, dt);
            }
        }
    }
}
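// A minimal sketch of the same page arithmetic in isolation (hypothetical helper,
// not part of the original source), useful for sanity-checking the loop above.
private static int RowsInPage(int pageIndex, int pageSize, int maxRow)
{
    int numPages = (int)Math.Ceiling(maxRow / (double)pageSize);

    // Full pages hold pageSize rows; the last page holds only the remainder.
    // Example: maxRow = 25000, pageSize = 10000 -> pages of 10000, 10000, 5000,
    // so the final fetch is GetAttributes(20000, 5000).
    if (pageIndex == numPages - 1) return maxRow - (pageSize * pageIndex);
    return pageSize;
}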
/// <summary>
/// This systematically copies all the existing values to a new data column with the same properties,
/// but with a new data type. Values that cannot convert will be set to null.
/// </summary>
/// <param name="oldDataColumn">The old data column to update.</param>
/// <param name="newDataType">The new data type that the column should become.</param>
/// <param name="currentRow">The row up to which values should be changed.</param>
/// <param name="columnIndex">The column index of the field being changed.</param>
/// <param name="table">The Table to apply this strategy to.</param>
/// <returns>An integer list showing the index values of the rows where the conversion failed.</returns>
public List<int> UpgradeColumn(Field oldDataColumn, Type newDataType, int currentRow, int columnIndex, DataTable table)
{
    List<int> failureList = new List<int>();
    object[] newValues = new object[table.Rows.Count];
    string name = oldDataColumn.ColumnName;
    Field dc = new Field(oldDataColumn.ColumnName, newDataType)
    {
        Length = oldDataColumn.Length,
        DecimalCount = oldDataColumn.DecimalCount
    };
    for (int row = 0; row < currentRow; row++)
    {
        try
        {
            if (table.Rows[row][name] is DBNull)
            {
                newValues[row] = null;
            }
            else
            {
                // Read from the table that was passed in, not the member table.
                object obj = table.Rows[row][name];
                newValues[row] = Convert.ChangeType(obj, newDataType);
            }
        }
        catch
        {
            failureList.Add(row);
        }
    }

    // Swap in the new column at the old column's position, then restore the values.
    int ord = oldDataColumn.Ordinal;
    table.Columns.Remove(oldDataColumn);
    table.Columns.Add(dc);
    dc.SetOrdinal(ord);
    _columns[columnIndex] = dc;
    for (int row = 0; row < currentRow; row++)
    {
        if (newValues[row] == null) table.Rows[row][name] = DBNull.Value;
        else table.Rows[row][name] = newValues[row];
    }

    return failureList;
}
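// Illustrative usage sketch (hypothetical field and naming; not part of the
// original source): widen a byte column to short once a value such as "300"
// is encountered, collecting the rows that still fail to convert.
private void ExampleWidenColumn(Field byteField, int columnIndex, DataTable table)
{
    // Passing table.Rows.Count as currentRow converts every row read so far.
    List<int> failures = UpgradeColumn(byteField, typeof(short), table.Rows.Count, columnIndex, table);
    if (failures.Count > 0)
    {
        // Rows that could not convert were written back as DBNull.
        Console.WriteLine("Rows failing conversion: " + failures.Count);
    }
}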
private void UpdateSchema()
{
    List<Field> tempColumns = new List<Field>();
    _recordLength = 1; // deletion flag character
    _numRecords = Table.Rows.Count;
    _updateDate = DateTime.Now;
    _headerLength = FileDescriptorSize + FileDescriptorSize * Table.Columns.Count + 1;
    if (_columns == null) _columns = new List<Field>();

    // Delete any fields from the columns list that are no longer in the data Table.
    List<Field> removeFields = new List<Field>();
    foreach (Field fld in _columns)
    {
        if (Table.Columns.Contains(fld.ColumnName) == false)
            removeFields.Add(fld);
        else
            tempColumns.Add(fld);
    }

    foreach (Field field in removeFields)
    {
        _columns.Remove(field);
    }

    // Add new columns that exist in the data Table, but don't have a matching field yet.
    if (Table.Columns != null)
    {
        foreach (DataColumn dc in Table.Columns)
        {
            if (ColumnNameExists(dc.ColumnName)) continue;
            Field fld = dc as Field ?? new Field(dc);
            tempColumns.Add(fld);
        }
    }

    _columns = tempColumns;

    // Recalculate the record length.
    // (Current calculation fix proposed by Aerosol.)
    _recordLength = 1;
    foreach (Field fld in Columns)
    {
        _recordLength = _recordLength + fld.Length;
    }
}
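// Worked example of the lengths computed above (hypothetical schema): a table
// with three fields of widths 10, 8 and 4 gives _recordLength = 1 + 10 + 8 + 4
// = 23 (the leading byte is the deletion flag), and with FileDescriptorSize = 32
// the header is 32 + 32 * 3 + 1 = 129 bytes (the trailing byte is the 0x0D
// field-descriptor terminator).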
///// <summary>
///// Read a single dbase record
///// </summary>
///// <returns>Returns a DataRow with information appropriate for the current row in the Table</returns>
//private DataRow ReadTableRowFromBytes(int currentRow)
//{
//    DataRow result = _dataTable.NewRow();
//    long start;
//    if (_hasDeletedRecords == false)
//        start = currentRow * _recordLength;
//    else
//        start = _offsets[currentRow];
//    for (int col = 0; col < _dataTable.Columns.Count; col++)
//    {
//        // find the length of the field.
//        Field currentField = _columns[col];
//
//        // find the field type
//        char tempFieldType = currentField.TypeCharacter;
//
//        // read the data.
//        byte[] cBuffer = new byte[currentField.Length];
//        Array.Copy(_byteContent, start, cBuffer, 0, currentField.Length);
//        start += currentField.Length;
//        object tempObject = DBNull.Value;
//        if (IsNull(cBuffer)) continue;
//        switch (tempFieldType)
//        {
//            case 'L': // logical data type, one character (T,t,F,f,Y,y,N,n)
//                char tempChar = (char)cBuffer[0];
//                tempObject = (tempChar == 'T') || (tempChar == 't') || (tempChar == 'Y') || (tempChar == 'y');
//                break;
//            case 'C': // character record.
//                tempObject = Encoding.Default.GetString(cBuffer).Trim().Replace("\0", "");
//                break;
//            case 'T': // timestamps are not supported.
//                throw new NotSupportedException();
//            case 'D': // date data type, stored as YYYYMMDD.
//                string tempString = Encoding.Default.GetString(cBuffer, 0, 4);
//                int year;
//                if (int.TryParse(tempString, out year) == false) break;
//                int month;
//                tempString = Encoding.Default.GetString(cBuffer, 4, 2);
//                if (int.TryParse(tempString, out month) == false) break;
//                int day;
//                tempString = Encoding.Default.GetString(cBuffer, 6, 2);
//                if (int.TryParse(tempString, out day) == false) break;
//                tempObject = new DateTime(year, month, day);
//                break;
//            case 'F':
//            case 'B':
//            case 'N': // number - ESRI uses N for doubles and floats
//                string tempStr = Encoding.Default.GetString(cBuffer);
//                tempObject = DBNull.Value;
//                Type t = currentField.DataType;
//                if (t == typeof(byte))
//                {
//                    byte temp;
//                    if (byte.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        // It is possible to store values larger than 255 with
//                        // three characters, so we may have to upgrade the
//                        // numeric type for the entire field to short.
//                        short upTest;
//                        if (short.TryParse(tempStr.Trim(), out upTest))
//                        {
//                            // Since parsing succeeded, upgrade the field to store short values instead of byte values.
//                            UpgradeColumn(currentField, typeof(short), currentRow, col, _dataTable);
//                            tempObject = upTest;
//                        }
//                        else
//                        {
//                            UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                            tempObject = tempStr;
//                        }
//                    }
//                }
//                else if (t == typeof(short))
//                {
//                    short temp;
//                    if (short.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        int upTest;
//                        if (int.TryParse(tempStr.Trim(), out upTest))
//                        {
//                            UpgradeColumn(currentField, typeof(int), currentRow, col, _dataTable);
//                            tempObject = upTest;
//                        }
//                        else
//                        {
//                            UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                            tempObject = tempStr;
//                        }
//                    }
//                }
//                else if (t == typeof(int))
//                {
//                    int temp;
//                    if (int.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        long upTest;
//                        if (long.TryParse(tempStr.Trim(), out upTest))
//                        {
//                            UpgradeColumn(currentField, typeof(long), currentRow, col, _dataTable);
//                            tempObject = upTest;
//                        }
//                        else
//                        {
//                            UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                            tempObject = tempStr;
//                        }
//                    }
//                }
//                else if (t == typeof(long))
//                {
//                    long temp;
//                    if (long.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                        tempObject = tempStr;
//                    }
//                }
//                else if (t == typeof(float))
//                {
//                    float temp;
//                    if (float.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        double upTest;
//                        if (double.TryParse(tempStr.Trim(), out upTest))
//                        {
//                            UpgradeColumn(currentField, typeof(double), currentRow, col, _dataTable);
//                            tempObject = upTest;
//                        }
//                        else
//                        {
//                            UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                            tempObject = tempStr;
//                        }
//                    }
//                }
//                else if (t == typeof(double))
//                {
//                    double temp;
//                    if (double.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        decimal upTest;
//                        if (decimal.TryParse(tempStr.Trim(), out upTest))
//                        {
//                            UpgradeColumn(currentField, typeof(decimal), currentRow, col, _dataTable);
//                            tempObject = upTest;
//                        }
//                        else
//                        {
//                            UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                            tempObject = tempStr;
//                        }
//                    }
//                }
//                else if (t == typeof(decimal))
//                {
//                    decimal temp;
//                    if (decimal.TryParse(tempStr.Trim(), out temp))
//                        tempObject = temp;
//                    else
//                    {
//                        UpgradeColumn(currentField, typeof(string), currentRow, col, _dataTable);
//                        tempObject = tempStr;
//                    }
//                }
//                break;
//            default:
//                throw new NotSupportedException("Do not know how to parse Field type " + tempFieldType);
//        }
//
//        result[currentField.ColumnName] = tempObject;
//    }
//    return result;
//}

/// <summary>
/// Read the header data from the DBF file.
/// </summary>
/// <param name="reader">BinaryReader containing the header.</param>
private void ReadTableHeader(BinaryReader reader)
{
    // type of reader.
    _fileType = reader.ReadByte();
    if (_fileType != 0x03)
        throw new NotSupportedException("Unsupported DBF reader Type " + _fileType);

    // parse the update date information.
    int year = reader.ReadByte();
    int month = reader.ReadByte();
    int day = reader.ReadByte();
    _updateDate = new DateTime(year + 1900, month, day);

    // read the number of records.
    _numRecords = reader.ReadInt32();

    // read the length of the header structure.
    _headerLength = reader.ReadInt16();

    // read the length of a record.
    _recordLength = reader.ReadInt16();

    // skip the reserved bytes in the header.
    reader.ReadBytes(20);

    // calculate the number of Fields in the header.
    _numFields = (_headerLength - FileDescriptorSize - 1) / FileDescriptorSize;
    _columns = new List<Field>();
    for (int i = 0; i < _numFields; i++)
    {
        // read the field name, trimming at the first null terminator.
        char[] buffer = reader.ReadChars(11);
        string name = new string(buffer);
        int nullPoint = name.IndexOf((char)0);
        if (nullPoint != -1) name = name.Substring(0, nullPoint);

        // read the field type.
        char code = (char)reader.ReadByte();

        // read the field data address, offset from the start of the record.
        int dataAddress = reader.ReadInt32();

        // read the field length in bytes.
        byte tempLength = reader.ReadByte();

        // read the field decimal count in bytes.
        byte decimalCount = reader.ReadByte();

        // skip the reserved bytes.
        reader.ReadBytes(14);

        // ensure the column name is unique within the table by appending a numeric suffix.
        int j = 1;
        string tempName = name;
        while (_dataTable.Columns.Contains(tempName))
        {
            tempName = name + j;
            j++;
        }

        name = tempName;
        Field myField = new Field(name, code, tempLength, decimalCount);
        myField.DataAddress = dataAddress; // not sure what this does yet

        // Store fields accessible by an index.
        _columns.Add(myField);
        _dataTable.Columns.Add(myField);
    }

    // Last byte is a marker for the end of the field definitions.
    reader.ReadBytes(1);
}
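// For reference, the dBASE III header layout this method walks (a summary added
// here for orientation, not original source documentation):
//   byte 0        file type (0x03)
//   bytes 1-3     last update date as YY MM DD (year offset from 1900)
//   bytes 4-7     number of records (Int32)
//   bytes 8-9     header length in bytes (Int16)
//   bytes 10-11   record length in bytes (Int16)
//   bytes 12-31   reserved (skipped above)
// then one 32-byte field descriptor per column and a final 0x0D terminator,
// which is why _numFields = (_headerLength - 32 - 1) / 32.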