/// <summary>
/// Reads the column schema (field descriptors) from the header of a dBASE (.dbf) file.
/// </summary>
/// <param name="dbfFileName">Path to the .dbf file to inspect.</param>
/// <param name="encoding">Encoding used to decode field names inside each descriptor.</param>
/// <returns>The field descriptors found in the file header, in file order.</returns>
/// <exception cref="NotImplementedException">
/// Thrown when the declared header length is inconsistent with the 32-byte descriptor layout.
/// (NOTE(review): NotSupportedException/InvalidDataException would describe this better,
/// but the original type is kept so existing callers' catch clauses still match.)
/// </exception>
public static List<DbfFieldDescriptor> GetDbfSchema(string dbfFileName, Encoding encoding)
{
    // using-blocks guarantee the file handle is released even when parsing throws;
    // the original only closed the stream on the success path.
    using (System.IO.Stream stream = new System.IO.FileStream(dbfFileName, System.IO.FileMode.Open, System.IO.FileAccess.Read))
    using (System.IO.BinaryReader reader = new System.IO.BinaryReader(stream))
    {
        byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfHeader)));

        DbfHeader header = IRI.Ket.IO.BinaryStream.ByteArrayToStructure<DbfHeader>(buffer);

        // DBF header layout: 32-byte fixed header + n * 32-byte field descriptors
        // + 1 terminator byte (0x0D), so (LengthOfHeader - 33) must divide evenly by 32.
        if ((header.LengthOfHeader - 33) % 32 != 0)
        {
            throw new NotImplementedException();
        }

        int numberOfFields = (header.LengthOfHeader - 33) / 32;

        var columns = new List<DbfFieldDescriptor>(numberOfFields);

        for (int i = 0; i < numberOfFields; i++)
        {
            buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfFieldDescriptor)));

            columns.Add(DbfFieldDescriptor.Parse(buffer, encoding));
        }

        return columns;
    }
}
/// <summary>
/// Writes a <see cref="System.Data.DataTable"/> to a dBASE (.dbf) file:
/// header, field descriptors, terminator, fixed-width records, end-of-file marker.
/// </summary>
/// <param name="fileName">Destination path; an existing file is overwritten (FileMode.Create).</param>
/// <param name="table">Rows and columns to serialize.</param>
/// <param name="encoding">Encoding used for the header and for each cell's text.</param>
public static void Write(string fileName, System.Data.DataTable table, Encoding encoding)
{
    // DBF structural marker bytes (the original obtained these via byte.Parse on hex strings).
    const byte headerTerminator = 0x0D;
    const byte notDeletedFlag = 0x20;   // every live record starts with a space; 0x2A (*) marks deleted
    const byte endOfFile = 0x1A;

    // using-blocks guarantee the file handle is released even when a row fails to serialize.
    using (System.IO.Stream stream = new System.IO.FileStream(fileName, System.IO.FileMode.Create))
    using (System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream))
    {
        List<DbfFieldDescriptor> columns = MakeDbfFields(table.Columns);

        DbfHeader header = new DbfHeader(table.Rows.Count, table.Columns.Count, GetRecordLength(columns), encoding);

        writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(header));

        foreach (var item in columns)
        {
            writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(item));
        }

        writer.Write(headerTerminator);

        for (int i = 0; i < table.Rows.Count; i++)
        {
            writer.Write(notDeletedFlag);

            for (int j = 0; j < table.Columns.Count; j++)
            {
                // Fixed-width field, zero-padded on the right.
                byte[] temp = new byte[columns[j].Length];

                string value = table.Rows[i][j].ToString().Trim();

                // Encode separately and copy at most the field width: the original
                // encoded directly into `temp` and threw ArgumentException whenever
                // the encoded value was longer than the field. Truncation is the
                // conventional behavior for fixed-width DBF fields.
                byte[] encoded = encoding.GetBytes(value);

                Array.Copy(encoded, temp, Math.Min(encoded.Length, temp.Length));

                writer.Write(temp);
            }
        }

        writer.Write(endOfFile);
    }
}
/// <summary>
/// Writes a list of objects to a dBASE (.dbf) file, one record per item, using
/// <paramref name="mapping"/> to extract each column's value. Also writes a sidecar
/// .cpg file recording the encoding name.
/// </summary>
/// <param name="fileName">Destination path; an existing file is overwritten.</param>
/// <param name="values">Items to serialize, one record each.</param>
/// <param name="mapping">Per-column value extractors; must align 1:1 with <paramref name="columns"/>.</param>
/// <param name="columns">Field descriptors defining each column's fixed width.</param>
/// <param name="encoding">Encoding used for the header and field text.</param>
/// <exception cref="NotImplementedException">Thrown when columns and mapping counts differ (original contract, preserved).</exception>
/// <exception cref="InvalidOperationException">Thrown when serialization fails; includes the failing record index.</exception>
public static void Write<T>(string fileName, List<T> values, List<Func<T, object>> mapping, List<DbfFieldDescriptor> columns, Encoding encoding)
{
    if (columns.Count != mapping.Count)
    {
        throw new NotImplementedException();
    }

    // DBF structural marker bytes.
    const byte headerTerminator = 0x0D;
    const byte notDeletedFlag = 0x20;   // live record flag; 0x2A (*) would mark deleted
    const byte endOfFile = 0x1A;

    int recordIndex = 0;

    try
    {
        // using-blocks guarantee the file handle is released even when a record fails.
        using (System.IO.Stream stream = new System.IO.FileStream(fileName, System.IO.FileMode.Create))
        using (System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream))
        {
            DbfHeader header = new DbfHeader(values.Count, mapping.Count, GetRecordLength(columns), encoding);

            writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(header));

            foreach (var item in columns)
            {
                writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(item));
            }

            writer.Write(headerTerminator);

            for (int i = 0; i < values.Count; i++)
            {
                recordIndex = i;

                writer.Write(notDeletedFlag);

                for (int j = 0; j < mapping.Count; j++)
                {
                    // Fixed-width field, zero-padded on the right.
                    byte[] temp = new byte[columns[j].Length];

                    object value = mapping[j](values[i]);

                    if (value != null)
                    {
                        // Truncate to the field width instead of letting GetBytes
                        // throw when the encoded value is longer than the field.
                        byte[] encoded = encoding.GetBytes(value.ToString());

                        Array.Copy(encoded, temp, Math.Min(encoded.Length, temp.Length));
                    }

                    writer.Write(temp);
                }
            }

            writer.Write(endOfFile);
        }

        // Sidecar code-page file so consumers know which encoding was used.
        System.IO.File.WriteAllText(GetCpgFileName(fileName), encoding.BodyName);
    }
    catch (Exception ex)
    {
        // The original caught the exception, formatted a message into unused locals,
        // and swallowed it — leaving a silently corrupt/partial file. Rethrow with
        // the failing record index (which `control` was evidently intended to report).
        throw new InvalidOperationException(
            "Failed to write dbf file '" + fileName + "' at record " + recordIndex, ex);
    }
}
/// <summary>
/// Reads all records of a dBASE (.dbf) file into a jagged array, one inner array
/// per record, with cell values converted via <c>MapFunction</c> per field type.
/// Field names are decoded with <c>DbfFileFormat.fieldsEncoding</c>.
/// </summary>
/// <param name="dbfFileName">Path to the .dbf file.</param>
/// <param name="tableName">Unused here; kept for interface compatibility (the DataTable-building code is commented out).</param>
/// <returns>
/// One entry per record declared in the header. Entries for deleted records
/// (flag byte 0x2A, '*') are left <c>null</c> — callers must tolerate null rows.
/// </returns>
/// <exception cref="NotImplementedException">
/// Thrown when the declared header length is inconsistent with the 32-byte descriptor layout
/// (original exception type preserved for caller compatibility).
/// </exception>
public static object[][] ReadToObject(string dbfFileName, string tableName)
{
    // using-blocks guarantee the file handle is released even when parsing throws.
    using (System.IO.Stream stream = new System.IO.FileStream(dbfFileName, System.IO.FileMode.Open, System.IO.FileAccess.Read))
    using (System.IO.BinaryReader reader = new System.IO.BinaryReader(stream))
    {
        byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfHeader)));

        DbfHeader header = IRI.Ket.IO.BinaryStream.ByteArrayToStructure<DbfHeader>(buffer);

        // 32-byte fixed header + n * 32-byte descriptors + 1 terminator byte.
        if ((header.LengthOfHeader - 33) % 32 != 0)
        {
            throw new NotImplementedException();
        }

        int numberOfFields = (header.LengthOfHeader - 33) / 32;

        var columns = new List<DbfFieldDescriptor>(numberOfFields);

        for (int i = 0; i < numberOfFields; i++)
        {
            buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfFieldDescriptor)));

            columns.Add(DbfFieldDescriptor.Parse(buffer, DbfFileFormat.fieldsEncoding));
        }

        //System.Data.DataTable result = MakeTableSchema(tableName, columns);
        var result = new object[header.NumberOfRecords][];

        // Skip past the full header (descriptors + terminator) to the first record.
        stream.Seek(header.LengthOfHeader, SeekOrigin.Begin);

        for (int i = 0; i < header.NumberOfRecords; i++)
        {
            // Read the whole fixed-length record first, then parse fields from the
            // buffer; this absorbs any trailing padding and avoids per-field seeks.
            buffer = reader.ReadBytes(header.LengthOfEachRecord);

            // The original leaked this reader on the deleted-record `continue`
            // path (its Close() was skipped); `using` disposes it on every path.
            using (BinaryReader recordReader = new BinaryReader(new MemoryStream(buffer)))
            {
                // Records start with a 1-byte deleted flag: 0x2A '*' = deleted, 0x20 ' ' = live.
                if (recordReader.ReadChar() == '*')
                {
                    continue;   // deleted record: result[i] stays null
                }

                object[] values = new object[columns.Count];

                for (int j = 0; j < columns.Count; j++)
                {
                    int fieldLength = columns[j].Length;

                    values[j] = MapFunction[columns[j].Type](recordReader.ReadBytes(fieldLength));
                }

                result[i] = values;
            }
        }

        return result;
    }
}