// Reads the DBF header and the field descriptor array from the underlying reader.
// Descriptors are read until the 0x0D terminator (with a 400-field safety cap);
// descriptors flagged as System are skipped. The terminator byte and the 263-byte
// VFP backlink area are then consumed, leaving the reader at the first record.
private void ReadHeader() {
    if (reader == null) {
        return;
    }

    byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
    GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    try {
        this.header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
    } finally {
        // Always release the pin, even if the marshal throws.
        handle.Free();
    }

    fields = new List<DBFFieldDescriptor>();
    // 13 (0x0D) terminates the descriptor area. The -1 check guards against a
    // truncated stream so a malformed file cannot make us marshal garbage
    // (consistent with the encoding-aware ReadDBF overload). The 400-field cap
    // is a defensive limit well above any valid DBF column count.
    while ((reader.PeekChar() != 13) && (reader.PeekChar() != -1) && (fields.Count < 400)) {
        buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DBFFieldDescriptor)));
        handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try {
            var fieldDescriptor = (DBFFieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFFieldDescriptor));
            // Hidden/system columns are not exposed to consumers.
            if ((fieldDescriptor.Flags & DBFFieldFlags.System) != DBFFieldFlags.System) {
                fields.Add(fieldDescriptor);
            }
        } finally {
            handle.Free();
        }
    }

    // Consume the 0x0D terminator and the backlink block so subsequent reads
    // start at the first data record.
    byte headerTerminator = reader.ReadByte();
    byte[] backlink = reader.ReadBytes(263);
}
// Returns the column names of a DBF file, or null when the file does not exist.
public static string[] GetFields(string dbfFile) {
    if (false == File.Exists(dbfFile)) {
        // No file, nothing to describe.
        return null;
    }

    BinaryReader br = null;
    try {
        br = new BinaryReader(File.OpenRead(dbfFile));
        // NOTE(review): the original also read the language-driver byte at
        // offset 29 into an encoding that was never used; removed as dead code.

        // Marshal the fixed-size header into a DBFHeader structure.
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header;
        try {
            header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        } finally {
            // Free the pin even if the marshal throws.
            handle.Free();
        }

        // Descriptor count: 32-byte header + 32 bytes per field + 1-byte 0x0D
        // terminator, so (headerLen - 31) / 32 fields (integer division).
        // NOTE(review): Visual FoxPro files carry a 263-byte backlink inside the
        // header, which this formula would overcount -- confirm input format.
        List<string> fieldnames = new List<string>();
        int numberOfColumns = (header.headerLen - 31) / 32;
        for (int i = 0; i < numberOfColumns; i++) {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            try {
                FieldDescriptor field = (FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor));
                fieldnames.Add(field.fieldName);
            } finally {
                handle.Free();
            }
        }
        return fieldnames.ToArray();
    } finally {
        if (br != null) {
            br.Close();
        }
    }
}
// Reads an entire DBF table from an open stream into a DataTable.
// Column types come from the field descriptors; for 'N' fields the FIRST data
// record decides between decimal (value contains '.') and int. Deleted records
// (flag byte 0x2A '*') are skipped. Uses Encoding.Default for all text.
public static DataTable ReadDBF(Stream dbfFileStream) {
    //long start = DateTime.Now.Ticks;
    DataTable dt = new DataTable();
    BinaryReader recReader;
    string number;
    string year;
    string month;
    string day;
    long lDate;
    long lTime;
    DataRow row;
    int fieldIndex;
    BinaryReader br = null;
    try {
        // Read the header into a buffer
        br = new BinaryReader(dbfFileStream);
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        // Marshall the header into a DBFHeader structure
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();
        // Read all field descriptors; 13 (0x0D) marks the end of the descriptor area.
        // NOTE(review): no EOF guard -- a truncated stream (PeekChar() == -1) never hits 13.
        ArrayList fields = new ArrayList();
        while ((13 != br.PeekChar())) {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
        }
        // Peek at the first record (headerLen + 1 skips its deleted-flag byte);
        // it is only used below to decide decimal vs int for 'N' columns.
        ((Stream)br.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        buffer = br.ReadBytes(header.recordLen);
        recReader = new BinaryReader(new MemoryStream(buffer));
        // Create the DataTable columns from the descriptors.
        DataColumn col = null;
        foreach (FieldDescriptor field in fields) {
            number = Encoding.Default.GetString(recReader.ReadBytes(field.fieldLen));
            switch (field.fieldType) {
            case 'N':
                // Decimal when the sample value has a fractional part, else int.
                if (number.IndexOf(".") > -1) {
                    col = new DataColumn(field.fieldName, typeof(decimal));
                } else {
                    col = new DataColumn(field.fieldName, typeof(int));
                }
                break;
            case 'C':
                col = new DataColumn(field.fieldName, typeof(string));
                break;
            case 'T':
                // Timestamps surface as DateTime (switch to string to see the raw value).
                //col = new DataColumn(field.fieldName, typeof(string));
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case 'D':
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case 'L':
                col = new DataColumn(field.fieldName, typeof(bool));
                break;
            case 'F':
                col = new DataColumn(field.fieldName, typeof(Double));
                break;
            }
            dt.Columns.Add(col);
        }
        // Skip past the end of the header (back to the first record's flag byte).
        ((Stream)br.BaseStream).Seek(header.headerLen, SeekOrigin.Begin);
        int counter = -1;
        // Read in all the records (counter starts at -1 and is bumped first,
        // so this runs numRecords times).
        while (counter < header.numRecords - 1) {
            counter++;
            // Buffer the whole record, then parse fields out of the buffer; this
            // also absorbs any padding at the end of each record.
            buffer = br.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer));
            // Deleted flag: 0x2A '*' = deleted, 0x20 = live.
            if (recReader.ReadChar() == '*') {
                continue;
            }
            fieldIndex = 0;
            row = dt.NewRow();
            foreach (FieldDescriptor field in fields) {
                switch (field.fieldType) {
                case 'N': // Number
                    number = Encoding.Default.GetString(recReader.ReadBytes(field.fieldLen));
                    if (IsNumber(number)) {
                        if (number.IndexOf(".") > -1) {
                            // NOTE(review): culture-sensitive parse of a '.'-separated
                            // value -- fails/garbles on comma-decimal cultures; confirm.
                            row[fieldIndex] = decimal.Parse(number);
                        } else {
                            row[fieldIndex] = int.Parse(number);
                        }
                    } else {
                        // Non-numeric content defaults to 0.
                        row[fieldIndex] = 0;
                    }
                    break;
                case 'C': // String
                    row[fieldIndex] = Encoding.Default.GetString(recReader.ReadBytes(field.fieldLen));
                    break;
                case 'D': // Date (YYYYMMDD); blank/invalid dates stay DBNull
                    year = Encoding.Default.GetString(recReader.ReadBytes(4));
                    month = Encoding.Default.GetString(recReader.ReadBytes(2));
                    day = Encoding.Default.GetString(recReader.ReadBytes(2));
                    row[fieldIndex] = System.DBNull.Value;
                    try {
                        // Only plausible years (> 1900) are materialized.
                        if (IsNumber(year) && IsNumber(month) && IsNumber(day)) {
                            if ((Int32.Parse(year) > 1900)) {
                                row[fieldIndex] = new DateTime(Int32.Parse(year), Int32.Parse(month), Int32.Parse(day));
                            }
                        }
                    } catch {
                        // Malformed date: leave DBNull.
                    }
                    break;
                case 'T': // Timestamp, 8 bytes - two integers, first for date, second for time
                    // Date is a Julian day number; time is milliseconds since midnight
                    // (x 10000 converts ms to ticks).
                    lDate = recReader.ReadInt32();
                    lTime = recReader.ReadInt32() * 10000L;
                    row[fieldIndex] = JulianToDateTime(lDate).AddTicks(lTime);
                    break;
                case 'L': // Boolean (Y/N)
                    // NOTE(review): only 'Y' maps to true; 'y'/'T'/'t' become false -- confirm.
                    if ('Y' == recReader.ReadByte()) {
                        row[fieldIndex] = true;
                    } else {
                        row[fieldIndex] = false;
                    }
                    break;
                case 'F': // Float
                    number = Encoding.Default.GetString(recReader.ReadBytes(field.fieldLen));
                    if (IsNumber(number)) {
                        row[fieldIndex] = double.Parse(number);
                    } else {
                        row[fieldIndex] = 0.0F;
                    }
                    break;
                }
                fieldIndex++;
            }
            recReader.Close();
            dt.Rows.Add(row);
        }
    } catch {
        throw;
    } finally {
        if (null != br) {
            br.Close();
        }
    }
    return (dt);
}
// Reads an entire standard DBF file into a DataTable.
// Returns an empty DataTable when the file does not exist. Column types come
// from the field descriptors; for 'N' fields the first data record decides
// between decimal (value contains '.') and int. Deleted records are skipped.
public static DataTable ReadDBF(string dbfFile) {
    DataTable dt = new DataTable();
    BinaryReader recReader;
    string number;
    string year;
    string month;
    string day;
    long lDate;
    long lTime;
    DataRow row;
    int fieldIndex;

    // If there isn't even a file, just return an empty DataTable.
    if (false == File.Exists(dbfFile)) {
        return dt;
    }

    BinaryReader br = null;
    try {
        // Read and marshal the fixed-size file header.
        br = new BinaryReader(File.OpenRead(dbfFile));
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();

        // Read in all the field descriptors. Per the spec, 13 (0x0D) marks
        // the end of the field descriptor area.
        ArrayList fields = new ArrayList();
        while (13 != br.PeekChar()) {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
        }

        // Read the first record (headerLen + 1 skips its deleted-flag byte);
        // it is only used to choose column types below.
        ((FileStream)br.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        buffer = br.ReadBytes(header.recordLen);
        recReader = new BinaryReader(new MemoryStream(buffer));

        // Create the columns in our new DataTable.
        DataColumn col = null;
        foreach (FieldDescriptor field in fields) {
            number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
            switch (field.fieldType) {
            case 'N':
                // Decimal when the sample value has a fractional part, else int.
                if (number.IndexOf(".") > -1) {
                    col = new DataColumn(field.fieldName, typeof(decimal));
                } else {
                    col = new DataColumn(field.fieldName, typeof(int));
                }
                break;
            case 'C':
                col = new DataColumn(field.fieldName, typeof(string));
                break;
            case 'T':
                // Timestamps surface as DateTime (use string to see the raw time component).
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case 'D':
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case 'L':
                col = new DataColumn(field.fieldName, typeof(bool));
                break;
            case 'F':
                col = new DataColumn(field.fieldName, typeof(Double));
                break;
            }
            dt.Columns.Add(col);
        }

        // Rewind to the start of the record area and read every record.
        ((FileStream)br.BaseStream).Seek(header.headerLen, SeekOrigin.Begin);
        for (int counter = 0; counter <= header.numRecords - 1; counter++) {
            // Buffer the whole record, then parse fields out of the buffer; this
            // also absorbs any padding at the end of each record.
            buffer = br.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer));

            // Records start with a deleted flag: 0x2A '*' = deleted, 0x20 = live.
            if (recReader.ReadChar() == '*') {
                continue;
            }

            fieldIndex = 0;
            row = dt.NewRow();
            foreach (FieldDescriptor field in fields) {
                switch (field.fieldType) {
                case 'N': // Number
                    number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                    if (IsNumber(number)) {
                        if (number.IndexOf(".") > -1) {
                            // DBF numerics always use '.' as the decimal separator,
                            // so parse with the invariant culture. (The previous code
                            // replaced '.' with ',' and parsed with the current
                            // culture, which silently produced wrong values on
                            // machines where ',' is a group separator.)
                            decimal a;
                            decimal.TryParse(number, System.Globalization.NumberStyles.Number, System.Globalization.CultureInfo.InvariantCulture, out a);
                            row[fieldIndex] = a;
                        } else {
                            row[fieldIndex] = int.Parse(number, System.Globalization.CultureInfo.InvariantCulture);
                        }
                    } else {
                        // Non-numeric content defaults to 0.
                        row[fieldIndex] = 0;
                    }
                    break;
                case 'C': // String
                    row[fieldIndex] = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                    break;
                case 'D': // Date, stored as YYYYMMDD
                    year = Encoding.ASCII.GetString(recReader.ReadBytes(4));
                    month = Encoding.ASCII.GetString(recReader.ReadBytes(2));
                    day = Encoding.ASCII.GetString(recReader.ReadBytes(2));
                    row[fieldIndex] = System.DBNull.Value;
                    try {
                        // Blank/zero dates stay DBNull; only plausible years are kept.
                        if (IsNumber(year) && IsNumber(month) && IsNumber(day)) {
                            if (Int32.Parse(year) > 1900) {
                                row[fieldIndex] = new DateTime(Int32.Parse(year), Int32.Parse(month), Int32.Parse(day));
                            }
                        }
                    } catch {
                        // Malformed date: leave the cell as DBNull.
                    }
                    break;
                case 'T': // Timestamp: two Int32s -- Julian day, then milliseconds since midnight
                    lDate = recReader.ReadInt32();
                    lTime = recReader.ReadInt32() * 10000L; // ms -> ticks
                    row[fieldIndex] = JulianToDateTime(lDate).AddTicks(lTime);
                    break;
                case 'L': // Boolean (Y/N)
                    // NOTE(review): only 'Y' maps to true; 'y'/'T'/'t' become false -- confirm intended.
                    if ('Y' == recReader.ReadByte()) {
                        row[fieldIndex] = true;
                    } else {
                        row[fieldIndex] = false;
                    }
                    break;
                case 'F': // Float; same invariant-culture reasoning as 'N' above.
                    number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                    if (IsNumber(number)) {
                        row[fieldIndex] = double.Parse(number, System.Globalization.CultureInfo.InvariantCulture);
                    } else {
                        row[fieldIndex] = 0.0F;
                    }
                    break;
                }
                fieldIndex++;
            }
            recReader.Close();
            dt.Rows.Add(row);
        }
    } finally {
        if (null != br) {
            br.Close();
        }
    }
    return dt;
}
// Reads an entire DBF stream into a DataTable named tableName, optionally
// streaming rows into SQL Server in batches (when sqlParameters is set) and
// reporting progress through the BeginSomeStep/BeginSomeIteration events.
// Row layout: [ISDELETE (only when readDeleted)], IDENTIFIER (auto-increment
// primary key), then the DBF data columns.
public static DataTable ReadDBF(Stream dbfStream, string tableName) {
    if (BeginSomeStep != null) {
        // Event text is user-facing Russian: "Processing table <name>..."
        BeginSomeStep(className, new dbfCoreEventArgs("Обработка таблицы " + tableName + "...", ""));
    }
    //Console.WriteLine("Обработка таблицы {0}...", tableName);
    DataTable dt = new DataTable();
    BinaryReader recReader;
    DataRow row;
    int fieldIndex;
    dt.TableName = tableName;
    BinaryReader dbfReader = null;
    try {
        //br = new BinaryReader(new FileStream(dbfFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite));
        dbfReader = new BinaryReader(dbfStream);
        // Read the header into a buffer
        byte[] buffer = dbfReader.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        // Marshall the header into a DBFHeader structure
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();
        // Read in all the field descriptors. Per the spec, 13 (0D) marks the end of the field descriptors
        ArrayList fields = new ArrayList();
        while ((13 != dbfReader.PeekChar())) {
            buffer = dbfReader.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
            // Sanity cap against a corrupt descriptor area.
            // NOTE(review): bails out with null without firing completion events.
            if (fields.Count > Int16.MaxValue) {
                return (null);
            }
        }
        // Read the first record (headerLen + 1 skips its deleted-flag byte);
        // it is only used to decide column types below.
        (dbfReader.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        buffer = dbfReader.ReadBytes(header.recordLen);
        recReader = new BinaryReader(new MemoryStream(buffer));
        // Create the columns in our new DataTable
        DataColumn col = null;
        if (readDeleted) {
            dt.Columns.Add(new DataColumn("ISDELETE", typeof(bool)));
        }
        // Synthetic auto-increment primary key (used as bulk-copy key).
        col = new DataColumn("IDENTIFIER", typeof(long));
        col.AutoIncrement = true;
        col.AutoIncrementSeed = 1;
        dt.Columns.Add(col);
        dt.PrimaryKey = new DataColumn[] {
            dt.Columns["IDENTIFIER"]
        };
        foreach (FieldDescriptor field in fields) {
            byte[] NumberByteArray = recReader.ReadBytes(field.fieldLen);
            switch (field.fieldType) {
            case dBaseType.N:
                // First record's value decides decimal vs int for 'N' columns.
                if (dBaseConverter.N_IsDecimal(NumberByteArray)) {
                    col = new DataColumn(field.fieldName, typeof(decimal));
                    col.ExtendedProperties.Add("N_Precision", field.decimalCount);
                } else {
                    col = new DataColumn(field.fieldName, typeof(int));
                }
                break;
            case dBaseType.C:
                col = new DataColumn(field.fieldName, typeof(string));
                col.MaxLength = field.fieldLen;
                break;
            case dBaseType.T:
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case dBaseType.D:
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case dBaseType.L:
                col = new DataColumn(field.fieldName, typeof(bool));
                break;
            case dBaseType.F:
                col = new DataColumn(field.fieldName, typeof(Double));
                break;
            case dBaseType.M: // MEMO content is exposed as decoded text
                //col = new DataColumn(field.fieldName, typeof(byte[]));
                col = new DataColumn(field.fieldName, typeof(string));
                break;
            }
            // De-duplicate column names by appending _1, _2, ...
            int i = 0;
            var name = col.ColumnName;
            while (dt.Columns.Contains(col.ColumnName)) {
                i++;
                col.ColumnName = string.Format("{0}_{1}", name, i);
            }
            dt.Columns.Add(col);
        }
        // Skip past the end of the header (back to the first record's flag byte).
        (dbfReader.BaseStream).Seek(header.headerLen, SeekOrigin.Begin);
        if (sqlParameters != null) {
            SQLHelper.CreateTable(dt, sqlParameters);
        }
        int totalRowsCount = 0;
        // NOTE(review): table-specific hack -- for "n_shk" only one memo block is
        // read per value; confirm this is still required.
        bool readOnlyOneBlock = tableName.ToLower() == "n_shk";
        // Read in all the records
        for (int counter = 0; counter < header.numRecords; counter++) {
            // Buffer the whole record, then parse fields out of the buffer.
            buffer = dbfReader.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer));
            // Records begin with a deleted flag: 0x2A '*' = deleted, 0x20 = live.
            bool isDeleted = recReader.ReadChar() == '*';
            if (isDeleted && !readDeleted) {
                continue;
            }
            // Loop through each field in a record
            row = dt.NewRow();
            if (readDeleted) {
                row[0] = isDeleted;
                fieldIndex = 2; // data starts at the 3rd column (after ISDELETE, IDENTIFIER)
            } else {
                fieldIndex = 1; // data starts at the 2nd column (after IDENTIFIER)
            }
            foreach (FieldDescriptor field in fields) {
                switch (field.fieldType) {
                case dBaseType.N: // Number
                    byte[] NumberBytes = recReader.ReadBytes(field.fieldLen);
                    if (dBaseConverter.N_IsDecimal(NumberBytes)) {
                        row[fieldIndex] = dBaseConverter.N_ToDecimal(NumberBytes);
                    } else {
                        row[fieldIndex] = dBaseConverter.N_ToInt(NumberBytes);
                    }
                    break;
                case dBaseType.C: // String
                    row[fieldIndex] = dBaseConverter.C_ToString(recReader.ReadBytes(field.fieldLen), encoding);
                    break;
                case dBaseType.M: // Memo: the field holds a block number in the memo file
                    //row[fieldIndex] = ReadMemoBlock(dBaseConverter.N_ToInt(recReader.ReadBytes(field.fieldLen)));
                    row[fieldIndex] = dBaseConverter.C_ToString(ReadMemoBlock(dBaseConverter.N_ToInt(recReader.ReadBytes(field.fieldLen)), readOnlyOneBlock), encoding);
                    //int n = dBaseConverter.N_ToInt(recReader.ReadBytes(field.fieldLen)); row[fieldIndex] = n.ToString() + ';' + dBaseConverter.C_ToString(ReadMemoBlock(n), encoding);
                    break;
                case dBaseType.D: // Date (YYYYMMDD); MinValue sentinel becomes DBNull
                    DateTime DTFromFile = dBaseConverter.D_ToDateTime(recReader.ReadBytes(8));
                    if (DTFromFile == DateTime.MinValue) {
                        row[fieldIndex] = System.DBNull.Value;
                    } else {
                        row[fieldIndex] = DTFromFile;
                    }
                    break;
                case dBaseType.T: // Timestamp
                    row[fieldIndex] = dBaseConverter.T_ToDateTime(recReader.ReadBytes(8));
                    break;
                case dBaseType.L: // Boolean (Y/N)
                    row[fieldIndex] = dBaseConverter.L_ToBool(recReader.ReadByte());
                    break;
                case dBaseType.F: // Float
                    row[fieldIndex] = dBaseConverter.F_ToDouble(recReader.ReadBytes(field.fieldLen));
                    break;
                }
                fieldIndex++;
            }
            recReader.Close();
            dt.Rows.Add(row);
            // Every dataTableMaxRows rows (and on the last record): report
            // progress and, when targeting SQL, bulk-copy then clear the buffer.
            if ((dataTableMaxRows > 0 && (counter + 1) % dataTableMaxRows == 0) || counter == header.numRecords - 1) {
                // Progress message (Russian): "Done: X% (Y of Z)."
                StringBuilder str = new StringBuilder("\rВыполнено: " + ((counter + 1.0) / header.numRecords * 100.0).ToString() + "% (" + (counter + 1).ToString() + " из " + header.numRecords + ").");
                if (BeginSomeIteration != null) {
                    BeginSomeIteration(className, new dbfCoreEventArgs(str.ToString(), ""));
                }
                //Console.Write("\rВыполнено: {0}% ({1} из {2}).", (int)((counter + 1.0) / header.numRecords * 100.0), counter + 1, header.numRecords);
                if (sqlParameters != null) {
                    totalRowsCount = SQLHelper.BulkCopyToSQL(dt, sqlParameters);
                    dt.Rows.Clear();
                }
            }
        }
        //Console.WriteLine();
        if (BeginSomeStep != null) {
            // "Total records in DBF: N"
            BeginSomeStep(className, new dbfCoreEventArgs("Всего записей в DBF: " + header.numRecords, ""));
        }
        //Console.WriteLine("Всего записей в DBF: {0}", header.numRecords);
        if (BeginSomeStep != null) {
            // "Total written to DB: N"
            BeginSomeStep(className, new dbfCoreEventArgs("Всего записано в БД: " + totalRowsCount, ""));
        }
        //Console.WriteLine("Всего записано в БД: {0}", totalRowsCount);
        if (BeginSomeStep != null) {
            BeginSomeStep(className, new dbfCoreEventArgs(new string('-', 70), ""));
        }
        //Console.WriteLine(new string('-', 79));
    } catch {
        throw;
    } finally {
        if (dbfReader != null) {
            dbfReader.Close();
            dbfReader = null;
        }
        if (fptReader != null) {
            fptReader.Close();
            fptReader = null;
        }
    }
    //long count = DateTime.Now.Ticks - start;
    return (dt);
}
// Reads an entire standard DBF file into a DataTable (simple variant:
// N/C/D/L field types only, indexed by column name).
// NOTE(review): deleted records are NOT skipped here -- rows flagged 0x2A are
// returned along with live ones; the other overloads do skip them.
public static DataTable ReadDBF(string dbfFile) {
    DataTable dt = new DataTable();
    // If there isn't even a file, just return an empty DataTable
    if ((false == File.Exists(dbfFile))) {
        return (dt);
    }
    BinaryReader br = null;
    try {
        // Read the header into a buffer
        br = new BinaryReader(File.OpenRead(dbfFile));
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        // Marshall the header into a DBFHeader structure
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();
        // Read in all the field descriptors. Per the spec, 13 (0D) marks the end of the field descriptors
        ArrayList fields = new ArrayList();
        while ((13 != br.PeekChar())) {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
        }
        // Create the columns in our new DataTable
        // NOTE(review): a field type outside N/C/D/L leaves 'col' pointing at the
        // previously created column (Columns.Add would then throw), and its bytes
        // are never consumed in the record loop, misaligning all later fields.
        DataColumn col = null;
        foreach (FieldDescriptor field in fields) {
            switch (field.fieldType) {
            case 'N':
                col = new DataColumn(field.fieldName, typeof(Int32));
                break;
            case 'C':
                col = new DataColumn(field.fieldName, typeof(string));
                break;
            case 'D':
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case 'L':
                col = new DataColumn(field.fieldName, typeof(bool));
                break;
            }
            dt.Columns.Add(col);
        }
        // Skip past the end of the header PLUS the first record's deleted-flag
        // byte. Each recordLen-sized read below then covers this record's fields
        // plus the NEXT record's flag byte (assuming recordLen includes the
        // 1-byte flag), so alignment is preserved even though the flag is never
        // examined.
        ((FileStream)br.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        // Declare all our locals here outside the loops
        BinaryReader recReader;
        string number;
        string year;
        string month;
        string day;
        DataRow row;
        // Read in all the records
        for (int counter = 0; counter <= header.numRecords - 1; counter++) {
            // First we'll read the entire record into a buffer and then read each field from the buffer
            // This helps account for any extra space at the end of each record and probably performs better
            buffer = br.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer));
            // Loop through each field in a record
            row = dt.NewRow();
            foreach (FieldDescriptor field in fields) {
                switch (field.fieldType) {
                case 'N': // Number
                    // We'll use a try/catch here in case it isn't a valid number
                    number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                    try {
                        row[field.fieldName] = Int32.Parse(number);
                    } catch {
                        // Unparsable numerics default to 0.
                        row[field.fieldName] = 0;
                    }
                    break;
                case 'C': // String
                    row[field.fieldName] = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                    break;
                case 'D': // Date (YYYYMMDD); blank/invalid dates stay DBNull
                    year = Encoding.ASCII.GetString(recReader.ReadBytes(4));
                    month = Encoding.ASCII.GetString(recReader.ReadBytes(2));
                    day = Encoding.ASCII.GetString(recReader.ReadBytes(2));
                    row[field.fieldName] = System.DBNull.Value;
                    try {
                        // Only plausible years (> 1900) are materialized.
                        if ((Int32.Parse(year) > 1900)) {
                            row[field.fieldName] = new DateTime(Int32.Parse(year), Int32.Parse(month), Int32.Parse(day));
                        }
                    } catch {
                        // Malformed date: leave DBNull.
                    }
                    break;
                case 'L': // Boolean (Y/N)
                    // NOTE(review): only 'Y' maps to true; 'y'/'T'/'t' become false -- confirm.
                    if ('Y' == recReader.ReadByte()) {
                        row[field.fieldName] = true;
                    } else {
                        row[field.fieldName] = false;
                    }
                    break;
                }
            }
            recReader.Close();
            dt.Rows.Add(row);
        }
    } catch {
        throw;
    } finally {
        if (null != br) {
            br.Close();
        }
    }
    return (dt);
}
// Reads an entire DBF file (possibly inside a zip archive) into a DataTable.
// Column 0 ("DELETED_FLAG") carries each record's deleted marker; data columns
// follow, so every field value lands at fieldIndex + 1. Memo ('M') fields are
// resolved through the companion memo file opened by openMemoFile().
public static DataTable ReadDBF(string dbfFile, ZipHelper _ziphelper, char DirSeperator) {
    DataTable dt = new DataTable();
    BinaryReader recReader;
    DataRow row;
    int fieldIndex;

    // If there isn't even a file, just return an empty DataTable.
    if (false == _ziphelper.FileExists(dbfFile)) {
        return dt;
    }

    BinaryReader br = null;
    // Open the companion memo and index files before parsing records.
    openMemoFile(dbfFile, _ziphelper, DirSeperator);
    readMDXFile(dbfFile, _ziphelper, DirSeperator);
    //Dictionary<int, byte[]> memoLookup = ReadDBT(dbfFile);
    try {
        // Pull the whole DBF into memory so all further reads are cheap seeks.
        Stream tmpStream = _ziphelper.GetReadStream(dbfFile);
        br = new BinaryReader(tmpStream);
        byte[] completeBuffer = br.ReadBytes((int)_ziphelper.GetStreamLength(dbfFile, tmpStream));
        tmpStream.Close();
        br.Close();
        br = new BinaryReader(new MemoryStream(completeBuffer));

        // Marshal the fixed-size header.
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();

        // Read in all the field descriptors. Per the spec, 13 (0x0D) marks the end.
        ArrayList fields = new ArrayList();
        while (13 != br.PeekChar()) {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
        }

        // Read the first record (headerLen + 1 skips its deleted-flag byte);
        // it is only used to choose column types below.
        (br.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        buffer = br.ReadBytes(header.recordLen);
        recReader = new BinaryReader(new MemoryStream(buffer));

        // Create the columns. Column 0 is the record's deleted flag; every
        // data field therefore lands at fieldIndex + 1 in the record loop.
        DataColumn col = null;
        dt.Columns.Add(new DataColumn("DELETED_FLAG", typeof(bool)));
        foreach (FieldDescriptor field in fields) {
            byte[] NumberByteArray = recReader.ReadBytes(field.fieldLen);
            switch (field.fieldType) {
            case dBaseType.N:
                // First record's value decides decimal vs int for 'N' columns.
                if (dBaseConverter.N_IsDecimal(NumberByteArray)) {
                    col = new DataColumn(field.fieldName, typeof(decimal));
                } else {
                    col = new DataColumn(field.fieldName, typeof(int));
                }
                break;
            case dBaseType.C:
                col = new DataColumn(field.fieldName, typeof(string));
                break;
            case dBaseType.T:
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case dBaseType.D:
                col = new DataColumn(field.fieldName, typeof(DateTime));
                break;
            case dBaseType.L:
                col = new DataColumn(field.fieldName, typeof(bool));
                break;
            case dBaseType.F:
                col = new DataColumn(field.fieldName, typeof(Double));
                break;
            case dBaseType.M:
                // Memo: the field stores a block number; the raw block is exposed.
                col = new DataColumn(field.fieldName, typeof(byte[]));
                break;
            }
            dt.Columns.Add(col);
        }

        // Rewind to the start of the record area and read every record.
        (br.BaseStream).Seek(header.headerLen, SeekOrigin.Begin);
        for (int counter = 0; counter <= header.numRecords - 1; counter++) {
            // Buffer the whole record, then parse fields out of the buffer; this
            // also absorbs any padding at the end of each record.
            buffer = br.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer));

            // Deleted records are kept; the flag (0x2A '*') is surfaced in column 0.
            fieldIndex = 0;
            row = dt.NewRow();
            char delflg = recReader.ReadChar();
            if (delflg == '*') {
                row[0] = true;
            } else {
                row[0] = false;
            }
            foreach (FieldDescriptor field in fields) {
                switch (field.fieldType) {
                case dBaseType.N: // Number
                    byte[] NumberBytes = recReader.ReadBytes(field.fieldLen);
                    if (dBaseConverter.N_IsDecimal(NumberBytes)) {
                        row[fieldIndex + 1] = dBaseConverter.N_ToDecimal(NumberBytes);
                    } else {
                        row[fieldIndex + 1] = dBaseConverter.N_ToInt(NumberBytes);
                    }
                    break;
                case dBaseType.C: // String
                    row[fieldIndex + 1] = dBaseConverter.C_ToString(recReader.ReadBytes(field.fieldLen));
                    break;
                case dBaseType.M: // Memo block number -> raw memo content
                    row[fieldIndex + 1] = ReadMemoBlock(dBaseConverter.N_ToInt(recReader.ReadBytes(field.fieldLen)));
                    break;
                case dBaseType.D: // Date (YYYYMMDD); MinValue sentinel becomes DBNull
                    DateTime DTFromFile = dBaseConverter.D_ToDateTime(recReader.ReadBytes(8));
                    if (DTFromFile == DateTime.MinValue) {
                        row[fieldIndex + 1] = System.DBNull.Value;
                    } else {
                        // BUG FIX: this previously wrote to row[fieldIndex], placing
                        // valid dates one column to the left (clobbering the previous
                        // field) and leaving the date column empty.
                        row[fieldIndex + 1] = DTFromFile;
                    }
                    break;
                case dBaseType.T: // Timestamp
                    row[fieldIndex + 1] = dBaseConverter.T_ToDateTime(recReader.ReadBytes(8));
                    break;
                case dBaseType.L: // Boolean (Y/N)
                    row[fieldIndex + 1] = dBaseConverter.L_ToBool(recReader.ReadByte());
                    break;
                case dBaseType.F: // Float
                    row[fieldIndex + 1] = dBaseConverter.F_ToDouble(recReader.ReadBytes(field.fieldLen));
                    break;
                }
                fieldIndex++;
            }
            recReader.Close();
            dt.Rows.Add(row);
        }
    } finally {
        if (null != br) {
            br.Close();
        }
        if (dbtReader != null) {
            dbtReader.Close();
            dbtReader = null;
        }
    }
    return dt;
}
// This function should write a MDX file with the specified indexes.
/* private static void writeMDXFile(string dbfFile, indexes)
 * {
 *
 * }*/
#endregion

#region DBF-Write-Functions
/// <summary>
/// Writes a single value directly into a DBF file (possibly inside a zip archive).
/// It reads the field list and writes at the correct byte position.
/// To access the deleted flag, use DELETED_FLAG as column name.
/// </summary>
/// <param name="dbfFile">Path of the DBF file (inside the archive / on disk).</param>
/// <param name="column">Field name to write, or DELETED_FLAG for the record's deleted marker.</param>
/// <param name="row">Zero-based record number.</param>
/// <param name="value">Value to write; formatted according to the field type.</param>
/// <returns>true on success; false when the file is missing or the field type is unsupported.</returns>
public static bool WriteValue(string dbfFile, string column, int row, object value, ZipHelper _ziphelper, char DirSeperator) {
    //if (zipfile != null)
    //    throw new Exception("Write to Zipped Files is not supported!");
    int BytesToRecordStart = 0;
    // NOTE(review): 'start' is never read.
    long start = DateTime.Now.Ticks;
    // If there isn't even a file, there is nothing to write to.
    if ((false == _ziphelper.FileExists(dbfFile))) {
        return (false);
    }
    BinaryReader br = null;
    BinaryWriter bw = null;
    try {
        // Buffer the whole file in memory, then parse the header from it.
        Stream tmpStream = _ziphelper.GetReadStream(dbfFile);
        br = new BinaryReader(tmpStream);
        byte[] completeBuffer = br.ReadBytes((int)_ziphelper.GetStreamLength(dbfFile, tmpStream));
        tmpStream.Close();
        br.Close();
        br = new BinaryReader(new MemoryStream(completeBuffer));
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        // Marshall the header into a DBFHeader structure
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();
        // Read in all the field descriptors. Per the spec, 13 (0D) marks the end of the field descriptors
        ArrayList fields = new ArrayList();
        while ((13 != br.PeekChar())) {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
        }
        // Locate the target column: accumulate the byte offset of every field
        // before it and remember its type and length.
        // NOTE(review): if 'column' is not found, the LAST field's type/length and
        // the full record width are used silently -- confirm intended behavior.
        char writeFieldType = ' ';
        int writeFieldLength = 0;
        foreach (FieldDescriptor field in fields) {
            writeFieldType = (char)field.fieldType;
            writeFieldLength = field.fieldLen;
            if (field.fieldName == column) {
                break;
            }
            BytesToRecordStart += field.fieldLen;
        }
        br.Close();
        Stream strm = _ziphelper.GetWriteStream(dbfFile);
        bw = new BinaryWriter(strm);
        // +1 skips the record's deleted-flag byte; the flag itself lives at offset 0.
        if (column != "DELETED_FLAG") {
            BytesToRecordStart++;
        } else {
            BytesToRecordStart = 0;
        }
        (/*(FileStream)*/ bw.BaseStream).Seek(header.headerLen + row * header.recordLen + BytesToRecordStart, SeekOrigin.Begin);
        if (column == "DELETED_FLAG") {
            // 0x2A '*' marks the record deleted; 0x20 ' ' marks it live.
            if ((bool)value == true) {
                bw.Write(Encoding.ASCII.GetBytes("*"));
            } else {
                bw.Write(Encoding.ASCII.GetBytes(" "));
            }
        } else {
            // (A large block of commented-out column-type mapping code was removed here.)
            switch (writeFieldType) {
            case 'N':
                // Numbers are right-aligned, space-padded.
                bw.Write(Encoding.ASCII.GetBytes(value.ToString().PadLeft(writeFieldLength, ' ')));
                break;
            case 'C':
                // Strings are left-aligned, space-padded.
                bw.Write(Encoding.ASCII.GetBytes(value.ToString().PadRight(writeFieldLength, ' ')));
                break;
            default:
                // Unsupported field type: nothing is written back to the archive.
                //br.Close();
                return (false);
                break;
            }
        }
        // Flush the modified stream back into the archive.
        _ziphelper.WriteBackStream(dbfFile, strm);
        bw.Close();
    } finally {
        if (br != null) {
            br.Close();
        }
        if (bw != null) {
            bw.Close();
        }
    }
    return (true);
}
// Reads an entire DBF (dBase) file into a DataTable using the supplied encoding.
// Special handling for code page 1256 (Arabic/Persian): character fields are
// stored byte-reversed and are re-mapped to Persian letters/digits through a
// hand-built substitution chain below. Returns an empty DataTable when the
// file does not exist. Numeric parse failures are swallowed (see 'bad' flag).
public static DataTable ReadDBF(string dbfFile, System.Text.Encoding readingEncoding)
{
    // Substitution map for high bytes (>= 128) of 'C' fields in the
    // non-1256 branch below; indexed by (byte - 128).
    byte[] hrhSuggestedCharMap = new byte[] { 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 161, 220, 190, 194, 198, 198, 199, 199, 200, 200, 129, 129, 202, 202, 203, 203, 204, 204, 141, 141, 205, 205, 206, 206, 207, 208, 209, 210, 142, 211, 211, 212, 212, 213, 213, 214, 214, 216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 217, 218, 218, 218, 218, 219, 219, 219, 219, 221, 221, 222, 222, 223, 223, 144, 144, 225, 225, 225, 227, 227, 228, 228, 230, 229, 229, 229, 237, 237, 237, 0 };
    long start = DateTime.Now.Ticks;   // crude timing; only feeds 'count' at the bottom
    DataTable dt = new DataTable();
    BinaryReader recReader;
    string number;
    string year;
    string month;
    string day;
    long lDate;
    long lTime;
    DataRow row;
    int fieldIndex;
    // Missing file -> empty table, no exception.
    if ((false == File.Exists(dbfFile)))
    {
        return(dt);
    }
    bool bad = false;   // set when a numeric field fails to parse; only read by the disabled warning below
    BinaryReader br = null;
    try
    {
        // For CP-1256 the stream is read as raw ASCII and decoded manually per-field.
        br = new BinaryReader(File.OpenRead(dbfFile), readingEncoding.CodePage == 1256 ? Encoding.ASCII : readingEncoding);
        // Marshal the fixed-size file header into a DBFHeader structure.
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();
        // Read field descriptors until the 0x0D terminator, or EOF (PeekChar == -1).
        ArrayList fields = new ArrayList();
        while (true)
        {
            if (13 == br.PeekChar())
            {
                break;
            }
            if (-1 == br.PeekChar())
            {
                break;
            }
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
        }
        // Peek at the first record (skipping its deleted-flag byte) to drive column creation.
        ((FileStream)br.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        buffer = br.ReadBytes(header.recordLen);
        recReader = new BinaryReader(new MemoryStream(buffer), readingEncoding.CodePage == 1256 ?
                                     Encoding.ASCII : readingEncoding);
        // Map each DBF field type to a DataColumn type.
        // Note: 'N' always becomes decimal here — the int path is disabled.
        DataColumn col = null;
        foreach (FieldDescriptor field in fields)
        {
            number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
            switch (field.fieldType)
            {
                case 'N':
                    //if (number.IndexOf(".") > -1)
                    {
                        col = new DataColumn(field.fieldName, typeof(decimal));
                    }
                    //else
                    //{
                    //    col = new DataColumn(field.fieldName, typeof(int));
                    //}
                    break;
                case 'C':
                    col = new DataColumn(field.fieldName, typeof(string));
                    break;
                case 'T':
                    col = new DataColumn(field.fieldName, typeof(DateTime));
                    break;
                case 'D':
                    col = new DataColumn(field.fieldName, typeof(DateTime));
                    break;
                case 'L':
                    col = new DataColumn(field.fieldName, typeof(bool));
                    break;
                case 'F':
                    col = new DataColumn(field.fieldName, typeof(Double));
                    break;
            }
            // Columns that fail to add (e.g. duplicate names) are skipped.
            // NOTE(review): skipping here desynchronizes fieldIndex-based row
            // assignment below against the fields list — confirm intended.
            try
            {
                dt.Columns.Add(col);
            }
            catch (Exception ee1)
            {
                try
                {
                    continue;
                }
                catch
                {
                    throw new Exception(ee1.Message);
                }
            }
        }
        // Rewind to the first record (including the deleted flag) and read all records.
        ((FileStream)br.BaseStream).Seek(header.headerLen, SeekOrigin.Begin);
        for (int counter = 0; counter <= header.numRecords - 1; counter++)
        {
            buffer = br.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer), readingEncoding.CodePage == 1256 ?
                                         Encoding.ASCII : readingEncoding);
            // '*' marks a deleted record — skip it.
            if (recReader.ReadChar() == '*')
            {
                continue;
            }
            fieldIndex = 0;
            row = dt.NewRow();
            foreach (FieldDescriptor field in fields)
            {
                switch (field.fieldType)
                {
                    case 'N': // Number — always stored as decimal (int path disabled)
                        try
                        {
                            number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                            if (IsNumber(number))
                            {
                                //if (number.IndexOf(".") > -1)
                                {
                                    row[fieldIndex] = decimal.Parse(number);
                                }
                                //else: int.Parse / ulong fallback path disabled (column is decimal)
                            }
                            else
                            {
                                row[fieldIndex] = 0;
                            }
                        }
                        catch
                        {
                            bad = true;   // swallow parse failure, remember data may be lost
                        }
                        break;
                    case 'C': // String
                        if (readingEncoding.CodePage == 1256)
                        {
                            // Bytes are stored reversed: flip, then decode as Windows-1256.
                            byte[] b = recReader.ReadBytes(field.fieldLen);
                            byte[] b2 = new byte[b.Length];
                            for (int iii = 0; iii < b.Length; iii++)
                            {
                                b2[iii] = b[b.Length - iii - 1];
                            }
                            string t = Encoding.GetEncoding(1256).GetString(b2);
                            // (Historical char-by-char remapping and inline Persian-number
                            // detection experiments removed for clarity — see VCS history.)
                            var IsPersianNum = t.IsPersianNum();
                            if (IsPersianNum)
                            {
                                t = t.RevStr();
                            }
                            // Hand-built remap of mis-encoded CP-1256 characters to Persian
                            // letters/digits. Do NOT edit: literals are byte-exact.
                            t = t.Replace("پ", "1").Replace("چ", "آ").Replace("ژ", "ئ").Replace("ڈ", "ء").Replace("گ", "ا").Replace("‘", "ا").Replace("’", "ب ").Replace("“", "ب").
                                Replace("”", "پ ").Replace("•", "پ").Replace("–", "ت ").Replace("—", "ت").Replace("ک", "ث ").Replace("™", "ث").Replace("ڑ", "ج ").
                                Replace("›", "ج").Replace("œ", "چ ").Replace("", "چ").Replace("", "ح ").Replace("ں", "ح").Replace(" ", "خ ").Replace("،", "خ").
                                Replace("¢", "د").Replace("£", "ذ").Replace("¤", "ر").Replace("¥", "ز").Replace("¦", "ژ").Replace("§", "س ").Replace("¨", "س").
                                Replace("©", "ش ").Replace("ھ", "ش").Replace("«", "ص ").Replace("¬", "ص").Replace("", "ض ").Replace("®", "ض").Replace("¯", "ط").
                                Replace("à", "ظ").Replace("ل", "ع ").Replace("â", "ع ").Replace("م", "ع").Replace("ن", "ع").Replace("ه", "غ ").Replace("و", "غ ").
                                Replace("ç", "غ").Replace("è", "غ").Replace("é", "ف ").Replace("ê", "ف").Replace("ë", "ق ").Replace("ى", "ق").Replace("ي", "ك ").
                                Replace("î", "ك").Replace("ï", "گ ").Replace("ً", "گ").Replace("ٌ", "ل ").Replace("ٍ", "لا").Replace("َ", "ل").Replace("ô", "م ").
                                Replace("ُ", "م").Replace("ِ", "ن ").Replace("÷", "ن").Replace("ّ", "و").Replace("ù", "ه ").Replace("ْ", "ه").Replace("û", "ه").
                                Replace("ü", "? ").Replace("", "ي ").Replace("", "ي").Replace("€", "0").Replace("‚", "2").Replace("ƒ", "3").
                                Replace("„", "4").Replace("…", "5").Replace("†", "6").Replace("‡", "7").Replace("ˆ", "8").Replace("‰", "9").Replace("x", "x").
                                /*Replace("(", ")").Replace(")", "(").*/ Replace("-", "-").Replace("_", "_").Replace("‹", "-");
                            // Map '?' (char 63) to Persian yeh.
                            t = t.Replace(Convert.ToChar(63), 'ی');
                            // Re-reverse embedded digit runs when the whole field was not
                            // itself a Persian number.
                            if (!IsPersianNum && t.ContainsPersianNum())
                            {
                                var mc = System.Text.RegularExpressions.Regex.Matches(t, @"[0-9]+");
                                foreach (var m in mc)
                                {
                                    string ms = ((System.Text.RegularExpressions.Match)m).Value;
                                    if (ms.Last() == '-' && ms.Length > 1)
                                    {
                                        try
                                        {
                                            var indexOfms = t.IndexOf(ms);
                                            ms = t.Substring(indexOfms, ms.Length - 1);
                                        }
                                        catch
                                        {
                                        }
                                    }
                                    t = t.Replace(ms, ms.RevStr());
                                }
                            }
                            // Swap '(' and ')' — presumably mirroring for RTL display.
                            string ret = "";
                            for (int j = 0; j < t.Length; j++)
                            {
                                if (t[j] == '(')
                                {
                                    ret += ')';
                                }
                                else if (t[j] == ')')
                                {
                                    ret += '(';
                                }
                                else
                                {
                                    ret += t[j];
                                }
                            }
                            row[fieldIndex] = ret;
                        }
                        else
                        {
                            // Non-1256: remap high bytes via the table, decode with the
                            // system default encoding, then reverse the string.
                            var byteArr = recReader.ReadBytes(field.fieldLen);
                            for (int i = 0; i < byteArr.Length; i++)
                            {
                                if (byteArr[i] > 127)
                                {
                                    byteArr[i] = hrhSuggestedCharMap[byteArr[i] - 128];
                                }
                            }
                            var strVal4 = Encoding.Default.GetString(byteArr);
                            char[] arrayRev = strVal4.ToCharArray();
                            Array.Reverse(arrayRev);
                            var tt = new String(arrayRev);
                            row[fieldIndex] = tt;
                        }
                        break;
                    case 'D': // Date (YYYYMMDD); invalid/old dates stay DBNull
                        year = readingEncoding.GetString(recReader.ReadBytes(4));
                        month = readingEncoding.GetString(recReader.ReadBytes(2));
                        day = readingEncoding.GetString(recReader.ReadBytes(2));
                        row[fieldIndex] = System.DBNull.Value;
                        try
                        {
                            if (IsNumber(year) && IsNumber(month) && IsNumber(day))
                            {
                                if ((Int32.Parse(year) > 1900))
                                {
                                    row[fieldIndex] = new DateTime(Int32.Parse(year), Int32.Parse(month), Int32.Parse(day));
                                }
                            }
                        }
                        catch
                        {
                        }
                        break;
                    case 'T': // Timestamp: Julian day int32 + time int32 (ms -> ticks via * 10000)
                        lDate = recReader.ReadInt32();
                        lTime = recReader.ReadInt32() * 10000L;
                        row[fieldIndex] = JulianToDateTime(lDate).AddTicks(lTime);
                        break;
                    case 'L': // Boolean (Y/N)
                        if ('Y' == recReader.ReadByte())
                        {
                            row[fieldIndex] = true;
                        }
                        else
                        {
                            row[fieldIndex] = false;
                        }
                        break;
                    case 'F': // Float; non-numeric content becomes 0
                        number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                        if (IsNumber(number))
                        {
                            row[fieldIndex] = double.Parse(number);
                        }
                        else
                        {
                            row[fieldIndex] = 0.0F;
                        }
                        break;
                }
                fieldIndex++;
            }
            recReader.Close();
            dt.Rows.Add(row);
        }
    }
    catch
    {
        throw;
    }
    finally
    {
        if (null != br)
        {
            br.Close();
        }
    }
    long count = DateTime.Now.Ticks - start;
    // Disabled warning; message is Persian for "possible lost data".
    //if (bad) MessageBox.Show("ehtemal dadeye gomshode");
    return(dt);
}
// Read an entire standard DBF file into a DataTable.
// Columns are created from the field descriptors in the header (the first data
// record is peeked at to decide between int and decimal for 'N' fields), then
// every non-deleted record is parsed into a row. Returns an empty DataTable
// when the file does not exist.
// Fixes vs. original: invariant-culture parsing for machine-format DBF
// numerics/dates (CA1305), ordinal '.' search (CA1310), consistent double
// parsing for 'F' fields, pinned GCHandles freed via finally, and the
// per-record MemoryStream reader closed on the deleted-record path.
public static DataTable ReadDBF(string dbfFile)
{
    DataTable dt = new DataTable();
    BinaryReader recReader;
    string number;
    string year;
    string month;
    string day;
    DataRow row;

    // If there isn't even a file, just return an empty DataTable
    if ((false == File.Exists(dbfFile)))
    {
        return(dt);
    }

    BinaryReader br = null;
    try
    {
        br = new BinaryReader(File.OpenRead(dbfFile));

        // Byte 29 of the header holds the language-driver (code page) id;
        // resolve it to an Encoding before parsing the rest of the header.
        br.BaseStream.Seek(29, SeekOrigin.Begin);
        Encoding _fileEncoding = GetDbaseLanguageDriver(br.ReadByte());
        br.BaseStream.Seek(0, SeekOrigin.Begin);

        // Marshal the fixed-size header into a DBFHeader structure.
        // Free the pinned handle in finally so it cannot leak if marshalling throws.
        byte[] buffer = br.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header;
        try
        {
            header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        }
        finally
        {
            handle.Free();
        }

        // Read the field descriptors. Their count is derived from the header
        // length (32-byte file header + 32 bytes per descriptor + terminator).
        ArrayList fields = new ArrayList();
        int NumberOfColumns = (header.headerLen - 31) / 32;
        for (int i = 0; i < NumberOfColumns; i++)
        {
            buffer = br.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            try
            {
                fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            }
            finally
            {
                handle.Free();
            }
        }

        // Read the first record (skipping its deleted-flag byte); its content
        // decides whether each 'N' column becomes int or decimal.
        ((FileStream)br.BaseStream).Seek(header.headerLen + 1, SeekOrigin.Begin);
        buffer = br.ReadBytes(header.recordLen);
        recReader = new BinaryReader(new MemoryStream(buffer));

        // Create the columns in our new DataTable.
        DataColumn col = null;
        foreach (FieldDescriptor field in fields)
        {
            number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
            switch (field.fieldType)
            {
                case 'N':
                    // DBF numerics always use '.' as the decimal separator;
                    // the char overload of IndexOf is an ordinal search.
                    if (number.IndexOf('.') > -1)
                    {
                        col = new DataColumn(field.fieldName, typeof(decimal));
                    }
                    else
                    {
                        col = new DataColumn(field.fieldName, typeof(int));
                    }
                    break;
                case 'C':
                    col = new DataColumn(field.fieldName, typeof(string));
                    break;
                case 'D':
                    col = new DataColumn(field.fieldName, typeof(DateTime));
                    break;
                case 'L':
                    col = new DataColumn(field.fieldName, typeof(bool));
                    break;
                case 'F':
                    col = new DataColumn(field.fieldName, typeof(Double));
                    break;
            }
            dt.Columns.Add(col);
        }
        recReader.Close();

        // Skip past the end of the header and read every record.
        ((FileStream)br.BaseStream).Seek(header.headerLen, SeekOrigin.Begin);
        for (int counter = 0; counter <= header.numRecords - 1; counter++)
        {
            // Read the whole record into a buffer, then parse each field from it;
            // this accounts for any extra space at the end of each record.
            buffer = br.ReadBytes(header.recordLen);
            recReader = new BinaryReader(new MemoryStream(buffer));

            // Records begin with a deleted flag: 0x2A '*' = deleted, 0x20 ' ' = live.
            if (recReader.ReadChar() == '*')
            {
                recReader.Close();   // fix: reader was leaked for deleted records
                continue;
            }

            row = dt.NewRow();
            foreach (FieldDescriptor field in fields)
            {
                switch (field.fieldType)
                {
                    case 'N': // Number
                        number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                        if (IsNumber(number))
                        {
                            // fix: parse with the invariant culture — DBF stores
                            // machine-format numbers, not locale-format text.
                            if (number.IndexOf('.') > -1)
                            {
                                row[field.fieldName] = decimal.Parse(number, System.Globalization.CultureInfo.InvariantCulture);
                            }
                            else
                            {
                                row[field.fieldName] = int.Parse(number, System.Globalization.CultureInfo.InvariantCulture);
                            }
                        }
                        else
                        {
                            row[field.fieldName] = 0;
                        }
                        break;
                    case 'C': // String
                        // Decode with the file's language driver, normalize the
                        // apostrophe, and stop at the first NUL padding byte.
                        string sv = _fileEncoding.GetString(recReader.ReadBytes(field.fieldLen)).Trim().Replace("'", "‘");
                        string actualValue = string.Empty;
                        char[] valueArray = sv.ToCharArray();
                        foreach (char c in valueArray)
                        {
                            if (c == '\0')
                            {
                                break;
                            }
                            actualValue += c;
                        }
                        row[field.fieldName] = actualValue;
                        break;
                    case 'D': // Date (YYYYMMDD)
                        year = Encoding.ASCII.GetString(recReader.ReadBytes(4));
                        month = Encoding.ASCII.GetString(recReader.ReadBytes(2));
                        day = Encoding.ASCII.GetString(recReader.ReadBytes(2));
                        row[field.fieldName] = System.DBNull.Value;
                        try
                        {
                            if (IsNumber(year) && IsNumber(month) && IsNumber(day))
                            {
                                // Blank components fall back to 2000-01-01 parts.
                                if (year.Trim() == string.Empty)
                                {
                                    year = "2000";
                                }
                                if (month.Trim() == string.Empty)
                                {
                                    month = "01";
                                }
                                if (day.Trim() == string.Empty)
                                {
                                    day = "01";
                                }
                                if ((Int32.Parse(year, System.Globalization.CultureInfo.InvariantCulture) > 1900))
                                {
                                    row[field.fieldName] = new DateTime(
                                        Int32.Parse(year, System.Globalization.CultureInfo.InvariantCulture),
                                        Int32.Parse(month, System.Globalization.CultureInfo.InvariantCulture),
                                        Int32.Parse(day, System.Globalization.CultureInfo.InvariantCulture));
                                }
                            }
                        }
                        catch
                        {
                            // Malformed dates deliberately stay DBNull.
                        }
                        break;
                    case 'L': // Boolean (Y/N)
                        if ('Y' == recReader.ReadByte())
                        {
                            row[field.fieldName] = true;
                        }
                        else
                        {
                            row[field.fieldName] = false;
                        }
                        break;
                    case 'F': // Float
                        number = Encoding.ASCII.GetString(recReader.ReadBytes(field.fieldLen));
                        // fix: the original validated with float.TryParse but then stored
                        // double.Parse(number) under the current culture; parse once as
                        // double with the invariant culture. Unparseable values leave the
                        // cell DBNull, as before.
                        double dv;
                        if (double.TryParse(number, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out dv))
                        {
                            row[field.fieldName] = dv;
                        }
                        break;
                }
            }
            recReader.Close();
            dt.Rows.Add(row);
        }
    }
    finally
    {
        if (null != br)
        {
            br.Close();
        }
    }
    return(dt);
}
// Builds a DBF file at 'path' from SQL data, using 'dbfStream' as a template:
// the header (with its record count patched to the row count reported by
// SQLHelper) and the field descriptors are copied to the output, then data
// records are generated page-by-page from SQL query results, encoded as code
// page 866 and appended. NOTE(review): despite the DataTable return type and
// the original "Read ... into a DataTable" comment, this method always
// returns null; 'dt' is never populated or returned.
public static DataTable WriteDBF(Stream dbfStream, string tableName, string path)
{
    // Progress callback, if anyone subscribed. (Message text is Russian:
    // "Processing table <name>...")
    if (BeginSomeStep != null)
    {
        BeginSomeStep(className, new dbfCoreEventArgs("Обработка таблицы " + tableName + "...", ""));
    }
    //Console.WriteLine("Обработка таблицы {0}...", tableName);
    DataTable dt = new DataTable();
    BinaryReader recReader;   // unused
    DataRow row;              // unused
    int fieldIndex;           // unused
    RecreateFile(path);       // start from a fresh output file
    dt.TableName = tableName;
    BinaryReader dbfReader = null;
    try
    {
        //br = new BinaryReader(new FileStream(dbfFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite));
        dbfReader = new BinaryReader(dbfStream);
        // Read the template header and patch the 4 bytes at offset 4 (the DBF
        // record-count field) with the number of rows that will be written.
        byte[] buffer = dbfReader.ReadBytes(Marshal.SizeOf(typeof(DBFHeader)));
        int tableRowNum = SQLHelper.GetRowCountFromTable(sqlParameters);
        byte[] rowNum = BitConverter.GetBytes(tableRowNum);
        for (int i = 0; i < rowNum.Length; i++)
        {
            buffer[4 + i] = rowNum[i];
        }
        WriteToFile(buffer, path);
        // Marshall the (patched) header into a DBFHeader structure.
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        DBFHeader header = (DBFHeader)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(DBFHeader));
        handle.Free();
        // Copy the field descriptors verbatim until the 0x0D terminator.
        ArrayList fields = new ArrayList();
        while ((13 != dbfReader.PeekChar()))
        {
            buffer = dbfReader.ReadBytes(Marshal.SizeOf(typeof(FieldDescriptor)));
            handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
            fields.Add((FieldDescriptor)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(FieldDescriptor)));
            handle.Free();
            WriteToFile(buffer, path);
            // Safety valve: abort if the terminator is never found.
            if (fields.Count > Int16.MaxValue)
            {
                return(null);
            }
        }
        // Copy the header terminator byte to the output, then read one more
        // byte which is written back after all records (trailing marker).
        buffer = dbfReader.ReadBytes(1);
        WriteToFile(buffer, path);
        buffer = dbfReader.ReadBytes(1);
        // Fetch the source rows in pages of sqlParameters.rowsToSelect.
        int iteration = (tableRowNum / sqlParameters.rowsToSelect) + 1;
        using (var stream = new FileStream(path, FileMode.Append))
        {
            for (int i = 0; i < iteration; i++)
            {
                // Get the page of data to be written into the DBF.
                DataTable dataToWrite = SQLHelper.GetDataFromTable(sqlParameters, i);
                // Build the fixed-width record text for each row.
                foreach (DataRow r in dataToWrite.Rows)
                {
                    int position = 1;
                    StringBuilder result = new StringBuilder();
                    // Leading byte: '*' = deleted record, ' ' = live record.
                    result.Append((bool)r["IsDeleted"] ?
                                  "*" : " ", result.Length, 1);
                    foreach (FieldDescriptor c in fields)
                    {
                        StringBuilder curVal = new StringBuilder(r[c.fieldName].ToString());
                        // Dates must be serialized in the DBF 'D' field format.
                        if (c.fieldType == dBaseType.D)
                        {
                            curVal = dBaseConverter.DateTime_ToD(curVal);
                        }
                        // Pad with spaces / truncate to the exact declared field length.
                        if (curVal.Length < c.fieldLen)
                        {
                            curVal.Insert(curVal.Length, " ", c.fieldLen - curVal.Length);
                        }
                        if (curVal.Length > c.fieldLen)
                        {
                            curVal.Remove(c.fieldLen, curVal.Length - c.fieldLen);
                        }
                        result.Append(curVal);
                        position += c.fieldLen;
                        //Console.WriteLine(r[c.fieldName] + "------" + c.fieldLen.ToString());
                    }
                    // Only write records of exactly the expected length, encoded as
                    // DOS Cyrillic (code page 866).
                    if (result.Length == header.recordLen)
                    {
                        //WriteToFile(Encoding.GetEncoding(866).GetBytes(result.ToString()), path);
                        stream.Write(Encoding.GetEncoding(866).GetBytes(result.ToString()), 0, result.Length);
                    }
                    else
                    {
                        // Length mismatch: the row is silently dropped from the output.
                        Console.WriteLine("wr");
                    }
                }
                dataToWrite.Dispose();
            }
            stream.Close();
        }
        // Append the trailing byte read earlier.
        WriteToFile(buffer, path);
    }
    catch
    {
        throw;
    }
    finally
    {
        if (dbfReader != null)
        {
            dbfReader.Close();
            dbfReader = null;
        }
    }
    return(null);
}