Esempio n. 1
0
        /// <summary>
        /// Reads a dBASE (.dbf) file into a <see cref="System.Data.DataTable"/> named
        /// <paramref name="tableName"/>, skipping records flagged as deleted.
        /// </summary>
        /// <param name="dbfFileName">Path of the .dbf file to read.</param>
        /// <param name="tableName">Name given to the resulting table.</param>
        /// <exception cref="NotImplementedException">The header length does not match the expected dBASE layout.</exception>
        public static System.Data.DataTable Read(string dbfFileName, string tableName)
        {
            // `using` guarantees the file handle is released even when parsing throws
            // (previously the stream leaked on any exception path). FileAccess.Read +
            // FileShare.Read also allow read-only files and concurrent readers.
            using (System.IO.Stream stream = new System.IO.FileStream(dbfFileName, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read))
            using (System.IO.BinaryReader reader = new System.IO.BinaryReader(stream))
            {
                byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfHeader)));

                DbfHeader header = IRI.Msh.Common.Helpers.StreamHelper.ByteArrayToStructure<DbfHeader>(buffer);

                List<DbfFieldDescriptor> columns = new List<DbfFieldDescriptor>();

                // dBASE header layout: 32-byte fixed header + n * 32-byte field
                // descriptors + 1 terminator byte (0x0D), so (length - 33) must be
                // a multiple of 32; anything else is a format we do not handle.
                if ((header.LengthOfHeader - 33) % 32 != 0)
                {
                    throw new NotImplementedException();
                }

                int numberOfFields = (header.LengthOfHeader - 33) / 32;

                for (int i = 0; i < numberOfFields; i++)
                {
                    buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfFieldDescriptor)));

                    columns.Add(DbfFieldDescriptor.Parse(buffer, DbfFile._fieldsEncoding));
                }

                var mapFunctions = DbfFieldMappings.GetMappingFunctions(_currentEncoding, _correctFarsiCharacters);

                System.Data.DataTable result = MakeTableSchema(tableName, columns);

                // Jump past the header (including the 0x0D terminator) to the first
                // record; Stream.Seek needs no cast to FileStream.
                reader.BaseStream.Seek(header.LengthOfHeader, SeekOrigin.Begin);

                for (int i = 0; i < header.NumberOfRecords; i++)
                {
                    // Read the entire record into a buffer and parse fields from it.
                    // This accounts for any extra space at the end of each record and
                    // avoids many small file reads.
                    buffer = reader.ReadBytes(header.LengthOfEachRecord);

                    using (BinaryReader recordReader = new BinaryReader(new MemoryStream(buffer)))
                    {
                        // Every record starts with a deletion flag: 0x2A ('*') = deleted,
                        // 0x20 (space) = live. ReadByte (not ReadChar) avoids decoder
                        // exceptions should the byte not be valid in the reader's encoding.
                        if (recordReader.ReadByte() == 0x2A)
                        {
                            continue;
                        }

                        object[] values = new object[columns.Count];

                        for (int j = 0; j < columns.Count; j++)
                        {
                            int fieldLength = columns[j].Length;

                            values[j] = mapFunctions[columns[j].Type](recordReader.ReadBytes(fieldLength));
                        }

                        result.Rows.Add(values);
                    }
                }

                return result;
            }
        }
Esempio n. 2
0
        /// <summary>
        /// Reads a dBASE (.dbf) file into an <see cref="EsriAttributeDictionary"/>:
        /// one name/value dictionary per non-deleted record, plus the field descriptors.
        /// </summary>
        /// <param name="dbfFileName">Path of the .dbf file to read.</param>
        /// <param name="correctFarsiCharacters">When true, Farsi character correction is applied by the value mapping functions.</param>
        /// <param name="dataEncoding">Encoding of record data; when null it is auto-detected, falling back to UTF-8.</param>
        /// <param name="fieldHeaderEncoding">Encoding of field names; when null the class-level Arabic encoding is used.</param>
        /// <exception cref="NotImplementedException">The header length does not match the expected dBASE layout.</exception>
        public static EsriAttributeDictionary Read(string dbfFileName, bool correctFarsiCharacters = true, Encoding dataEncoding = null, Encoding fieldHeaderEncoding = null)
        {
            // Encoding precedence: explicit argument > detected from file > UTF-8.
            dataEncoding = dataEncoding ?? TryDetectEncoding(dbfFileName) ?? Encoding.UTF8;

            ChangeEncoding(dataEncoding);

            DbfFile._fieldsEncoding = fieldHeaderEncoding ?? DbfFile._arabicEncoding;

            DbfFile._correctFarsiCharacters = correctFarsiCharacters;

            // `using` guarantees the file handle is released even when parsing throws
            // (previously the stream leaked on any exception path). FileAccess.Read +
            // FileShare.Read also allow read-only files and concurrent readers.
            using (System.IO.Stream stream = new System.IO.FileStream(dbfFileName, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read))
            using (System.IO.BinaryReader reader = new System.IO.BinaryReader(stream))
            {
                byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfHeader)));

                DbfHeader header = IRI.Msh.Common.Helpers.StreamHelper.ByteArrayToStructure<DbfHeader>(buffer);

                List<DbfFieldDescriptor> fields = new List<DbfFieldDescriptor>();

                // dBASE header layout: 32-byte fixed header + n * 32-byte field
                // descriptors + 1 terminator byte (0x0D), so (length - 33) must be
                // a multiple of 32; anything else is a format we do not handle.
                if ((header.LengthOfHeader - 33) % 32 != 0)
                {
                    throw new NotImplementedException();
                }

                int numberOfFields = (header.LengthOfHeader - 33) / 32;

                for (int i = 0; i < numberOfFields; i++)
                {
                    buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfFieldDescriptor)));

                    fields.Add(DbfFieldDescriptor.Parse(buffer, DbfFile._fieldsEncoding));
                }

                var mapFunctions = DbfFieldMappings.GetMappingFunctions(_currentEncoding, _correctFarsiCharacters);

                // Presize with NumberOfRecords; it is an upper bound because deleted
                // records are skipped below.
                var attributes = new List<Dictionary<string, object>>(header.NumberOfRecords);

                // Jump past the header (including the 0x0D terminator) to the first record.
                reader.BaseStream.Seek(header.LengthOfHeader, SeekOrigin.Begin);

                for (int i = 0; i < header.NumberOfRecords; i++)
                {
                    // Read the entire record into a buffer and parse fields from it.
                    // This accounts for any extra space at the end of each record and
                    // avoids many small file reads.
                    buffer = reader.ReadBytes(header.LengthOfEachRecord);

                    using (BinaryReader recordReader = new BinaryReader(new MemoryStream(buffer)))
                    {
                        // Every record starts with a deletion flag: 0x2A ('*') = deleted,
                        // 0x20 (space) = live. ReadByte (not ReadChar) avoids decoder
                        // exceptions should the byte not be valid in the reader's encoding.
                        if (recordReader.ReadByte() == 0x2A)
                        {
                            continue;
                        }

                        var values = new Dictionary<string, object>();

                        for (int j = 0; j < fields.Count; j++)
                        {
                            int fieldLength = fields[j].Length;

                            values.Add(fields[j].Name, mapFunctions[fields[j].Type](recordReader.ReadBytes(fieldLength)));
                        }

                        attributes.Add(values);
                    }
                }

                return new EsriAttributeDictionary(attributes, fields);
            }
        }