Code example #1
        // Write a System.Data.DataTable to a dbf file using the given character encoding.
        public static void Write(string fileName, System.Data.DataTable table, Encoding encoding, bool overwrite = false)
        {
            var mode = Shapefile.GetMode(fileName, overwrite);

            System.IO.Stream stream = new System.IO.FileStream(fileName, mode);

            System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);

            List<DbfFieldDescriptor> columns = MakeDbfFields(table.Columns);

            DbfHeader header = new DbfHeader(table.Rows.Count, table.Columns.Count, GetRecordLength(columns), encoding);

            writer.Write(IRI.Msh.Common.Helpers.StreamHelper.StructureToByteArray(header));

            foreach (var item in columns)
            {
                writer.Write(IRI.Msh.Common.Helpers.StreamHelper.StructureToByteArray(item));
            }

            // Field descriptor terminator (0x0D)
            writer.Write((byte)0x0D);

            for (int i = 0; i < table.Rows.Count; i++)
            {
                // Each dbf record begins with a deleted flag: 0x2A (asterisk) if deleted, otherwise 0x20 (space).
                writer.Write((byte)0x20);

                for (int j = 0; j < table.Columns.Count; j++)
                {
                    string value = table.Rows[i][j].ToString().Trim();

                    // Encode the value into a fixed-width byte block matching the field length.
                    writer.Write(DbfFieldMappings.Encode(value, columns[j].Length, encoding));
                }
            }

            // End-of-file marker (0x1A)
            writer.Write((byte)0x1A);

            writer.Close();

            stream.Close();
        }
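
Usage sketch for code example #1. This is an illustration only: it assumes the method lives on a static DbfFile class (as the Read examples below suggest), and the column names and sample rows are made up.

        var table = new System.Data.DataTable("cities");
        table.Columns.Add("NAME", typeof(string));
        table.Columns.Add("POP", typeof(int));
        table.Rows.Add("Tehran", 100);
        table.Rows.Add("Isfahan", 200);

        // Overwrite any existing cities.dbf and encode character data as UTF-8.
        DbfFile.Write("cities.dbf", table, System.Text.Encoding.UTF8, overwrite: true);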
Code example #2
        // Write a sequence of objects to a dbf file; each ObjectToDbfTypeMap entry describes one field and how to extract its value.
        public static void Write<T>(string dbfFileName,
                                    IEnumerable<T> values,
                                    List<ObjectToDbfTypeMap<T>> mapping,
                                    Encoding encoding,
                                    bool overwrite = false)
        {
            var columns = mapping.Select(m => m.FieldType).ToList();

            // Track the record index so a failure can be reported with context.
            int control = 0;

            try
            {
                // Materialize the source once so it is not enumerated repeatedly below.
                var records = values as IList<T> ?? values.ToList();

                var mode = Shapefile.GetMode(dbfFileName, overwrite);

                System.IO.Stream stream = new System.IO.FileStream(dbfFileName, mode);

                System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream);

                DbfHeader header = new DbfHeader(records.Count, mapping.Count, GetRecordLength(columns), encoding);

                writer.Write(IRI.Msh.Common.Helpers.StreamHelper.StructureToByteArray(header));

                foreach (var item in columns)
                {
                    writer.Write(IRI.Msh.Common.Helpers.StreamHelper.StructureToByteArray(item));
                }

                // Field descriptor terminator (0x0D)
                writer.Write((byte)0x0D);

                for (int i = 0; i < records.Count; i++)
                {
                    control = i;

                    // Each dbf record begins with a deleted flag: 0x2A (asterisk) if deleted, otherwise 0x20 (space).
                    writer.Write((byte)0x20);

                    for (int j = 0; j < mapping.Count; j++)
                    {
                        object value = mapping[j].MapFunction(records[i]);

                        // Encode the mapped value into a fixed-width byte block matching the field length.
                        var temp = DbfFieldMappings.Encode(value, columns[j].Length, encoding);

                        writer.Write(temp);
                    }
                }

                // End-of-file marker (0x1A)
                writer.Write((byte)0x1A);

                writer.Close();

                stream.Close();

                System.IO.File.WriteAllText(Shapefile.GetCpgFileName(dbfFileName), encoding.BodyName);
            }
            catch (Exception ex)
            {
                // Report which record failed instead of silently swallowing the exception.
                throw new InvalidOperationException($"Failed to write dbf record {control}.", ex);
            }
        }
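
Usage sketch for code example #2. It assumes ObjectToDbfTypeMap<T> exposes settable FieldType (a DbfFieldDescriptor) and MapFunction (object-returning delegate) members, which is how Write<T> reads them above; the descriptors themselves are taken as parameters because their construction is library-specific and not shown here.

        public class CityRow
        {
            public string Name { get; set; }
            public int Population { get; set; }
        }

        public static void WriteCities(IEnumerable<CityRow> cities,
                                       DbfFieldDescriptor nameField,        // character field descriptor (construction not shown)
                                       DbfFieldDescriptor populationField)  // numeric field descriptor (construction not shown)
        {
            var mapping = new List<ObjectToDbfTypeMap<CityRow>>
            {
                new ObjectToDbfTypeMap<CityRow> { FieldType = nameField,       MapFunction = c => c.Name },
                new ObjectToDbfTypeMap<CityRow> { FieldType = populationField, MapFunction = c => c.Population },
            };

            DbfFile.Write("cities.dbf", cities, mapping, System.Text.Encoding.UTF8, overwrite: true);
        }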
Code example #3
        // Read a dbf file into a System.Data.DataTable named tableName.
        public static System.Data.DataTable Read(string dbfFileName, string tableName)
        {
            System.IO.Stream stream = new System.IO.FileStream(dbfFileName, System.IO.FileMode.Open);

            System.IO.BinaryReader reader = new System.IO.BinaryReader(stream);

            byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfHeader)));

            DbfHeader header = IRI.Msh.Common.Helpers.StreamHelper.ByteArrayToStructure<DbfHeader>(buffer);

            List<DbfFieldDescriptor> columns = new List<DbfFieldDescriptor>();

            // Header layout: 32-byte fixed header + 32 bytes per field descriptor + 1 terminator byte.
            if ((header.LengthOfHeader - 33) % 32 != 0)
            {
                throw new NotImplementedException();
            }

            int numberOfFields = (header.LengthOfHeader - 33) / 32;

            for (int i = 0; i < numberOfFields; i++)
            {
                buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfFieldDescriptor)));

                columns.Add(DbfFieldDescriptor.Parse(buffer, DbfFile._fieldsEncoding));
            }

            var mapFunctions = DbfFieldMappings.GetMappingFunctions(_currentEncoding, _correctFarsiCharacters);

            System.Data.DataTable result = MakeTableSchema(tableName, columns);

            ((FileStream)reader.BaseStream).Seek(header.LengthOfHeader, SeekOrigin.Begin);

            for (int i = 0; i < header.NumberOfRecords; i++)
            {
                // First we'll read the entire record into a buffer and then read each field from the buffer
                // This helps account for any extra space at the end of each record and probably performs better
                buffer = reader.ReadBytes(header.LengthOfEachRecord);

                BinaryReader recordReader = new BinaryReader(new MemoryStream(buffer));

                // All dbf field records begin with a deleted flag field. Deleted - 0x2A (asterisk) else 0x20 (space)
                if (recordReader.ReadChar() == '*')
                {
                    continue;
                }

                object[] values = new object[columns.Count];

                for (int j = 0; j < columns.Count; j++)
                {
                    int fieldLength = columns[j].Length;

                    values[j] = mapFunctions[columns[j].Type](recordReader.ReadBytes(fieldLength));
                }

                recordReader.Close();

                result.Rows.Add(values);
            }

            reader.Close();

            stream.Close();

            return result;
        }
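
Usage sketch for code example #3 (again assuming a static DbfFile class; the file and table names are placeholders):

        System.Data.DataTable cities = DbfFile.Read("cities.dbf", "cities");

        foreach (System.Data.DataRow row in cities.Rows)
        {
            // Print every field of the record, separated by " | ".
            Console.WriteLine(string.Join(" | ", row.ItemArray));
        }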
Code example #4
        // Read a dbf file into an EsriAttributeDictionary (one name/value dictionary per record).
        public static EsriAttributeDictionary Read(string dbfFileName, bool correctFarsiCharacters = true, Encoding dataEncoding = null, Encoding fieldHeaderEncoding = null)
        {
            // If no data encoding is supplied, try to detect one; otherwise fall back to UTF-8.
            dataEncoding = dataEncoding ?? TryDetectEncoding(dbfFileName) ?? Encoding.UTF8;

            ChangeEncoding(dataEncoding);

            // Field names in the header may use a different encoding than the record data.
            DbfFile._fieldsEncoding = fieldHeaderEncoding ?? DbfFile._arabicEncoding;

            DbfFile._correctFarsiCharacters = correctFarsiCharacters;

            System.IO.Stream stream = new System.IO.FileStream(dbfFileName, System.IO.FileMode.Open);

            System.IO.BinaryReader reader = new System.IO.BinaryReader(stream);

            byte[] buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfHeader)));

            DbfHeader header = IRI.Msh.Common.Helpers.StreamHelper.ByteArrayToStructure<DbfHeader>(buffer);

            List<DbfFieldDescriptor> fields = new List<DbfFieldDescriptor>();

            // Header layout: 32-byte fixed header + 32 bytes per field descriptor + 1 terminator byte.
            if ((header.LengthOfHeader - 33) % 32 != 0)
            {
                throw new NotImplementedException();
            }

            int numberOfFields = (header.LengthOfHeader - 33) / 32;

            for (int i = 0; i < numberOfFields; i++)
            {
                buffer = reader.ReadBytes(Marshal.SizeOf(typeof(DbfFieldDescriptor)));

                fields.Add(DbfFieldDescriptor.Parse(buffer, DbfFile._fieldsEncoding));
            }

            var mapFunctions = DbfFieldMappings.GetMappingFunctions(_currentEncoding, _correctFarsiCharacters);

            var attributes = new List<Dictionary<string, object>>(header.NumberOfRecords);

            ((FileStream)reader.BaseStream).Seek(header.LengthOfHeader, SeekOrigin.Begin);

            for (int i = 0; i < header.NumberOfRecords; i++)
            {
                // First we'll read the entire record into a buffer and then read each field from the buffer
                // This helps account for any extra space at the end of each record and probably performs better
                buffer = reader.ReadBytes(header.LengthOfEachRecord);
                BinaryReader recordReader = new BinaryReader(new MemoryStream(buffer));

                // All dbf field records begin with a deleted flag field. Deleted - 0x2A (asterisk) else 0x20 (space)
                if (recordReader.ReadChar() == '*')
                {
                    continue;
                }

                Dictionary<string, object> values = new Dictionary<string, object>();

                for (int j = 0; j < fields.Count; j++)
                {
                    int fieldLength = fields[j].Length;

                    values.Add(fields[j].Name, mapFunctions[fields[j].Type](recordReader.ReadBytes(fieldLength)));
                }

                recordReader.Close();

                attributes.Add(values);
            }

            reader.Close();

            stream.Close();

            return new EsriAttributeDictionary(attributes, fields);
        }
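
Usage sketch for code example #4. Only the parameters visible in the signature above are used; paths and encodings are illustrative (on .NET Core / .NET 5+, code page 1256 requires the System.Text.Encoding.CodePages provider to be registered first).

        // Let the reader pick the data encoding, falling back to UTF-8.
        var attributes = DbfFile.Read(@"data\parcels.dbf");

        // Or force a legacy code page, e.g. Windows-1256 for older Arabic/Farsi dbf files.
        var legacy = DbfFile.Read(@"data\legacy.dbf",
                                  correctFarsiCharacters: true,
                                  dataEncoding: System.Text.Encoding.GetEncoding(1256),
                                  fieldHeaderEncoding: System.Text.Encoding.GetEncoding(1256));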