/// <summary>
/// Parses the leading portion of the 32-byte DBF file header from the
/// current position of <paramref name="br"/>.
/// </summary>
/// <param name="br">Reader positioned at the start of the DBF header.</param>
/// <returns>A populated <see cref="DbfHeader"/>.</returns>
private DbfHeader ReadHeader(BinaryReader br)
{
    DbfHeader result = new DbfHeader();

    // Byte 0: version/signature byte.
    result.Version = br.ReadByte();

    // Bytes 1-3: last-update stamp stored as YY MM DD; the two-digit year
    // is pivoted at 70 (< 70 => 20xx, otherwise 19xx).
    int year = br.ReadByte();
    year = year < 70 ? year + 2000 : year + 1900;
    int month = br.ReadByte();
    int day = br.ReadByte();
    result.LastUpdate = new DateTime(year, month, day);

    // Bytes 4-7: record count. Bytes 8-9/10-11: header and record lengths.
    result.RecordCount = br.ReadInt32();
    result.HeaderLength = br.ReadInt16();
    result.RecordLength = br.ReadInt16();

    // Remaining header bytes are consumed but not retained.
    ReadReservedByte();
    ReadReservedByte();
    ReadReservedByte("Incomplete transaction");
    ReadReservedByte("Encryption");
    ReadReservedBytes(12);
    br.ReadByte(); // production .mdx flag
    br.ReadByte(); // language driver id
    ReadReservedByte();
    ReadReservedByte();

    return result;
}
/// <summary>
/// Reads the 32-byte DBF header into <c>Header</c> and validates the
/// version encoded in the low bits of the signature.
/// </summary>
/// <exception cref="DbfReaderException">
/// Thrown when the signature's version bits indicate an unsupported format.
/// </exception>
private void ReadHeader()
{
    Header = ToStructure <DbfHeader>(BinaryReader.ReadBytes(32));
    if ((Header.Signature & 7) > 3)
    {
        // Fix: corrected the misspelled error message ("unsuppurted").
        throw new DbfReaderException("DBF file has unsupported version");
    }
}
/// <summary>
/// Builds a SQL Server CREATE TABLE statement matching the DBF schema of
/// <paramref name="reader"/>, with an identity PK column and a geometry column.
/// </summary>
/// <param name="reader">Open shapefile reader supplying the DBF column layout.</param>
/// <param name="tableName">Target table name (used for the table, Id column and PK name).</param>
/// <returns>The CREATE TABLE statement text.</returns>
/// <exception cref="Exception">Thrown for DBF column types with no SQL mapping.</exception>
private static string GenerateCreateTableQuery(ShapefileDataReader reader, string tableName)
{
    // Fix: escape ']' so a table/column name cannot break out of its
    // bracket quoting (SQL identifier injection via crafted names).
    string safeTable = tableName.Replace("]", "]]");

    StringBuilder bldr = new StringBuilder();
    bldr.Append("CREATE TABLE [");
    bldr.Append(safeTable);
    bldr.Append("] ([Id_");
    bldr.Append(safeTable);
    bldr.Append("] [int] IDENTITY(1,1) NOT NULL, [Geom] [geometry] NOT NULL, ");

    DbfHeader header = reader.DbfHeader;
    for (int i = 0; i < header.Count; i++)
    {
        DbfColumn col = header[i];
        bldr.Append('[');
        bldr.Append(col.Name.Replace("]", "]]"));
        bldr.Append("] ");
        switch (col.Type)
        {
        case DbfColumnType.Character:
            bldr.Append("[varchar](");
            bldr.Append(col.Length);
            bldr.Append(")");
            break;

        case DbfColumnType.Float:
        case DbfColumnType.Number:
            bldr.Append("[float]");
            break;

        case DbfColumnType.Boolean:
            bldr.Append("[bit]");
            break;

        case DbfColumnType.Date:
            bldr.Append("[datetime]");
            break;

        default:
            throw new Exception(String.Format("Column type '{0}' is not supported.", (char)col.Type));
        }
        bldr.Append(" NULL, ");
    }

    bldr.Append("CONSTRAINT [PK_");
    bldr.Append(safeTable);
    bldr.Append("] PRIMARY KEY CLUSTERED ([Id_");
    bldr.Append(safeTable);
    bldr.Append("] ASC) WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON))");
    return(bldr.ToString());
}
/// <summary>
/// Merges the geometries of several source shapefiles into a single output
/// shapefile with one "Label" character attribute, writing the given
/// projection text as the .prj sidecar file.
/// </summary>
/// <param name="finalShape">Path of the combined output shapefile.</param>
/// <param name="projection">ESRI projection text written to the .prj file and used as transform target.</param>
/// <param name="combineShapes">Sources to merge; each supplies a file path and a label column.</param>
public static void Combine(string finalShape, string projection, params CombineShapefile[] combineShapes)
{
    // Output schema: a single 80-character "Label" column.
    DbfHeader dbfHeader = new DbfHeader();
    dbfHeader.AddCharacter("Label", 80);
    ShapefileHeader shapeHeader = ShapefileHeader.CreateEmpty(ShapefileGeometryType.Polygon);
    GeometryFactory gf = new GeometryFactory();
    using (ShapefileDataWriter writer = ShapefileDataWriter.Create(finalShape, dbfHeader, shapeHeader))
    {
        // Write the projection file.
        File.WriteAllText(Path.ChangeExtension(finalShape, ".prj"), projection);
        foreach (CombineShapefile workerShp in combineShapes)
        {
            // Null transform means the source already matches the target projection.
            GeometryTransform transform = GeometryTransform.GetTransform(workerShp.FilePath, projection);
            using (ShapefileIndexReader index = new ShapefileIndexReader(Path.ChangeExtension(workerShp.FilePath, ".shx")))
            {
                // Grow the output bounds by the (possibly reprojected) source bounds.
                if (transform != null)
                {
                    writer.Header.Bounds.ExpandToInclude(transform.Apply(index.Header.Bounds));
                }
                else
                {
                    writer.Header.Bounds.ExpandToInclude(index.Header.Bounds);
                }
                // One task per processor; each task pulls blocks of records from the
                // shared index and writes them to the shared writer.
                // NOTE(review): tasks concurrently share `index` and `writer`; this
                // assumes ShapefileBlockReader/ShapefileDataWriter synchronize access
                // internally — confirm thread-safety of those types.
                Task[] tasks = new Task[Environment.ProcessorCount];
                for (int i = 0; i < tasks.Length; i++)
                {
                    tasks[i] = Task.Factory.StartNew(() =>
                    {
                        using (ShapefileBlockReader reader = new ShapefileBlockReader(workerShp.FilePath, index, gf, transform))
                        {
                            while (reader.Read())
                            {
                                writer.Write(reader.Geometry, reader.Record.GetString(workerShp.Label));
                            }
                        }
                    });
                }
                Task.WaitAll(tasks);
                writer.Flush();
            }
        }
    }
}
/// <summary>
/// Opens a dbf file: resets the header/table state, then reads the file
/// header followed by the data rows.
/// </summary>
/// <param name="filename">Path of the .dbf file to load.</param>
public void OpenFile(string filename)
{
    // Start from a clean slate so a repeated call does not accumulate state.
    Header = new DbfHeader();
    Header._SubRecords = new List <DbfSubrecord>();
    DataTable = new DataTable();

    ReadHeader(filename);
    ReadData(filename);
}
/// <summary>
/// Creates a new shapefile writer: opens the .shp/.shx pair for exclusive
/// writing, creates the companion .dbf, and positions both streams past
/// their 100-byte headers (the headers are rewritten on flush).
/// </summary>
/// <param name="path">Path of the .shp file to create.</param>
/// <param name="dbfHeader">Column layout for the companion .dbf file.</param>
/// <param name="shapeHeader">Initial shapefile header (bounds, geometry type).</param>
/// <returns>A writer ready to accept records.</returns>
public static ShapefileDataWriter Create(string path, DbfHeader dbfHeader, ShapefileHeader shapeHeader)
{
    ShapefileDataWriter result = new ShapefileDataWriter(path, shapeHeader, FileMode.CreateNew, FileAccess.Write);

    // Reserve the fixed 100-byte headers of the .shp and .shx files.
    const long headerLength = 100L;
    result._writerShape.BaseStream.Seek(headerLength, SeekOrigin.Begin);
    result._writerIndex.BaseStream.Seek(headerLength, SeekOrigin.Begin);

    // Companion attribute file and a reusable record buffer.
    result._dbf = DbfFile.Create(Path.ChangeExtension(path, ".dbf"), dbfHeader);
    result._currentRecord = new DbfRecord(dbfHeader);

    result._recordNumber = 1;
    result._filePos = 50; // presumably 50 16-bit words == the 100-byte header — confirm units
    return result;
}
/// <summary>
/// Opens a DBF file for reading: resolves the text encoding from the code
/// page, reads the file header and all field descriptors, then skips any
/// remaining header bytes so the stream is positioned at the first record.
/// </summary>
/// <param name="filename">Path of the .dbf file.</param>
/// <param name="codepage">Code page for text fields; 0 selects ASCII.</param>
/// <exception cref="Exception">Wraps failures while reading the header or field descriptors.</exception>
public DbfFileStream(string filename, int codepage)
{
    if (codepage != 0)
    {
        // Legacy code pages require the CodePages provider on .NET Core+.
        System.Text.Encoding.RegisterProvider(System.Text.CodePagesEncodingProvider.Instance);
        textEncoding = Encoding.GetEncoding(codepage);
    }
    else
    {
        textEncoding = Encoding.ASCII;
    }

    fileStream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read);
    binaryReader = new BinaryReader(fileStream);
    try
    {
        try
        {
            header = ReadHeader(binaryReader);
        }
        catch (Exception e)
        {
            throw new Exception("Failed to read header", e);
        }

        var fieldDescriptors = new List <DbfFieldDescriptor>();
        try
        {
            int no = 0;
            while (true)
            {
                // ReadFieldDescriptor returns null at the descriptor terminator.
                var fieldDescriptor = ReadFieldDescriptor(binaryReader, no++);
                if (fieldDescriptor == null)
                {
                    break;
                }
                fieldDescriptors.Add(fieldDescriptor);
            }
        }
        catch (Exception e)
        {
            throw new Exception("Failed to read field descriptors", e);
        }

        // Skip the remainder of the header: 32 header bytes + 32 per
        // descriptor + 1 terminator byte have been consumed so far.
        int bytesRead = 32 + (32 * fieldDescriptors.Count) + 1;
        binaryReader.ReadBytes(header.HeaderLength - bytesRead);
        this.fieldDescriptors = fieldDescriptors;
    }
    catch
    {
        // Fix: previously the open file handle leaked when header parsing
        // threw, since the half-constructed object was never disposed.
        binaryReader.Dispose();
        fileStream.Dispose();
        throw;
    }
}
/// <summary>
/// Smoke-test read of a DBF file: prints the column count, then builds a
/// list of MapPolygon items from every record with at least 5 columns.
/// </summary>
/// <param name="filepath">Path of the .dbf file to read.</param>
private static void DbfTestRead(string filepath)
{
    // Code page 936 (GBK): Chinese characters take two bytes here,
    // versus three bytes under Encoding.UTF8.
    var dbf = new DbfFile(Encoding.GetEncoding(936));
    dbf.Open(filepath, FileMode.Open);

    DbfHeader header = dbf.Header;
    Console.WriteLine(header.ColumnCount);

    var polygons = new List <MapPolygon>();
    for (int row = 0; row < header.RecordCount; row++)
    {
        DbfRecord record = dbf.Read(row);
        if (record.ColumnCount < 5)
        {
            continue; // skip short records
        }

        var polygon = new MapPolygon();
        polygon.Code = record[record.FindColumEx("图斑编码")].Trim();  // parcel code
        polygon.Mark = record[record.FindColumEx("措施代码")].Trim();  // measure code
        polygon.Name = record[record.FindColumEx("措施名称")].Trim();  // measure name
        polygon.State = record[record.FindColumEx("利用现状")].Trim(); // land-use status
        polygons.Add(polygon);
    }

    Console.Read();
}
// Reads field metadata (names + types) from the source dbf file.
public List <R_FieldInf> GetFieldInfs(string path)
{
    string dbfpath = path;

    // Field names via FastDBF.
    DbfFile dbf = new DbfFile(Encoding.Default);
    dbf.Open(dbfpath, FileMode.Open);
    DbfHeader dh = dbf.Header;
    int fieldCount = dh.ColumnCount;
    List <string> fieldNames = new List <string>();
    for (int index = 0; index < fieldCount; index++)
    {
        fieldNames.Add(dh[index].Name);
    }
    dbf.Close();

    // Field types via Shapelib. "rb+" opens read/write ("rb" would be read-only).
    IntPtr hDbf = ShapeLib.DBFOpen(dbfpath, "rb+");
    int pointCount = ShapeLib.DBFGetRecordCount(hDbf); // fetched but currently unused
    StringBuilder nameBuffer = new StringBuilder(20);
    int pnWidth = 10;
    int pnDecimals = 10;
    List <string> fieldTypes = new List <string>();
    for (int index = 0; index < fieldCount; index++)
    {
        string type = TypeConvert(ShapeLib.DBFGetFieldInfo(hDbf, index, nameBuffer, ref pnWidth, ref pnDecimals).ToString());
        fieldTypes.Add(type);
    }
    ShapeLib.DBFClose(hDbf);

    // Pair each name with its type.
    List <R_FieldInf> fieldInfs = new List <R_FieldInf>();
    for (int index = 0; index < fieldCount; index++)
    {
        fieldInfs.Add(new R_FieldInf(fieldNames[index], fieldTypes[index]));
    }
    return fieldInfs;
}
/// <summary>
/// Streams SqlGeometry rows from <paramref name="reader"/> into a new
/// shapefile, copying all non-geometry columns into the companion .dbf.
/// </summary>
/// <param name="type">Geometry type for the output shapefile header.</param>
/// <param name="reader">Reader whose result set contains exactly one SqlGeometry column.</param>
/// <param name="shapefile">Path of the shapefile to create.</param>
/// <exception cref="Exception">Thrown when the result set is empty or a row lacks a geometry value.</exception>
public static void Convert(ShapefileGeometryType type, SqlDataReader reader, string shapefile)
{
    if (!reader.Read())
    {
        throw new Exception("No Results found");
    }
    int geomOrdinal, colCount;
    ShapefileHeader shapeHeader = ShapefileHeader.CreateEmpty(type);
    DbfHeader dbfHeader = BuildHeader(reader, out geomOrdinal, out colCount);
    GeometryFactory factory = new GeometryFactory();
    Envelope env = shapeHeader.Bounds;
    using (ShapefileDataWriter writer = ShapefileDataWriter.Create(shapefile, dbfHeader, shapeHeader))
    {
        do
        {
            SqlGeometry geom = reader[geomOrdinal] as SqlGeometry;
            // Fix: a NULL (or non-geometry) value previously crashed with a
            // NullReferenceException on the STIsValid() call below.
            if (geom == null)
            {
                throw new Exception(String.Format("Row does not contain a SqlGeometry value in column {0}", geomOrdinal));
            }
            if (!geom.STIsValid())
            {
                geom = geom.MakeValid();
            }
            // Copy attribute columns; `offset` skips over the geometry column.
            for (int i = 0, offset = 0; i < colCount; i++)
            {
                if (i == geomOrdinal)
                {
                    offset++;
                }
                writer.Record.SetRaw(i, reader[i + offset]);
            }
            ExpandEnv(env, geom.STBoundary());
            writer.Write(ConvertToGeometry.SqlGeometryToGeometry(geom, factory));
        }while (reader.Read());
    }
}
/// <summary>
/// Builds a DBF header from the reader's schema table: every String column
/// becomes a character field; the single SqlGeometry column is located and
/// reported through <paramref name="geomOrdinal"/>.
/// </summary>
/// <param name="reader">Reader whose schema is inspected.</param>
/// <param name="geomOrdinal">Receives the ordinal of the geometry column.</param>
/// <param name="colCount">Receives the number of non-geometry columns.</param>
/// <returns>The populated DBF header.</returns>
/// <exception cref="Exception">Thrown for unrecognized column types or when no geometry column exists.</exception>
private static DbfHeader BuildHeader(SqlDataReader reader, out int geomOrdinal, out int colCount)
{
    DbfHeader dbfHeader = new DbfHeader();
    DataTable schema = reader.GetSchemaTable();
    geomOrdinal = -1;
    colCount = schema.Rows.Count - 1; // all columns except the geometry column
    foreach (DataRow row in schema.Rows)
    {
        int ordinal = (int)row["ColumnOrdinal"];
        int size = (int)row["ColumnSize"];
        string name = row["ColumnName"] as string;
        switch ((row["DataType"] as Type).Name)
        {
        case "String":
            // Fix: the old (byte)size cast silently wrapped widths > 255
            // (e.g. varchar(max) reports int.MaxValue), corrupting the column
            // definition. DBF character columns hold at most 254 characters,
            // so clamp to that limit.
            dbfHeader.AddCharacter(name, (byte)Math.Min(size, 254));
            break;

        case "SqlGeometry":
            geomOrdinal = ordinal;
            break;

        default:
            throw new Exception(String.Format("'{0}' is not a recognized data type", (row["DataType"] as Type).Name));
        }
    }
    if (geomOrdinal == -1)
    {
        throw new Exception("Geometry column was not found");
    }
    return(dbfHeader);
}
// Export of table 4 to dbf.
// Writes <fileNameDbf>.dbf in code page 1251: header, field descriptors,
// 0x0D terminator, one fixed-width record per export row, 0x1A EOF marker.
// Returns false (with `error` set) instead of throwing on argument/IO problems.
public static bool ExportUnionReportT4ToDbf(string path, List <ExportUnionReportT4> export, string fileNameDbf, out string error)
{
    error = string.Empty;
    if (path == string.Empty)
    {
        error = "path == string.Empty";
        return(false);
    }
    if (!Directory.Exists(path))
    {
        error = "!Directory.Exists(path)";
        return(false);
    }
    if (export == null)
    {
        error = "export == null";
        return(false);
    }

    string pathToTable = path + "\\" + fileNameDbf + ".dbf";
    try
    {
        File.Delete(pathToTable);
    }
    catch (Exception exc)
    {
        error = exc.Message;
        return(false);
    }

    var encoding = Encoding.GetEncoding(1251);
    var columns = GetFieldDbfUnionReportT4();

    // Fix: using blocks so the file handle is not leaked if a write throws.
    using (var stream = new FileStream(pathToTable, FileMode.Create))
    using (var writer = new BinaryWriter(stream))
    {
        var header = new DbfHeader(export.Count, columns.Count, DbfFileFormat.GetRecordLength(columns), encoding);
        writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(header));
        foreach (DbfFieldDescriptor item in columns)
        {
            writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(item));
        }
        writer.Write((byte)0x0D); // header terminator

        // Writes one fixed-width field: looks up the column length by name,
        // pads with spaces (fix: DBF fields are space-padded; the old code
        // left NUL padding) and truncates over-long values instead of
        // letting encoding.GetBytes throw.
        Action<string, string> writeField = (columnName, value) =>
        {
            int length = columns.Find(rec => rec.Name == columnName).Length;
            byte[] buffer = new byte[length];
            for (int i = 0; i < length; i++)
            {
                buffer[i] = 0x20;
            }
            byte[] encoded = encoding.GetBytes(value ?? string.Empty);
            Array.Copy(encoded, buffer, Math.Min(encoded.Length, length));
            writer.Write(buffer);
        };

        foreach (var record in export)
        {
            // Deleted-record flag: 0x2A (asterisk) if deleted, else 0x20 (space).
            writer.Write((byte)0x20);
            writeField("PERIOD_M", record.PERIOD_M.ToString());
            writeField("PERIOD_Y", record.PERIOD_Y.ToString());
            writeField("UKR_GROMAD", record.UKR_GROMAD.ToString());
            writeField("NUMIDENT", record.NUMIDENT);
            writeField("LN", record.LN);
            writeField("NM", record.NM);
            writeField("FTN", record.FTN);
            writeField("C_PID", record.C_PID);
            writeField("START_DT", record.START_DT.ToString());
            writeField("STOP_DT", record.STOP_DT.ToString());
            writeField("DAYS", record.DAYS.ToString());
            writeField("HH", record.HH.ToString());
            writeField("MM", record.MM.ToString());
            writeField("NORMA_1", record.NORMA_1.ToString());
            writeField("NORMA_2", record.NORMA_2.ToString());
            writeField("NORMA_3", record.NORMA_3.ToString());
            writeField("NUM_NAK", record.NUM_NAK);
            writeField("DT_NAK", record.DT_NAK.ToString());
            writeField("SEAZON", record.SEAZON.ToString());
            writeField("OZN", record.OZN.ToString());
        }

        writer.Write((byte)0x1A); // end-of-file marker
    }
    return(true);
}
// Export of table 2 to dbf.
// Writes <fileNameDbf>.dbf in code page 1251: header, field descriptors,
// 0x0D terminator, one fixed-width record per export row, 0x1A EOF marker.
// Returns false (with `error` set) instead of throwing on argument/IO problems.
public static bool ExportUnionReportT2ToDbf(string path, List <ExportUnionReportT2> export, string fileNameDbf, out string error)
{
    error = string.Empty;
    if (path == string.Empty)
    {
        error = "path == string.Empty";
        return(false);
    }
    if (!Directory.Exists(path))
    {
        error = "!Directory.Exists(path)";
        return(false);
    }
    if (export == null)
    {
        error = "export == null";
        return(false);
    }

    string pathToTable = path + "\\" + fileNameDbf + ".dbf";
    try
    {
        File.Delete(pathToTable);
    }
    catch (Exception exc)
    {
        error = exc.Message;
        return(false);
    }

    var encoding = Encoding.GetEncoding(1251);
    var columns = GetFieldDbfUnionReportT2();

    // Fix: using blocks so the file handle is not leaked if a write throws.
    using (var stream = new FileStream(pathToTable, FileMode.Create))
    using (var writer = new BinaryWriter(stream))
    {
        var header = new DbfHeader(export.Count, columns.Count, DbfFileFormat.GetRecordLength(columns), encoding);
        writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(header));
        foreach (DbfFieldDescriptor item in columns)
        {
            writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(item));
        }
        writer.Write((byte)0x0D); // header terminator

        // Writes one fixed-width field: looks up the column length by name,
        // pads with spaces (fix: DBF fields are space-padded; the old code
        // left NUL padding) and truncates over-long values instead of
        // letting encoding.GetBytes throw.
        Action<string, string> writeField = (columnName, value) =>
        {
            int length = columns.Find(rec => rec.Name == columnName).Length;
            byte[] buffer = new byte[length];
            for (int i = 0; i < length; i++)
            {
                buffer[i] = 0x20;
            }
            byte[] encoded = encoding.GetBytes(value ?? string.Empty);
            Array.Copy(encoded, buffer, Math.Min(encoded.Length, length));
            writer.Write(buffer);
        };

        foreach (var record in export)
        {
            // Deleted-record flag: 0x2A (asterisk) if deleted, else 0x20 (space).
            writer.Write((byte)0x20);
            writeField("NP", record.NP.ToString());
            writeField("PERIOD", record.PERIOD.ToString());
            writeField("RIK", record.RIK.ToString());
            writeField("KOD", record.KOD);
            writeField("TYP", record.TYP.ToString());
            writeField("TIN", record.TIN);
            // Decimal separator must be '.' regardless of current culture.
            writeField("S_NAR", record.S_NAR.ToString().Replace(',', '.'));
            writeField("S_DOX", record.S_DOX.ToString().Replace(',', '.'));
            writeField("S_TAXN", record.S_TAXN.ToString().Replace(',', '.'));
            writeField("S_TAXP", record.S_TAXP.ToString().Replace(',', '.'));
            writeField("OZN_DOX", record.OZN_DOX.ToString().Replace(',', '.'));
            writeField("D_PRIYN", record.D_PRIYN.ToString("yyyyMMdd"));
            writeField("D_ZVILN", record.D_ZVILN.ToString("yyyyMMdd"));
            writeField("OZN_PILG", record.OZN_PILG.ToString());
            writeField("OZNAKA", record.OZNAKA.ToString());
            writeField("A051", record.A051.ToString());
            writeField("A05", record.A05.ToString());
        }

        writer.Write((byte)0x1A); // end-of-file marker
    }
    return(true);
}
// Export of table 1 to dbf.
// Writes <fileNameDbf>.dbf in code page 1251: header, field descriptors,
// 0x0D terminator, one fixed-width record per export row, 0x1A EOF marker.
// Returns false (with `error` set) instead of throwing on argument/IO problems.
public static bool ExportUnionReportT1ToDbf(string path, List <ExportUnionReportT1> export, string fileNameDbf, out string error)
{
    error = string.Empty;
    if (path == string.Empty)
    {
        error = "path == string.Empty";
        return(false);
    }
    if (!Directory.Exists(path))
    {
        error = "!Directory.Exists(path)";
        return(false);
    }
    if (export == null)
    {
        error = "export == null";
        return(false);
    }

    string pathToTable = path + "\\" + fileNameDbf + ".dbf";
    try
    {
        File.Delete(pathToTable);
    }
    catch (Exception exc)
    {
        error = exc.Message;
        return(false);
    }

    var encoding = Encoding.GetEncoding(1251);
    var columns = GetFieldDbfUnionReportT1();

    // Fix: using blocks so the file handle is not leaked if a write throws.
    using (var stream = new FileStream(pathToTable, FileMode.Create))
    using (var writer = new BinaryWriter(stream))
    {
        var header = new DbfHeader(export.Count, columns.Count, DbfFileFormat.GetRecordLength(columns), encoding);
        writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(header));
        foreach (DbfFieldDescriptor item in columns)
        {
            writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(item));
        }
        writer.Write((byte)0x0D); // header terminator

        // Writes one fixed-width field: looks up the column length by name,
        // pads with spaces (fix: DBF fields are space-padded; the old code
        // left NUL padding) and truncates over-long values instead of
        // letting encoding.GetBytes throw.
        Action<string, string> writeField = (columnName, value) =>
        {
            int length = columns.Find(rec => rec.Name == columnName).Length;
            byte[] buffer = new byte[length];
            for (int i = 0; i < length; i++)
            {
                buffer[i] = 0x20;
            }
            byte[] encoded = encoding.GetBytes(value ?? string.Empty);
            Array.Copy(encoded, buffer, Math.Min(encoded.Length, length));
            writer.Write(buffer);
        };

        foreach (var record in export)
        {
            // Deleted-record flag: 0x2A (asterisk) if deleted, else 0x20 (space).
            writer.Write((byte)0x20);
            writeField("PERIOD_M", record.PERIOD_M.ToString());
            writeField("PERIOD_Y", record.PERIOD_Y.ToString());
            writeField("UKR_GROMAD", record.UKR_GROMAD.ToString());
            writeField("ST", record.ST.ToString());
            writeField("NUMIDENT", record.NUMIDENT);
            writeField("LN", record.LN);
            writeField("NM", record.NM);
            writeField("FTN", record.FTN);
            writeField("ZO", record.ZO.ToString());
            writeField("PAY_TP", record.PAY_TP.ToString());
            writeField("PAY_MNTH", record.PAY_MNTH.ToString());
            writeField("PAY_YEAR", record.PAY_YEAR.ToString());
            writeField("KD_NP", record.KD_NP.ToString());
            writeField("KD_NZP", record.KD_NZP.ToString());
            writeField("KD_PTV", record.KD_PTV.ToString());
            writeField("KD_VP", record.KD_VP.ToString());
            // Decimal separator must be '.' regardless of current culture.
            writeField("SUM_TOTAL", record.SUM_TOTAL.ToString().Replace(',', '.'));
            writeField("SUM_MAX", record.SUM_MAX.ToString().Replace(',', '.'));
            writeField("SUM_DIFF", record.SUM_DIFF.ToString().Replace(',', '.'));
            writeField("SUM_INS", record.SUM_INS.ToString().Replace(',', '.'));
            writeField("SUM_NARAH", record.SUM_NARAH.ToString().Replace(',', '.'));
            writeField("OTK", record.OTK.ToString());
            writeField("EXP", record.EXP.ToString());
            writeField("NRC", record.NRC.ToString());
            writeField("NRM", record.NRM.ToString());
            writeField("OZN", record.OZN.ToString());
        }

        writer.Write((byte)0x1A); // end-of-file marker
    }
    return(true);
}
/// <summary>
/// Reprojects each source shapefile into <paramref name="destinationFolder"/>
/// under its destination name, carrying a single 80-character "Label"
/// attribute; the .prj sidecar is written (or copied) to match.
/// </summary>
/// <param name="destinationFolder">Folder for the reprojected outputs.</param>
/// <param name="projection">Target projection as ESRI WKT.</param>
/// <param name="shapes">Sources; each supplies a path, destination name and label column.</param>
public static void Reproject(string destinationFolder, string projection, params ReprojectShapefile[] shapes)
{
    // NOTE(review): parsed but never used below — confirm whether it is needed.
    ProjectionInfo targetProjection = ProjectionInfo.FromEsriString(projection);
    foreach (ReprojectShapefile shape in shapes)
    {
        string shapePath = Path.Combine(destinationFolder, shape.DestinationName);
        ShapefileHeader shapeHeader = ShapefileHeader.CreateEmpty(ShapefileGeometryType.Polygon);
        // Output schema: a single 80-character "Label" column.
        DbfHeader dbfHeader = new DbfHeader();
        dbfHeader.AddCharacter("Label", 80);
        GeometryFactory gf = new GeometryFactory();
        using (ShapefileDataWriter writer = ShapefileDataWriter.Create(shapePath + ".shp", dbfHeader, shapeHeader))
        {
            // Null transform means the source already matches the target projection;
            // in that case the original .prj file is copied through unchanged.
            GeometryTransform transform = null;
            if (File.Exists(Path.ChangeExtension(shape.Source, ".prj")))
            {
                transform = GeometryTransform.GetTransform(shape.Source, projection);
                if (transform != null)
                {
                    File.WriteAllText(shapePath + ".prj", projection);
                }
                else
                {
                    File.Copy(Path.ChangeExtension(shape.Source, ".prj"), shapePath + ".prj");
                }
            }
            using (ShapefileIndexReader index = new ShapefileIndexReader(Path.ChangeExtension(shape.Source, ".shx")))
            {
                // Grow the output bounds by the (possibly reprojected) source bounds.
                if (transform != null)
                {
                    writer.Header.Bounds.ExpandToInclude(transform.Apply(index.Header.Bounds));
                }
                else
                {
                    writer.Header.Bounds.ExpandToInclude(index.Header.Bounds);
                }
                // One task per processor; each task pulls blocks of records from the
                // shared index and writes them to the shared writer.
                // NOTE(review): tasks concurrently share `index` and `writer`; this
                // assumes ShapefileBlockReader/ShapefileDataWriter synchronize access
                // internally — confirm thread-safety of those types.
                Task[] tasks = new Task[Environment.ProcessorCount];
                for (int i = 0; i < tasks.Length; i++)
                {
                    tasks[i] = Task.Factory.StartNew(() =>
                    {
                        using (ShapefileBlockReader reader = new ShapefileBlockReader(shape.Source, index, gf, transform))
                        {
                            while (reader.Read())
                            {
                                writer.Write(reader.Geometry, reader.Record.GetString(shape.Label));
                            }
                        }
                    });
                }
                Task.WaitAll(tasks);
                writer.Flush();
            }
        }
    }
}
// Export of table 6 (unified social contribution) to dbf.
// Writes <_nameDF>.<qrt> in code page 1251: header, field descriptors,
// 0x0D terminator, one fixed-width record per export row, 0x1A EOF marker.
// Returns false (with `error` set) instead of throwing on argument/IO problems.
public static bool ExportDFToDbf(string path, int qrt, List <ExportDFRec> export, out string error)
{
    error = string.Empty;
    if (path == string.Empty)
    {
        error = "path == string.Empty";
        return(false);
    }
    if (!Directory.Exists(path))
    {
        error = "!Directory.Exists(path)";
        return(false);
    }
    if (qrt <= 0)
    {
        error = "qrt <= 0";
        return(false);
    }
    if (qrt > 4)
    {
        error = "qrt > 4";
        return(false);
    }
    if (export == null)
    {
        error = "export == null";
        return(false);
    }

    // Output file extension is the quarter number (e.g. name.1 .. name.4).
    string pathToDBF = path + "\\" + _nameDF + "." + qrt;
    try
    {
        File.Delete(pathToDBF);
    }
    catch (Exception exc)
    {
        error = exc.Message;
        return(false);
    }

    Encoding encoding = Encoding.GetEncoding(1251);
    List <DbfFieldDescriptor> columns = GetFieldDbfDF();

    // Fix: using blocks so the file handle is not leaked if a write throws.
    using (Stream stream = new FileStream(pathToDBF, FileMode.Create))
    using (BinaryWriter writer = new BinaryWriter(stream))
    {
        DbfHeader header = new DbfHeader(export.Count, columns.Count, DbfFileFormat.GetRecordLength(columns), encoding);
        writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(header));
        foreach (DbfFieldDescriptor item in columns)
        {
            writer.Write(IRI.Ket.IO.BinaryStream.StructureToByteArray(item));
        }
        writer.Write((byte)0x0D); // header terminator

        // Writes one fixed-width field: looks up the column length by name,
        // pads with spaces (fix: DBF fields are space-padded; the old code
        // left NUL padding) and truncates over-long values instead of
        // letting encoding.GetBytes throw.
        Action<string, string> writeField = (columnName, value) =>
        {
            int length = columns.Find(rec => rec.Name == columnName).Length;
            byte[] buffer = new byte[length];
            for (int i = 0; i < length; i++)
            {
                buffer[i] = 0x20;
            }
            byte[] encoded = encoding.GetBytes(value ?? string.Empty);
            Array.Copy(encoded, buffer, Math.Min(encoded.Length, length));
            writer.Write(buffer);
        };

        foreach (ExportDFRec record in export)
        {
            // Deleted-record flag: 0x2A (asterisk) if deleted, else 0x20 (space).
            writer.Write((byte)0x20);
            writeField("NP", record.NP.ToString());
            writeField("PERIOD", record.PERIOD.ToString());
            writeField("RIK", record.RIK.ToString());
            writeField("KOD", record.KOD);
            writeField("TYP", record.TYP.ToString());
            writeField("TIN", record.TIN);
            // Decimal separator must be '.' regardless of current culture.
            writeField("S_NAR", record.S_NAR.ToString().Replace(',', '.'));
            writeField("S_DOX", record.S_DOX.ToString().Replace(',', '.'));
            writeField("S_TAXN", record.S_TAXN.ToString().Replace(',', '.'));
            writeField("S_TAXP", record.S_TAXP.ToString().Replace(',', '.'));
            writeField("OZN_DOX", record.OZN_DOX.ToString().Replace(',', '.'));
            writeField("D_PRIYN", record.D_PRIYN.ToString("yyyyMMdd"));
            writeField("D_ZVILN", record.D_ZVILN.ToString("yyyyMMdd"));
            writeField("OZN_PILG", record.OZN_PILG.ToString());
            writeField("OZNAKA", record.OZNAKA.ToString());
        }

        writer.Write((byte)0x1A); // end-of-file marker
    }
    return(true);
}