/// <summary>
/// Reads one column definition ("name"/"type" attributes, plus optional
/// "client"/"comment" when running in the editor) from the XML reader and
/// appends it to <paramref name="dbf"/>.
/// </summary>
private static void LoadColumn(Dbf dbf, XmlReader reader, bool editor)
{
    DbfColumn column = new DbfColumn();
    string name = reader["name"];
    string str = reader["type"];
    if ((name == null) || (str == null))
    {
        // BUG FIX: the format string used "{1}" with a single argument, which
        // throws FormatException instead of logging. Also bail out early so a
        // half-initialized column (null name/type) is never added to the DBF.
        Debug.LogError(string.Format("DbfXml.LoadColumn() - failed to load column for DBF {0}", dbf.GetName()));
        return;
    }
    column.SetName(name);
    column.SetDataType(EnumUtils.GetEnum<DbfDataType>(str));
    if (editor)
    {
        // Editor-only metadata: "client" flag and free-form comment.
        bool flag;
        string str3 = reader["client"];
        if ((str3 != null) && bool.TryParse(str3, out flag))
        {
            column.SetClient(flag);
        }
        string comment = reader["comment"];
        if (comment != null)
        {
            column.SetComment(comment);
        }
    }
    dbf.AddColumn(column);
}
/// <summary>
/// Dialog for adding or editing a DBF column. When <paramref name="dbfColumn"/>
/// is supplied, the view model is pre-populated from it; <paramref name="columnMode"/>
/// selects the Empty/Calculated radio state and, for calculated columns, copies
/// the calculation type and area unit.
/// </summary>
/// <param name="columnNames">Existing column names used for duplicate checking.</param>
/// <param name="isEditing">Shows the column-values panel when true.</param>
/// <param name="aliasName">Initial alias shown for the column.</param>
public AddDbfColumnWindow(DbfColumn dbfColumn, IEnumerable<string> columnNames, DbfColumnMode columnMode, bool isEditing = false, string aliasName = "")
{
    UnitTestHelper.ApplyWindowStyle(this);
    InitializeComponent();
    // Collapse the alias row entirely when the relate/alias feature is disabled.
    if (!EditorUIPlugin.IsRelateAndAliasEnabled)
    {
        AliasGrid.RowDefinitions[0].Height = new GridLength(0);
    }
    addNewColumnViewModel.ColumnNames = new List<string>(columnNames);
    // The column-values panel only makes sense while editing an existing column.
    if (isEditing)
    {
        ColumnValues.Visibility = Visibility.Visible;
    }
    else
    {
        ColumnValues.Visibility = Visibility.Collapsed;
    }
    if (dbfColumn != null)
    {
        // Seed the view model from the column being edited.
        addNewColumnViewModel.ColumnName = dbfColumn.ColumnName;
        addNewColumnViewModel.AliasName = aliasName;
        addNewColumnViewModel.ColumnType = dbfColumn.ColumnType;
        switch (columnMode)
        {
            case DbfColumnMode.Empty:
                addNewColumnViewModel.IsEmptyChecked = true;
                break;
            case DbfColumnMode.Calculated:
                addNewColumnViewModel.IsCalculatedChecked = true;
                break;
            default:
                break;
        }
        addNewColumnViewModel.DecimalLength = dbfColumn.DecimalLength;
        addNewColumnViewModel.Length = dbfColumn.Length;
        originalColumnLength = dbfColumn.Length;
        // Calculated columns carry extra settings not present on plain DbfColumn.
        if (columnMode == DbfColumnMode.Calculated)
        {
            addNewColumnViewModel.CalculationType = ((CalculatedDbfColumn)dbfColumn).CalculationType;
            addNewColumnViewModel.MeasurementUnit = ((CalculatedDbfColumn)dbfColumn).AreaUnit;
        }
        // The column's own name must not count as a duplicate of itself.
        addNewColumnViewModel.ColumnNames.Remove(dbfColumn.ColumnName);
    }
    Messenger.Default.Register<bool>(this, DataContext, ProcessAddNewColumnMessage);
    // Unregister on close so the window does not leak via Messenger references.
    Closing += (s, e) => Messenger.Default.Unregister(this);
}
/// <summary>
/// Builds a SQL Server CREATE TABLE statement for a shapefile import:
/// an identity primary key ([Id_&lt;table&gt;]), a [Geom] geometry column, and one
/// nullable column per DBF attribute column.
/// </summary>
/// <exception cref="Exception">Thrown for DBF column types with no SQL mapping.</exception>
private static string GenerateCreateTableQuery(ShapefileDataReader reader, string tableName)
{
    StringBuilder sb = new StringBuilder();
    sb.Append($"CREATE TABLE [{tableName}] ([Id_{tableName}] [int] IDENTITY(1,1) NOT NULL, [Geom] [geometry] NOT NULL, ");
    DbfHeader header = reader.DbfHeader;
    for (int i = 0; i < header.Count; i++)
    {
        DbfColumn col = header[i];
        // Map the DBF column type onto its SQL type fragment.
        string sqlType;
        switch (col.Type)
        {
            case DbfColumnType.Character:
                sqlType = $"[varchar]({col.Length})";
                break;
            case DbfColumnType.Float:
            case DbfColumnType.Number:
                sqlType = "[float]";
                break;
            case DbfColumnType.Boolean:
                sqlType = "[bit]";
                break;
            case DbfColumnType.Date:
                sqlType = "[datetime]";
                break;
            default:
                throw new Exception(String.Format("Column type '{0}' is not supported.", (char)col.Type));
        }
        sb.Append($"[{col.Name}] {sqlType} NULL, ");
    }
    sb.Append($"CONSTRAINT [PK_{tableName}] PRIMARY KEY CLUSTERED ([Id_{tableName}] ASC) WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON))");
    return sb.ToString();
}
/// <summary>
/// Returns the data type of the named column, or <c>DbfDataType.INVALID</c>
/// when no column with that name exists.
/// </summary>
public DbfDataType GetDataType(string columnName)
{
    DbfColumn column = this.GetColumn(columnName);
    return column == null ? DbfDataType.INVALID : column.GetDataType();
}
/// <summary>
/// Translates each configured column relation into a DBF column (missing
/// length/decimals default to 0) and writes the file header.
/// </summary>
public void CreateHeader()
{
    foreach (var relation in _columnRelations)
    {
        // Column type names are parsed case-insensitively.
        var columnType = (DbfColumn.DbfColumnType)Enum.Parse(typeof(DbfColumn.DbfColumnType), relation.FileColumnType, true);
        var length = relation.FileColumnLength ?? 0;
        var decimals = relation.FileColumnDecimals ?? 0;
        _file.Header.AddColumn(new DbfColumn(relation.FileColumnName, columnType, length, decimals));
    }
    _file.WriteHeader();
}
/// <summary>
/// Maps feature-source columns onto DBF columns, deriving each column's
/// decimal length from its type and maximum length via GetDecimalLength.
/// </summary>
private static Collection<DbfColumn> ConvertToDbfColumns(IEnumerable<FeatureSourceColumn> featureSourceColumns)
{
    var results = new Collection<DbfColumn>();
    foreach (var sourceColumn in featureSourceColumns)
    {
        var dbfType = (DbfColumnType)Enum.Parse(typeof(DbfColumnType), sourceColumn.TypeName);
        results.Add(new DbfColumn(sourceColumn.ColumnName, dbfType, sourceColumn.MaxLength, GetDecimalLength(dbfType, sourceColumn.MaxLength)));
    }
    return results;
}
/// <summary>
/// Asserts that a DbfColumn matches its fixed-width fixture line:
/// name in cols 0-15, type character at col 17, length in cols 28-37,
/// decimal count from col 39 onward.
/// </summary>
protected void ValidateColumn(DbfColumn dbfColumn, string line)
{
    string name = line.Substring(0, 16).Trim();
    var type = (DbfColumnType)line[17];
    int length = int.Parse(line.Substring(28, 10));
    int decimals = int.Parse(line.Substring(39));

    dbfColumn.ColumnName.ShouldBe(name);
    dbfColumn.ColumnType.ShouldBe(type);
    dbfColumn.Length.ShouldBe(length);
    dbfColumn.DecimalCount.ShouldBe(decimals);
}
/// <summary>
/// Collects every column of the DBF (note: GeoDbf column indexes are 1-based).
/// Null-typed columns are skipped unless <paramref name="includeNullColumnType"/>
/// is true.
/// </summary>
public static Collection<DbfColumn> GetAllColumns(this GeoDbf geoDbf, bool includeNullColumnType = false)
{
    var columns = new Collection<DbfColumn>();
    for (int columnIndex = 1; columnIndex <= geoDbf.ColumnCount; columnIndex++)
    {
        DbfColumn candidate = geoDbf.GetColumn(columnIndex);
        bool keep = includeNullColumnType || candidate.ColumnType != DbfColumnType.Null;
        if (keep)
        {
            columns.Add(candidate);
        }
    }
    return columns;
}
/// <summary>
/// Case-insensitive lookup of a column by name. Returns null when
/// <paramref name="columnName"/> is null or no column matches.
/// </summary>
public DbfColumn GetColumn(string columnName)
{
    if (columnName == null)
    {
        return null;
    }
    foreach (DbfColumn column in this.m_columns)
    {
        if (columnName.Equals(column.GetName(), StringComparison.OrdinalIgnoreCase))
        {
            return column;
        }
    }
    return null;
}
/// <summary>
/// View model for a single feature: resolves a display header from the layer's
/// id column (falling back to the feature id, or "[NONE]" when blank),
/// deep-clones the feature, and builds a (Column, Value, RealValue) DataTable
/// of its attribute values for display.
/// </summary>
public FeatureViewModel(Feature newFeature, FeatureLayer ownerFeatureLayer)
{
    // Default header is the raw feature id; replaced below when the layer
    // designates a specific id column whose value this feature carries.
    header = String.Format(CultureInfo.InvariantCulture, "{0}", newFeature.Id);
    string featureIdColumn = LayerPluginHelper.GetFeatureIdColumn(ownerFeatureLayer);
    if (newFeature.ColumnValues.ContainsKey(featureIdColumn))
    {
        header = String.Format(CultureInfo.InvariantCulture, "{0}", newFeature.ColumnValues[featureIdColumn]);
    }
    // (removed commented-out legacy LinkColumnValues header logic)
    if (string.IsNullOrEmpty(header) || string.IsNullOrWhiteSpace(header))
    {
        header = "[NONE]";
    }
    featureId = newFeature.Id;
    wkt = newFeature.GetWellKnownText();
    this.ownerFeatureLayer = ownerFeatureLayer;
    feature = newFeature.CloneDeep(ReturningColumnsType.AllColumns);
    table = new DataTable();
    table.Columns.Add("Column", typeof(string));
    table.Columns.Add("Value", typeof(string));
    table.Columns.Add("RealValue", typeof(string));
    Collection<FeatureSourceColumn> featureColumns = new Collection<FeatureSourceColumn>();
    // Columns come from the layer the feature was tagged with, falling back
    // to the owning layer when no tag is present.
    FeatureLayer columnSourceLayer = newFeature.Tag as FeatureLayer;
    if (columnSourceLayer == null)
    {
        columnSourceLayer = ownerFeatureLayer;
    }
    columnSourceLayer.SafeProcess(() =>
    {
        var tempFeatureColumns = columnSourceLayer.QueryTools.GetColumns();
        foreach (var item in tempFeatureColumns)
        {
            featureColumns.Add(item);
        }
    });
    // Calculated (virtual) columns registered for this feature source are
    // appended after the physical columns.
    if (CalculatedDbfColumn.CalculatedColumns.ContainsKey(columnSourceLayer.FeatureSource.Id))
    {
        foreach (var item in CalculatedDbfColumn.CalculatedColumns[columnSourceLayer.FeatureSource.Id])
        {
            featureColumns.Add(item);
        }
    }
    foreach (var column in featureColumns)
    {
        if (!string.IsNullOrEmpty(column.ColumnName) && newFeature.ColumnValues.ContainsKey(column.ColumnName))
        {
            string realValue = newFeature.ColumnValues[column.ColumnName];
            string value = realValue;
            object valueResult = null;
            if (column.TypeName.Equals("Double", StringComparison.InvariantCultureIgnoreCase) || column.TypeName.Equals("Float", StringComparison.InvariantCultureIgnoreCase))
            {
                // Floating-point values are re-formatted; when the column is a
                // DbfColumn its declared DecimalLength drives the "N" format.
                // NOTE(review): this double.TryParse uses the current culture
                // while formatting uses invariant — confirm intended.
                double doubleValue;
                if (double.TryParse(value, out doubleValue))
                {
                    value = doubleValue.ToString(CultureInfo.InvariantCulture);
                    DbfColumn dbfColumn = column as DbfColumn;
                    if (dbfColumn != null)
                    {
                        value = doubleValue.ToString("N" + dbfColumn.DecimalLength);
                    }
                }
            }
            else if (column.TypeName.Equals("Integer", StringComparison.InvariantCultureIgnoreCase) || column.TypeName.Equals("Numeric", StringComparison.InvariantCultureIgnoreCase))
            {
                // Non-numeric text in an integer/numeric column collapses to empty.
                int intValue;
                value = int.TryParse(value, out intValue) ? intValue.ToString(CultureInfo.InvariantCulture) : string.Empty;
            }
            else
            {
                valueResult = value;
                //valueResult = new Uri(value, UriKind.RelativeOrAbsolute);
            }
            if (valueResult == null)
            {
                valueResult = value;
            }
            // Display the column under its user-configured alias.
            string alias = ownerFeatureLayer.FeatureSource.GetColumnAlias(column.ColumnName);
            table.Rows.Add(alias, valueResult, realValue);
        }
    }
    // (removed commented-out legacy LinkColumnValues row-building code)
}
/// <summary>
/// Exports features to a new shapefile layer via the shapefile layer plugin:
/// builds create-layer parameters (optionally restricting to columns listed in
/// CustomData["Columns"]), reprojects every feature from the active map
/// projection to the target projection, converts Memo columns to Character
/// when requested, creates the layer, and finally offers to add it to the map.
/// </summary>
private void ExportToShapeFile(Collection<Feature> resultFeatures, Collection<FeatureSourceColumn> columns, FeatureLayerPlugin sourceLayerPlugin, WellKnownType type)
{
    int count = resultFeatures.Count;
    if (count > 0)
    {
        FeatureLayerPlugin targetLayerPlugin = (FeatureLayerPlugin)GisEditor.LayerManager.GetLayerPlugins(typeof(ShapeFileFeatureLayer)).FirstOrDefault();
        FeatureLayer resultLayer = null;
        if (targetLayerPlugin != null)
        {
            GetLayersParameters getLayerParameters = new GetLayersParameters();
            ConfigureFeatureLayerParameters parameters = targetLayerPlugin.GetCreateFeatureLayerParameters(columns);
            if (parameters != null && sourceLayerPlugin != null)
            {
                // When the plugin supplies a "Columns" list, only those columns
                // are exported; otherwise every source column goes through.
                bool needColumns = false;
                Collection<string> tempColumns = new Collection<string>();
                if (parameters.CustomData.ContainsKey("Columns"))
                {
                    tempColumns = parameters.CustomData["Columns"] as Collection<string>;
                }
                else
                {
                    needColumns = true;
                }
                var featureColumns = columns.Where(c => needColumns || tempColumns.Contains(c.ColumnName));
                if (targetLayerPlugin.CanCreateFeatureLayerWithSourceColumns(sourceLayerPlugin))
                {
                    foreach (var item in featureColumns)
                    {
                        FeatureSourceColumn column = new FeatureSourceColumn(item.ColumnName, item.TypeName, item.MaxLength);
                        // Shorthand "c" type name is normalized to "Character".
                        if (column.TypeName.Equals("c", StringComparison.InvariantCultureIgnoreCase))
                        {
                            column.TypeName = "Character";
                        }
                        parameters.AddedColumns.Add(column);
                    }
                }
                else
                {
                    // Source columns must first be translated by the source plugin.
                    var geoColumns = sourceLayerPlugin.GetIntermediateColumns(featureColumns);
                    foreach (var item in geoColumns)
                    {
                        if (item.TypeName.Equals("c", StringComparison.InvariantCultureIgnoreCase))
                        {
                            item.TypeName = "Character";
                        }
                        parameters.AddedColumns.Add(item);
                    }
                }
                parameters.WellKnownType = type;
                //parameters.CustomData["SourceLayer"] = featureLayer;
                getLayerParameters.LayerUris.Add(parameters.LayerUri);
                foreach (var item in parameters.CustomData)
                {
                    getLayerParameters.CustomData[item.Key] = item.Value;
                }
                // Reproject features from the active map projection into the
                // layer's own projection before writing.
                Proj4Projection proj4 = new Proj4Projection();
                proj4.InternalProjectionParametersString = parameters.Proj4ProjectionParametersString;
                proj4.ExternalProjectionParametersString = GisEditor.ActiveMap.DisplayProjectionParameters;
                proj4.SyncProjectionParametersString();
                proj4.Open();
                foreach (var item in resultFeatures)
                {
                    Feature feature = proj4.ConvertToInternalProjection(item);
                    parameters.AddedFeatures.Add(feature);
                }
                // Optionally downgrade Memo columns to 254-char Character columns
                // (ToList() avoids mutating columns while enumerating the query).
                if (parameters.MemoColumnConvertMode == MemoColumnConvertMode.ToCharacter)
                {
                    foreach (var item in parameters.AddedColumns.Where(c => c.TypeName.Equals("Memo", StringComparison.InvariantCultureIgnoreCase)).ToList())
                    {
                        item.TypeName = "Character";
                        item.MaxLength = 254;
                        DbfColumn tmpDbfColumn = item as DbfColumn;
                        if (tmpDbfColumn != null)
                        {
                            tmpDbfColumn.ColumnType = DbfColumnType.Character;
                            tmpDbfColumn.Length = 254;
                        }
                    }
                }
                resultLayer = targetLayerPlugin.CreateFeatureLayer(parameters);
                resultLayer.FeatureSource.Projection = proj4;
                // Re-fetch the created layer through the plugin so the returned
                // instance is the one the rest of the app will use.
                resultLayer = targetLayerPlugin.GetLayers(getLayerParameters).FirstOrDefault() as FeatureLayer;
            }
        }
        if (resultLayer != null)
        {
            // Ask the user whether the newly exported layer should be added to the map.
            GisEditorMessageBox messageBox = new GisEditorMessageBox(MessageBoxButton.YesNo);
            messageBox.Owner = Application.Current.MainWindow;
            messageBox.WindowStartupLocation = WindowStartupLocation.CenterOwner;
            messageBox.Title = GisEditor.LanguageManager.GetStringResource("NavigatePluginAddToMap");
            messageBox.Message = GisEditor.LanguageManager.GetStringResource("DoYouWantToAddToMap");
            messageBox.ErrorMessage = string.Empty;
            if (messageBox.ShowDialog().Value)
            {
                GisEditor.ActiveMap.AddLayerToActiveOverlay(resultLayer);
                GisEditor.ActiveMap.RefreshActiveOverlay();
                RefreshArgs refreshArgs = new RefreshArgs(this, "LoadToMapCore");
                InvokeRefreshPlugins(GisEditor.UIManager, refreshArgs);
                GisEditor.ActiveMap.Refresh();
            }
        }
    }
    else
    {
        System.Windows.Forms.MessageBox.Show("There is no features to export.", "Export");
    }
}
/// <summary>
/// Creates a shapefile at <paramref name="OutputPath"/> containing the given
/// features joined against rows of a delimited file: each feature is matched
/// to the first row where all match conditions hold, its column values are
/// updated from that row, and it is written inside a single edit transaction.
/// </summary>
/// <param name="isIncludeAllFeatures">When true, unmatched features are still written.</param>
/// <param name="updateAction">Callback receiving progress and per-feature error events.</param>
/// <param name="invalidColumns">Receives columns whose values could not be applied.</param>
private void CreateShapeFile(ObservableCollection<FeatureSourceColumn> includedColumnsList, string OutputPath, Encoding ShapeFileEncoding, string csvFilePath, List<Feature> features, bool isIncludeAllFeatures, IEnumerable<MatchCondition> matchConditions, Action<UpdatingTaskProgressEventArgs> updateAction, Dictionary<string, string> invalidColumns)
{
    Collection<DbfColumn> includeColumns = new Collection<DbfColumn>();
    RemoveUnduplicateColumn(includedColumnsList);
    // Only columns whose TypeName parses as a DbfColumnType are exported
    // (decimal length is fixed at 0 here).
    foreach (var column in includedColumnsList)
    {
        DbfColumnType tmpDbfColumnType = DbfColumnType.Character;
        if (Enum.TryParse(column.TypeName, out tmpDbfColumnType))
        {
            DbfColumn dbfColumn = new DbfColumn(column.ColumnName, tmpDbfColumnType, column.MaxLength, 0);
            includeColumns.Add(dbfColumn);
        }
    }
    // Shapefile type is inferred from the first feature; Null means nothing to write.
    ShapeFileType shapeFileType = GetShapeFileType(features.FirstOrDefault());
    if (shapeFileType != ShapeFileType.Null)
    {
        ShapeFileFeatureLayer.CreateShapeFile(shapeFileType, OutputPath, includeColumns, ShapeFileEncoding, OverwriteMode.Overwrite);
        var dataTable = DataJoinAdapter.ReadDataToDataGrid(csvFilePath, Delimiter);
        var featureRows = dataTable.Rows;
        var index = 0;
        var count = features.Count;
        ShapeFileFeatureLayer newShapeFileFeatureLayer = new ShapeFileFeatureLayer(OutputPath, GeoFileReadWriteMode.ReadWrite);
        newShapeFileFeatureLayer.SafeProcess(() =>
        {
            newShapeFileFeatureLayer.EditTools.BeginTransaction();
            foreach (var feature in features)
            {
                index++;
                try
                {
                    // A row matches when every condition's layer-column value equals
                    // the corresponding delimited-file column value (string compare).
                    var matchedDataRow = featureRows.Cast<DataRow>().FirstOrDefault(r => matchConditions.All(tmpCondition => feature.ColumnValues[tmpCondition.SelectedLayerColumn.ColumnName] == r[tmpCondition.SelectedDelimitedColumn.ColumnName].ToString()));
                    if (matchedDataRow != null)
                    {
                        SetFeatureColumnValues(feature, matchedDataRow, includedColumnsList, invalidColumns);
                        newShapeFileFeatureLayer.EditTools.Add(feature);
                    }
                    else if (isIncludeAllFeatures)
                    {
                        newShapeFileFeatureLayer.EditTools.Add(feature);
                    }
                    // UpdateProgress returning true signals cancellation.
                    if (UpdateProgress(updateAction, index, count))
                    {
                        break;
                    }
                }
                catch (Exception ex)
                {
                    // Per-feature failures are logged, reported via updateAction,
                    // and the feature is skipped.
                    var errorEventArgs = new UpdatingTaskProgressEventArgs(TaskState.Error);
                    errorEventArgs.Error = new ExceptionInfo(string.Format(CultureInfo.InvariantCulture, "Feature id: {0}, {1}", feature.Id, ex.Message), ex.StackTrace, ex.Source);
                    GisEditor.LoggerManager.Log(LoggerLevel.Debug, ex.Message, new ExceptionInfo(ex));
                    errorEventArgs.Message = feature.Id;
                    updateAction(errorEventArgs);
                }
            }
            newShapeFileFeatureLayer.EditTools.CommitTransaction();
        });
        SavePrjFile(OutputPath, DisplayProjectionParameters);
    }
}
/// <summary>
/// Streams every feature of the current shapefile source through
/// ShapeFileHelper, joining each one with the first delimited-file row whose
/// match conditions all hold, and writes the result to OutputPathFileName.
/// Progress, errors, and cancellation are reported through OnUpdatingProgress.
/// </summary>
private void DataJoinShapeFile()
{
    var args = new UpdatingTaskProgressEventArgs(TaskState.Updating);
    ShapeFileFeatureSource currentSource = ShapeFileFeatureSource;
    if (!currentSource.IsOpen)
    {
        currentSource.Open();
    }
    var index = 0;
    // NOTE(review): GetAllFeatures is called twice (here and for the type
    // probe below), which loads all features each time — confirm acceptable.
    var count = currentSource.GetAllFeatures(ReturningColumnsType.AllColumns).Count;
    Collection<DbfColumn> includeColumns = new Collection<DbfColumn>();
    RemoveUnduplicateColumn(IncludedColumnsList);
    // Only columns whose TypeName parses as a DbfColumnType are carried over.
    foreach (var column in IncludedColumnsList)
    {
        DbfColumnType tmpDbfColumnType = DbfColumnType.Character;
        if (Enum.TryParse(column.TypeName, out tmpDbfColumnType))
        {
            DbfColumn dbfColumn = new DbfColumn(column.ColumnName, tmpDbfColumnType, column.MaxLength, 0);
            includeColumns.Add(dbfColumn);
        }
    }
    ShapeFileType shapeFileType = GetShapeFileType(currentSource.GetAllFeatures(ReturningColumnsType.AllColumns).FirstOrDefault());
    var projectionWkt = Proj4Projection.ConvertProj4ToPrj(DisplayProjectionParameters);
    var dataTable = DataJoinAdapter.ReadDataToDataGrid(CsvFilePath, Delimiter);
    var featureRows = dataTable.Rows;
    var helper = new ShapeFileHelper(shapeFileType, OutputPathFileName, includeColumns, projectionWkt);
    // The callback's boolean return value signals cancellation to the helper.
    helper.ForEachFeatures(currentSource, (f, currentProgress, upperBound, percentage) =>
    {
        try
        {
            bool canceled = false;
            // Features without geometry are skipped entirely.
            if (f.GetWellKnownBinary() != null)
            {
                index++;
                try
                {
                    // First row where every condition's layer value equals the
                    // delimited-file value (string comparison).
                    var matchedDataRow = featureRows.Cast<DataRow>().FirstOrDefault(r => MatchConditions.All(tmpCondition => f.ColumnValues[tmpCondition.SelectedLayerColumn.ColumnName] == r[tmpCondition.SelectedDelimitedColumn.ColumnName].ToString()));
                    if (matchedDataRow != null)
                    {
                        SetFeatureColumnValues(f, matchedDataRow, IncludedColumnsList, InvalidColumns);
                        helper.Add(f);
                    }
                    else if (IsIncludeAllFeatures)
                    {
                        helper.Add(f);
                    }
                    if (UpdateProgress(OnUpdatingProgress, index, count))
                    {
                        canceled = true;
                    }
                }
                catch (Exception ex)
                {
                    // Per-feature failure: log, report, continue with next feature.
                    var errorEventArgs = new UpdatingTaskProgressEventArgs(TaskState.Error);
                    errorEventArgs.Error = new ExceptionInfo(string.Format(CultureInfo.InvariantCulture, "Feature id: {0}, {1}", f.Id, ex.Message), ex.StackTrace, ex.Source);
                    GisEditor.LoggerManager.Log(LoggerLevel.Debug, ex.Message, new ExceptionInfo(ex));
                    errorEventArgs.Message = f.Id;
                    OnUpdatingProgress(errorEventArgs);
                }
            }
            // Publish progress; the handler may flip TaskState to Canceled,
            // which overrides the canceled flag computed above.
            args = new UpdatingTaskProgressEventArgs(TaskState.Updating, percentage);
            args.Current = currentProgress;
            args.UpperBound = upperBound;
            OnUpdatingProgress(args);
            canceled = args.TaskState == TaskState.Canceled;
            return canceled;
        }
        catch
        {
            // Best-effort: any unexpected failure keeps the iteration going.
            return false;
        }
    });
    helper.Commit();
    SavePrjFile(OutputPathFileName, DisplayProjectionParameters);
}
/// <summary>
/// Creates a shapefile-backed feature layer from the supplied parameters:
/// resolves the output path, maps the well-known type to a shapefile type,
/// normalizes added columns into DbfColumns (fixing lengths per type name,
/// optionally truncating long column names), promotes over-long Character
/// values to Memo columns when Memo conversion is off, writes the .shp/.cpg/.prj
/// files, and inserts all added features in one transaction.
/// </summary>
protected override FeatureLayer CreateFeatureLayerCore(ConfigureFeatureLayerParameters featureLayerStructureParameters)
{
    string layerPath = LayerPluginHelper.GetLayerUriToSave(featureLayerStructureParameters.LayerUri, ExtensionFilterCore);
    if (string.IsNullOrEmpty(layerPath))
    {
        return null;
    }
    featureLayerStructureParameters.LayerUri = new Uri(layerPath);
    // Geometry kind of the new shapefile; unmapped well-known types leave Null.
    ShapeFileType shapeFileType = ShapeFileType.Null;
    switch (featureLayerStructureParameters.WellKnownType)
    {
        case WellKnownType.Multipoint:
            shapeFileType = ShapeFileType.Multipoint;
            break;
        case WellKnownType.Point:
            shapeFileType = ShapeFileType.Point;
            break;
        case WellKnownType.Line:
        case WellKnownType.Multiline:
            shapeFileType = ShapeFileType.Polyline;
            break;
        case WellKnownType.Polygon:
        case WellKnownType.Multipolygon:
            shapeFileType = ShapeFileType.Polygon;
            break;
    }
    Dictionary<string, DbfColumn> dbfColumns = new Dictionary<string, DbfColumn>();
    Collection<FeatureSourceColumn> addedColumns = featureLayerStructureParameters.AddedColumns;
    // Maps original column names to their truncated replacements.
    Dictionary<string, string> oldNewNames = new Dictionary<string, string>();
    Collection<Feature> addedFeatures = featureLayerStructureParameters.AddedFeatures;
    bool truncateLongColumn = featureLayerStructureParameters.LongColumnTruncateMode == LongColumnTruncateMode.Truncate;
    if (truncateLongColumn)
    {
        Dictionary<string, string> editColumns = new Dictionary<string, string>();
        if (featureLayerStructureParameters.CustomData.ContainsKey("EditedColumns"))
        {
            editColumns = featureLayerStructureParameters.CustomData["EditedColumns"] as Dictionary<string, string>;
        }
        addedColumns = TruncateLongColumnNames(featureLayerStructureParameters.AddedColumns, oldNewNames, editColumns);
    }
    // Normalize every added column into a DbfColumn with a sensible length.
    foreach (var column in addedColumns)
    {
        if (!string.IsNullOrEmpty(column.ColumnName))
        {
            DbfColumn dbfColumn = column as DbfColumn;
            if (dbfColumn != null)
            {
                // Binary double/datetime are fixed 8-byte fields; binary int is 4.
                if (dbfColumn.ColumnType == DbfColumnType.DoubleInBinary || dbfColumn.ColumnType == DbfColumnType.DateTime)
                {
                    dbfColumn.Length = 8;
                    dbfColumn.DecimalLength = 0;
                }
                else if (dbfColumn.ColumnType == DbfColumnType.IntegerInBinary)
                {
                    dbfColumn.Length = 4;
                    dbfColumn.DecimalLength = 0;
                }
            }
            else
            {
                // Generic FeatureSourceColumn: derive length/decimals from the
                // type name, defaulting zero/absent lengths per type.
                int columnLenght = column.MaxLength;
                int decimalLength = 0;
                switch (column.TypeName.ToUpperInvariant())
                {
                    case "DOUBLE":
                    case "NUMERIC":
                        columnLenght = columnLenght == 0 ? 10 : columnLenght;
                        if (columnLenght < 4)
                        {
                            columnLenght = 10;
                        }
                        decimalLength = 4;
                        break;
                    case "DATE":
                    case "DATETIME":
                        columnLenght = columnLenght == 0 ? 10 : columnLenght;
                        decimalLength = 0;
                        break;
                    case "INTEGER":
                    case "INT":
                        columnLenght = columnLenght == 0 ? 10 : columnLenght;
                        decimalLength = 0;
                        break;
                    case "STRING":
                    case "CHARACTER":
                        columnLenght = columnLenght == 0 ? characterTypeLength : columnLenght;
                        decimalLength = 0;
                        break;
                    case "LOGICAL":
                        columnLenght = 5;
                        decimalLength = 0;
                        break;
                }
                // DOUBLE maps to Float, INT maps to Numeric before parsing the
                // name into a DbfColumnType; unparsable names fall back to Character.
                DbfColumnType type = DbfColumnType.Character;
                if (column.TypeName.Equals("DOUBLE", StringComparison.InvariantCultureIgnoreCase))
                {
                    column.TypeName = DbfColumnType.Float.ToString();
                }
                if (column.TypeName.Equals("INT", StringComparison.InvariantCultureIgnoreCase))
                {
                    column.TypeName = DbfColumnType.Numeric.ToString();
                }
                bool isSuccess = Enum.TryParse<DbfColumnType>(column.TypeName, true, out type);
                if (!isSuccess)
                {
                    type = DbfColumnType.Character;
                }
                dbfColumn = new DbfColumn(column.ColumnName, type, columnLenght, decimalLength);
                dbfColumn.TypeName = column.TypeName;
                dbfColumn.MaxLength = column.MaxLength;
            }
            // Last column with a given name wins.
            dbfColumns[dbfColumn.ColumnName] = dbfColumn;
        }
    }
    bool convertMemoToCharacter = featureLayerStructureParameters.MemoColumnConvertMode == MemoColumnConvertMode.ToCharacter;
    Dictionary<string, int> columnLength = new Dictionary<string, int>();
    foreach (var feature in addedFeatures)
    {
        // (removed commented-out legacy LinkColumnValues flattening code)
        // Rename feature values from truncated-away names to their new names.
        foreach (var item in oldNewNames)
        {
            if (feature.ColumnValues.ContainsKey(item.Key))
            {
                feature.ColumnValues[oldNewNames[item.Key]] = feature.ColumnValues[item.Key];
                feature.ColumnValues.Remove(item.Key);
            }
        }
        if (!convertMemoToCharacter)
        {
            // Any value longer than the character limit promotes its column to Memo.
            // NOTE(review): dbfColumns[item.Key] throws KeyNotFoundException if a
            // feature carries a column that was not in AddedColumns — confirm
            // inputs are always consistent.
            foreach (var item in feature.ColumnValues)
            {
                if (item.Value.Length > characterTypeLength && dbfColumns[item.Key].ColumnType != DbfColumnType.Memo)
                {
                    dbfColumns[item.Key].ColumnType = DbfColumnType.Memo;
                    dbfColumns[item.Key].Length = 4;
                    dbfColumns[item.Key].DecimalLength = 0;
                }
            }
        }
    }
    // NOTE(review): this loop is effectively a no-op — all of its actions were
    // commented out (legacy fox-pro/linked-column handling). Kept for parity;
    // candidate for removal.
    foreach (var column in dbfColumns)
    {
        Feature firstFeature = featureLayerStructureParameters.AddedFeatures.FirstOrDefault();
        string tempColumnName = column.Key;
        if (oldNewNames.ContainsValue(column.Key))
        {
            tempColumnName = oldNewNames.FirstOrDefault(f => f.Value == column.Key).Key;
        }
    }
    ShapeFileFeatureLayer.CreateShapeFile(shapeFileType, featureLayerStructureParameters.LayerUri.OriginalString, dbfColumns.Values, DefaultEncoding, OverwriteMode.Overwrite);
    // Write the .cpg file recording the code page used for the DBF.
    string encodingPathFileName = Path.ChangeExtension(featureLayerStructureParameters.LayerUri.OriginalString, ".cpg");
    if (File.Exists(encodingPathFileName))
    {
        File.Delete(encodingPathFileName);
    }
    File.WriteAllText(encodingPathFileName, DefaultEncoding.CodePage.ToString(CultureInfo.InvariantCulture));
    // Write the .prj file with the layer's projection in WKT form.
    string prjPath = Path.ChangeExtension(featureLayerStructureParameters.LayerUri.OriginalString, "prj");
    File.WriteAllText(prjPath, Proj4Projection.ConvertProj4ToPrj(featureLayerStructureParameters.Proj4ProjectionParametersString));
    ShapeFileFeatureLayer resultLayer = new ShapeFileFeatureLayer(featureLayerStructureParameters.LayerUri.LocalPath, GeoFileReadWriteMode.ReadWrite);
    if (addedFeatures.Count > 0)
    {
        resultLayer.Open();
        resultLayer.EditTools.BeginTransaction();
        foreach (var feature in addedFeatures)
        {
            if (convertMemoToCharacter)
            {
                // Clamp values to 254 chars (DBF Character limit) and then to
                // each column's own MaxLength.
                foreach (var item in dbfColumns)
                {
                    if (feature.ColumnValues.ContainsKey(item.Key) && feature.ColumnValues[item.Key].Length > 254)
                    {
                        feature.ColumnValues[item.Key] = feature.ColumnValues[item.Key].Substring(0, 254);
                    }
                    if (feature.ColumnValues.ContainsKey(item.Key) && feature.ColumnValues[item.Key].Length > item.Value.MaxLength)
                    {
                        feature.ColumnValues[item.Key] = feature.ColumnValues[item.Key].Substring(0, item.Value.MaxLength);
                    }
                }
            }
            resultLayer.EditTools.Add(feature);
        }
        resultLayer.EditTools.CommitTransaction();
        resultLayer.Close();
    }
    return resultLayer;
}
/// <summary>Appends a column definition to this DBF's column list.</summary>
public void AddColumn(DbfColumn column) => this.m_columns.Add(column);
/// <summary>
/// Maps a DBF column definition onto a SQL Server column declaration.
/// Decimal-bearing types use precision Length+DecimalCount with scale
/// DecimalCount; unmapped column types yield an empty string.
/// </summary>
private static string ColumnSchema(DbfColumn dbfColumn)
{
    string name = dbfColumn.ColumnName;
    switch (dbfColumn.ColumnType)
    {
        case DbfColumnType.Boolean:
            return $"[{name}] [bit] NULL DEFAULT ((0))";
        case DbfColumnType.Character:
        case DbfColumnType.General:
            return $"[{name}] [nvarchar]({dbfColumn.Length}) NULL";
        case DbfColumnType.Currency:
        case DbfColumnType.Double:
        case DbfColumnType.Float:
            return $"[{name}] [decimal]({dbfColumn.Length + dbfColumn.DecimalCount},{dbfColumn.DecimalCount}) NULL DEFAULT (NULL)";
        case DbfColumnType.Date:
            return $"[{name}] [date] NULL DEFAULT (NULL)";
        case DbfColumnType.DateTime:
            return $"[{name}] [datetime] NULL DEFAULT (NULL)";
        case DbfColumnType.Memo:
            return $"[{name}] [ntext] NULL";
        case DbfColumnType.Number:
            // Numbers with decimals become decimal columns; whole numbers become int.
            return dbfColumn.DecimalCount > 0
                ? $"[{name}] [decimal]({dbfColumn.Length + dbfColumn.DecimalCount},{dbfColumn.DecimalCount}) NULL DEFAULT (NULL)"
                : $"[{name}] [int] NULL DEFAULT (NULL)";
        case DbfColumnType.SignedLong:
            return $"[{name}] [int] NULL DEFAULT (NULL)";
        default:
            return string.Empty;
    }
}
/// <summary>
/// True when the column's DBF type is one of the two treated as numeric here
/// (Float or Numeric).
/// </summary>
private static bool IsNumericColumn(DbfColumn column)
{
    DbfColumnType type = column.ColumnType;
    return type == DbfColumnType.Float || type == DbfColumnType.Numeric;
}
/// <summary>
/// Exports one shape-type group of features to a shapefile: resolves the
/// output path (deleting or renaming on collision per info.Overwrite), creates
/// the file with DBF columns derived from info.Columns, writes all valid
/// features in one transaction, then saves the .prj and rebuilds the DBF.
/// On failure, partially written files are deleted and the error is rethrown
/// as OperationCanceledException.
/// </summary>
private void Export(IGrouping<ShapeFileType, Feature> group, FileExportInfo info)
{
    string path = info.Path;
    if (File.Exists(path))
    {
        if (info.Overwrite)
        {
            // Remove every sibling file of the existing shapefile set.
            string[] suffixes = { ".shp", ".shx", ".ids", ".idx", ".dbf", ".prj" };
            foreach (var suffix in suffixes)
            {
                string fileToRemove = Path.ChangeExtension(path, suffix);
                if (File.Exists(fileToRemove))
                {
                    File.Delete(fileToRemove);
                }
            }
        }
        else
        {
            // Not overwriting: derive a new name by appending the shape type.
            string dir = Path.GetDirectoryName(path);
            string fileName = Path.GetFileNameWithoutExtension(path);
            string extension = Path.GetExtension(path);
            path = Path.Combine(dir, fileName + group.Key.ToString() + extension);
        }
    }
    // NOTE(review): this is a deferred LINQ query; ElementAt(i) below re-runs
    // the Select each call. Materializing with ToList() would avoid repeated
    // Enum.Parse work — confirm before changing.
    var dbfColumns = info.Columns.Select(column =>
    {
        DbfColumnType columnType = (DbfColumnType)Enum.Parse(typeof(DbfColumnType), column.TypeName);
        DbfColumn dbfColumn = new DbfColumn(column.ColumnName, columnType, column.MaxLength, GetDecimalLength(columnType, column.MaxLength));
        return dbfColumn;
    });
    ShapeFileFeatureLayer.CreateShapeFile(group.Key, path, dbfColumns);
    ShapeFileFeatureLayer layer = new ShapeFileFeatureLayer(path, GeoFileReadWriteMode.ReadWrite);
    try
    {
        layer.Open();
        layer.EditTools.BeginTransaction();
        foreach (var feature in group)
        {
            // Invalid geometries are repaired when possible, otherwise skipped.
            bool isValid = true;
            var newFeature = feature;
            if (!feature.IsValid())
            {
                if (feature.CanMakeValid)
                {
                    newFeature = feature.MakeValid();
                }
                else
                {
                    isValid = false;
                }
            }
            if (isValid)
            {
                // Copy column values across, remapping original column names
                // onto the DBF-safe (validated) names by position.
                var featureSourceColumns = layer.FeatureSource.GetColumns();
                var tempColumnNames = featureSourceColumns.Select(column => column.ColumnName);
                var validColumns = GeoDbf.GetValidColumnNames(tempColumnNames);
                Dictionary<string, string> columnValues = new Dictionary<string, string>();
                for (int i = 0; i < validColumns.Count(); i++)
                {
                    var columnName = dbfColumns.ElementAt(i).ColumnName;
                    if (newFeature.ColumnValues.ContainsKey(columnName))
                    {
                        columnValues.Add(validColumns.ElementAt(i), newFeature.ColumnValues[columnName]);
                    }
                }
                Feature validFeature = new Feature(newFeature.GetWellKnownBinary(), newFeature.Id, columnValues);
                layer.EditTools.Add(validFeature);
            }
        }
        layer.EditTools.CommitTransaction();
        layer.Close();
        SavePrjFile(path, info.ProjectionWkt);
        RebuildDbf(path);
    }
    catch (Exception ex)
    {
        GisEditor.LoggerManager.Log(LoggerLevel.Debug, ex.Message, new ExceptionInfo(ex));
        // NOTE(review): committing (not rolling back) the open transaction on
        // failure looks intentional only for flushing state before cleanup —
        // confirm a rollback API isn't the better choice here.
        if (layer.EditTools.IsInTransaction)
        {
            layer.EditTools.CommitTransaction();
        }
        if (layer.IsOpen)
        {
            layer.Close();
        }
        // Remove every partially written file of the shapefile set.
        string[] suffixes = { ".shp", ".shx", ".ids", ".idx", ".dbf", ".prj" };
        foreach (var suffix in suffixes)
        {
            string fileToRemove = Path.ChangeExtension(path, suffix);
            if (File.Exists(fileToRemove))
            {
                File.Delete(fileToRemove);
            }
        }
        throw new OperationCanceledException("Shapefile generates failed.", ex);
    }
}
/// <summary>
/// Rebuilds the column list UI for <paramref name="featureLayer"/>: the real
/// feature-source columns first, then any calculated (virtual) columns registered
/// for this feature source, each wired with edit/delete handlers that refresh the list.
/// </summary>
/// <param name="featureLayer">Layer whose columns are displayed.</param>
private void RefreshColumnList(FeatureLayer featureLayer)
{
    Collection <ViewColumnItem> viewColumnItems = new Collection <ViewColumnItem>();

    // Physical columns from the feature source (read inside SafeProcess so the
    // layer is opened/closed correctly around the access).
    featureLayer.SafeProcess(() =>
    {
        foreach (var column in featureLayer.FeatureSource.GetColumns())
        {
            if (!string.IsNullOrEmpty(column.ColumnName))
            {
                string alias = featureLayer.FeatureSource.GetColumnAlias(column.ColumnName);
                viewColumnItems.Add(new ViewColumnItem(column, alias));
            }
        }
    });

    if (CalculatedDbfColumn.CalculatedColumns.ContainsKey(featureLayer.FeatureSource.Id))
    {
        // Hoist the dictionary lookup out of the loop (was re-indexed per access).
        var calculatedColumns = CalculatedDbfColumn.CalculatedColumns[featureLayer.FeatureSource.Id];
        foreach (var column in calculatedColumns)
        {
            string alias = featureLayer.FeatureSource.GetColumnAlias(column.ColumnName);
            ViewColumnItem item = new ViewColumnItem(column, alias, true);

            item.EditAction = c =>
            {
                DbfColumn dbfColumn = (DbfColumn)c;
                Collection <string> columns = new Collection <string>();
                columns.Add(c.ColumnName);
                string tempAlias = featureLayer.FeatureSource.GetColumnAlias(dbfColumn.ColumnName);
                AddDbfColumnWindow window = new AddDbfColumnWindow(dbfColumn, columns, DbfColumnMode.Calculated, true, tempAlias);
                window.WindowStartupLocation = WindowStartupLocation.CenterOwner;
                window.Owner = Application.Current.MainWindow;
                if (!window.ShowDialog().GetValueOrDefault())
                {
                    return; // dialog cancelled
                }
                CalculatedDbfColumn newColumn = window.DbfColumn as CalculatedDbfColumn;
                if (newColumn == null)
                {
                    return;
                }
                // No-op edit: every property is unchanged, nothing to replace.
                CalculatedDbfColumn tempColumn = (CalculatedDbfColumn)c;
                bool isUnchanged = newColumn.ColumnName == tempColumn.ColumnName
                    && newColumn.CalculationType == tempColumn.CalculationType
                    && newColumn.ColumnType == tempColumn.ColumnType
                    && newColumn.DecimalLength == tempColumn.DecimalLength
                    && newColumn.Length == tempColumn.Length
                    && newColumn.LengthUnit == tempColumn.LengthUnit
                    && newColumn.MaxLength == tempColumn.MaxLength
                    && newColumn.AreaUnit == tempColumn.AreaUnit
                    && newColumn.TypeName == tempColumn.TypeName;
                if (isUnchanged || CheckHasDuplicatedColumn(newColumn))
                {
                    return;
                }
                // Re-check membership at invocation time: the registry may have
                // changed between building this handler and the user clicking edit.
                if (CalculatedDbfColumn.CalculatedColumns.ContainsKey(featureLayer.FeatureSource.Id))
                {
                    var columnsForSource = CalculatedDbfColumn.CalculatedColumns[featureLayer.FeatureSource.Id];
                    // Collection<T>.Remove is a no-op when absent, so the old
                    // Contains-then-Remove double scan is unnecessary.
                    columnsForSource.Remove((CalculatedDbfColumn)c);
                    columnsForSource.Add(newColumn);
                }
                RefreshColumnList(featureLayer);
            };

            item.DeleteAction = c =>
            {
                if (CalculatedDbfColumn.CalculatedColumns.ContainsKey(featureLayer.FeatureSource.Id))
                {
                    // Only refresh the UI when the column was actually removed.
                    if (CalculatedDbfColumn.CalculatedColumns[featureLayer.FeatureSource.Id].Remove((CalculatedDbfColumn)c))
                    {
                        RefreshColumnList(featureLayer);
                    }
                }
            };

            viewColumnItems.Add(item);
        }
    }
    ColumnList.ItemsSource = viewColumnItems;
}
/// <summary>
/// Exports one shape-type group of features to a shapefile via the active
/// <see cref="ShapeFileFeatureLayerPlugin"/>, letting the plugin create the layer from
/// <see cref="ConfigureFeatureLayerParameters"/>. On failure all partially written
/// companion files are deleted.
/// </summary>
/// <param name="group">Features to export, keyed by their shapefile type.</param>
/// <param name="info">Target path, overwrite flag, column definitions and projection WKT.</param>
/// <exception cref="OperationCanceledException">Wraps any failure during shapefile creation.</exception>
private void Export(IGrouping <ShapeFileType, Feature> group, FileExportInfo info)
{
    // All companion files that make up a shapefile on disk.
    string[] suffixes = { ".shp", ".shx", ".ids", ".idx", ".dbf", ".prj" };

    string path = info.Path;
    if (File.Exists(path))
    {
        if (info.Overwrite)
        {
            // Remove every companion file so the plugin starts clean.
            foreach (var suffix in suffixes)
            {
                string fileToRemove = Path.ChangeExtension(path, suffix);
                if (File.Exists(fileToRemove))
                {
                    File.Delete(fileToRemove);
                }
            }
        }
        else
        {
            // Keep the existing file; disambiguate the new one with the shape type name.
            string dir = Path.GetDirectoryName(path);
            string fileName = Path.GetFileNameWithoutExtension(path);
            string extension = Path.GetExtension(path);
            path = Path.Combine(dir, fileName + group.Key.ToString() + extension);
        }
    }

    var dbfColumns = info.Columns.Select(column =>
    {
        // Fall back to Character when the stored type name cannot be parsed.
        DbfColumnType columnType = DbfColumnType.Character;
        try
        {
            columnType = (DbfColumnType)Enum.Parse(typeof(DbfColumnType), column.TypeName);
        }
        catch (Exception ex)
        {
            GisEditor.LoggerManager.Log(LoggerLevel.Debug, ex.Message, new ExceptionInfo(ex));
        }
        // DBF character fields are capped at 254 bytes; anything longer becomes a
        // memo column, and a non-positive length is normalized to the maximum.
        int length = column.MaxLength;
        if (length > 254)
        {
            length = 254;
            columnType = DbfColumnType.Memo;
        }
        else if (length <= 0)
        {
            length = 254;
        }
        return new DbfColumn(column.ColumnName, columnType, length, GetDecimalLength(columnType, column.MaxLength));
    });

    try
    {
        ConfigureFeatureLayerParameters parameters = new ConfigureFeatureLayerParameters();
        foreach (var column in dbfColumns)
        {
            parameters.AddedColumns.Add(column);
        }
        foreach (var feature in group)
        {
            // Repair invalid geometries before writing.
            var newFeature = feature;
            if (!feature.IsValid())
            {
                newFeature = feature.MakeValid();
            }
            Feature validFeature = new Feature(newFeature.GetWellKnownBinary(), newFeature.Id, feature.ColumnValues);
            parameters.AddedFeatures.Add(validFeature);
        }

        parameters.LayerUri = new Uri(path);
        parameters.LongColumnTruncateMode = LongColumnTruncateMode.Truncate;
        parameters.MemoColumnConvertMode = MemoColumnConvertMode.ToCharacter;

        // Collapse the shapefile type down to the well-known geometry family.
        switch (group.Key)
        {
            case ShapeFileType.Point:
            case ShapeFileType.PointZ:
            case ShapeFileType.PointM:
            case ShapeFileType.Multipoint:
            case ShapeFileType.MultipointM:
            case ShapeFileType.MultipointZ:
                parameters.WellKnownType = WellKnownType.Point;
                break;
            case ShapeFileType.Polyline:
            case ShapeFileType.PolylineZ:
            case ShapeFileType.PolylineM:
                parameters.WellKnownType = WellKnownType.Line;
                break;
            case ShapeFileType.Polygon:
            case ShapeFileType.PolygonZ:
            case ShapeFileType.PolygonM:
                parameters.WellKnownType = WellKnownType.Polygon;
                break;
            case ShapeFileType.Null:
            case ShapeFileType.Multipatch:
            default:
                parameters.WellKnownType = WellKnownType.Invalid;
                break;
        }

        parameters.CustomData["Columns"] = parameters.AddedColumns;
        parameters.CustomData["CustomizeColumnNames"] = true;
        parameters.CustomData["EditedColumns"] = info.CostomizedColumnNames;
        parameters.Proj4ProjectionParametersString = info.ProjectionWkt;

        var layerPlugin = GisEditor.LayerManager.GetActiveLayerPlugins <ShapeFileFeatureLayerPlugin>().FirstOrDefault();
        if (layerPlugin == null)
        {
            // BUG FIX: previously a missing plugin surfaced as a NullReferenceException;
            // throw a descriptive error instead (still funneled through the catch below).
            throw new InvalidOperationException("No active ShapeFileFeatureLayerPlugin is available to create the shapefile.");
        }
        layerPlugin.CreateFeatureLayer(parameters);

        SavePrjFile(path, info.ProjectionWkt);
        RebuildDbf(path);
    }
    catch (Exception ex)
    {
        GisEditor.LoggerManager.Log(LoggerLevel.Debug, ex.Message, new ExceptionInfo(ex));
        // Best-effort cleanup of any partially written shapefile parts.
        foreach (var suffix in suffixes)
        {
            string fileToRemove = Path.ChangeExtension(path, suffix);
            if (File.Exists(fileToRemove))
            {
                File.Delete(fileToRemove);
            }
        }
        throw new OperationCanceledException("Shapefile generates failed.", ex);
    }
}
/// <summary>
/// Imports the contents of a TIGER/Line DBF file into <paramref name="toDatabase"/>,
/// resuming from the record count stored in the IMPORTDETAIL tracking table and
/// batching multiple rows per INSERT statement for throughput.
/// </summary>
/// <param name="toDatabase">Database abstraction that builds and executes the SQL.</param>
/// <param name="dataType">Kind of TIGER data; its enum name ("g" format) is used as the table name.</param>
/// <param name="fileToImport">Path to the local .dbf file to read.</param>
/// <param name="recordsPerBatch">Target number of rows accumulated before each multi-row INSERT runs.</param>
/// <param name="referenceURL">Source URL used as the key into the IMPORTDETAIL table.</param>
/// <param name="referenceZipFile">Local archive name recorded alongside the URL in IMPORTDETAIL.</param>
/// <returns>Record counts (in file / already imported / newly imported) plus any per-record errors.</returns>
private static DetailedReturn ImportTigerData(ISQL toDatabase, DataTypes dataType, string fileToImport, int recordsPerBatch, string referenceURL, string referenceZipFile)
{
    DetailedReturn ret = new DetailedReturn();

    // CREATE the IMPORTDETAIL table in the Database if it doesnt already exist
    toDatabase.CreateImportDetailTable();

    // Make sure the file exists that we wish to import...
    if (File.Exists(fileToImport))
    {
        // Read the dbfFile contents into a DbfTable object
        using (var dbfTable = new DbfTable(fileToImport, Encoding.UTF8))
        {
            var header = dbfTable.Header;
            var versionDescription = header.VersionDescription; // NOTE(review): unused local, kept as-is
            var hasMemo = dbfTable.Memo != null;                // NOTE(review): unused local, kept as-is
            var recordCount = header.RecordCount;
            int rowsAffected = 0;
            ret.TotalRecordsInFile = recordCount;

            // Get the IMPORTDETAIL record that matches the URL/File we are going to import
            ImportDetail importDetail = toDatabase.GetImportDetail(referenceURL);
            if (importDetail == null)
            {
                // If no matching IMPORTDETAIL record was found, create one and re-read it
                toDatabase.InsertImportDetails(referenceURL, referenceZipFile, dataType);
                importDetail = toDatabase.GetImportDetail(referenceURL); //new ImportDetail { URL = referenceURL, LocalFile = referenceZipFile, FileType = dataType.ToString("g"), LastRecordNum = 0 };
            }
            ret.TotalRecordsAlreadyInDB = importDetail.LastRecordNum;

            // Proceed only if the record count in the file exceeds that which we have already imported
            if (recordCount > importDetail.LastRecordNum)
            {
                // The DataTypes enum name is going to be our TABLE name
                string tableName = dataType.ToString("g");

                // Generate a CREATE TABLE script representing the DbfTable
                string tableCreate = toDatabase.GetCreateTableScript(dbfTable, tableName);
                Debug.WriteLine(tableCreate);

                // CREATE IF NOT EXISTS our TABLE in the DB
                toDatabase.ExecuteNonQuery(tableCreate);

                // Create the first part of our INSERT statement
                string insertHeader = toDatabase.GetInsertHeader(dbfTable, tableName);

                // Reusable record buffer for reading from the DbfTable
                var dbfRecord = new DbfRecord(dbfTable);

                // We are going to INSERT multiple records with each DB Command to dramatically speed things up
                StringBuilder multiInsert = new StringBuilder();
                multiInsert.Append(insertHeader);

                // Loop through each of our records...
                while (dbfTable.Read(dbfRecord))
                {
                    try
                    {
                        // We only want to start INSERTing records where we last left off;
                        // records before LastRecordNum are walked past but still counted below.
                        if (ret.TotalRecordsImported >= importDetail.LastRecordNum)
                        {
                            // Skip the record if it is marked as deleted
                            // (note: `continue` also skips the counter increment below)
                            if (dbfRecord.IsDeleted)
                            {
                                continue;
                            }
                            int col = -1;
                            StringBuilder rowPart = new StringBuilder();
                            // Loop through each of the values in our record
                            foreach (var dbfValue in dbfRecord.Values)
                            {
                                col++;
                                // Get the column type of this value
                                DbfColumn c = dbfTable.Columns[col];
                                // Format the value properly for our INSERT statment
                                rowPart.Append(toDatabase.FormatValueForInsert(c.ColumnType, dbfValue.ToString()) + ",");
                            }
                            // Add the Import Detail ID for our last column value
                            rowPart.Append(importDetail.ID);
                            // Append the record values to our INSERT statement
                            multiInsert.Append("(" + rowPart.ToString() + "),");

                            // If we have collected a full batch of records...
                            // NOTE(review): the flush is keyed to the running counter BEFORE it is
                            // incremented, so batch boundaries may be off by one relative to
                            // recordsPerBatch — confirm this is intended.
                            if (ret.TotalRecordsImported % recordsPerBatch == 0)
                            {
                                // Its time to execute the INSERT (trailing comma stripped)
                                rowsAffected = toDatabase.ExecuteNonQuery(multiInsert.ToString().TrimEnd(','));
                                // If the INSERT was successful
                                if (rowsAffected > 0)
                                {
                                    // UPDATE the IMPORTDETAILS table with our progress
                                    toDatabase.UpdateImportDetails(referenceURL, ret.TotalRecordsImported, ret.TotalRecordsImported == recordCount);
                                }
                                // Prepare the next mass INSERT statement
                                multiInsert.Clear();
                                multiInsert.Append(insertHeader);
                            }
                        }
                        // Update the record count (counts skipped-prefix records too; corrected below)
                        ret.TotalRecordsImported++;
                    }
                    catch (Exception ex)
                    {
                        //Debug.WriteLine(ex.Message);
                        // Per-record failures are recorded and the import continues.
                        ret.Errors.Add(new ErrorDetail(ErrorTypes.Exception, dataType, ex.Message, referenceURL, fileToImport));
                    }
                }

                // TotalRecordsImported counted every record we walked past, including the
                // already-imported prefix; subtract the prefix so it reflects new rows only.
                ret.TotalRecordsImported -= importDetail.LastRecordNum;

                // If we have a remaining INSERT compiled....
                if (multiInsert.ToString() != "" && multiInsert.ToString() != insertHeader)
                {
                    // execute the INSERT
                    rowsAffected = toDatabase.ExecuteNonQuery(multiInsert.ToString().TrimEnd(','));
                    // If the INSERT was successful
                    if (rowsAffected > 0)
                    {
                        // UPDATE the IMPORTDETAILS table
                        toDatabase.UpdateImportDetails(referenceURL, ret.TotalRecordsImported, ret.TotalRecordsImported == recordCount);
                    }
                }
            }
            else // If we already appeared to have imported all the records in this file
            {
                // Mark the detail row complete without re-reading any records.
                toDatabase.UpdateImportDetails(referenceURL, recordCount, true);
            }
        }
    }
    else // If the file does not exist
    {
        ret.Errors.Add(new ErrorDetail(ErrorTypes.BadOrMissingLocalFile, dataType, "File not found", referenceURL, fileToImport));
    }
    return(ret);
}