/// <summary>
/// Background worker routine that creates a default layer for each given class definition.
/// </summary>
/// <param name="wrk">The background worker used to report progress</param>
/// <param name="e">The worker event args (unused)</param>
/// <param name="args">
/// [0]: the class definitions to create layers for (List&lt;ClassDefinition&gt;);
/// [1]: the server connection (IServerConnection);
/// [2]: the feature source id (string);
/// [3]: the target folder for the created layers (string)
/// </param>
/// <returns>The set of resource ids of the layers that were created</returns>
private static object DoBackgroundWorker(BackgroundWorker wrk, DoWorkEventArgs e, params object[] args)
{
    var items = (List<ClassDefinition>)args[0];
    var conn = (IServerConnection)args[1];
    string featureSource = (string)args[2];
    string targetFolder = (string)args[3];
    LengthyOperationProgressCallBack cb = (o, pe) =>
    {
        wrk.ReportProgress(pe.Progress, o);
    };
    var result = new HashSet<string>();
    int processed = 0;
    foreach (var cls in items)
    {
        var lyrId = Utility.CreateDefaultLayer(conn, featureSource, cls, targetFolder);
        if (lyrId != null)
        {
            result.Add(lyrId);
        }
        processed++;
        //FIX: multiply before dividing. The previous form (processed / items.Count) * 100
        //performed integer division first, which truncates to 0 for every item except the
        //last, so the reported progress was always 0 until completion.
        cb(null, new LengthyOperationProgressArgs(string.Empty, (processed * 100) / items.Count));
    }
    return result;
}
/// <summary>
/// Migrates the given resource and its dependents from the source connection to the
/// target connection, running the operation on a background thread behind a progress
/// dialog.
/// </summary>
/// <param name="source">The source connection</param>
/// <param name="target">The target connection</param>
/// <param name="resourceId">The id of the resource to migrate</param>
/// <param name="dependentResourceIds">The ids of dependent resources to migrate alongside</param>
/// <param name="overwrite">If true, existing resources on the target are overwritten</param>
private void DoMigrate(IServerConnection source, IServerConnection target, string resourceId, string[] dependentResourceIds, bool overwrite)
{
    var diag = new ProgressDialog();
    diag.CancelAbortsThread = true;
    var method = new ProgressDialog.DoBackgroundWork((worker, e, args) =>
    {
        //Unpack the arguments passed to RunOperationAsync below
        var src = (IServerConnection)args[0];
        var dst = (IServerConnection)args[1];
        var resId = (string)args[2];
        var dependents = (string[])args[3];
        var overwriteExisting = (bool)args[4];
        var cb = new LengthyOperationProgressCallBack((sender, cbe) =>
        {
            worker.ReportProgress(cbe.Progress, cbe.StatusMessage);
        });
        //FIX: use the unpacked worker arguments (src/dst/dependents) instead of the
        //captured outer locals; previously these locals were assigned but never used
        var migrator = new ResourceMigrator(src, dst);
        migrator.MigrateResource(resId, dependents, overwriteExisting, cb);
        return true;
    });
    diag.RunOperationAsync(Workbench.Instance, method, source, target, resourceId, dependentResourceIds, overwrite);
}
/// <summary>
/// Shortcut API to migrate a specific resource to the target connection. Dependent
/// resources are automatically migrated as well. This copies all dependent resources
/// of the specified resource.
/// </summary>
/// <param name="resourceId">The id of the resource to migrate</param>
/// <param name="overwrite">If true, all dependent resources that already exist in the target connection are overwritten, otherwise these are not copied over</param>
/// <param name="callback">A callback method to indicate progress</param>
/// <returns>An array of resource ids that were successfully migrated</returns>
public string[] MigrateResource(string resourceId, bool overwrite, LengthyOperationProgressCallBack callback)
{
    Check.ArgumentNotEmpty(resourceId, nameof(resourceId));

    //Walk the reverse-reference graph to collect the full set of dependent resource ids
    var dependencyMap = new Dictionary<string, string>();
    var refList = GetReverseReferences(resourceId);
    BuildFullDependencyList(dependencyMap, refList);

    var dependentIds = new List<string>(dependencyMap.Keys).ToArray();
    return MigrateResource(resourceId, dependentIds, overwrite, callback);
}
/// <summary>
/// Background worker routine that explodes a themed layer into individual filtered
/// layers, relaying progress back to the worker.
/// </summary>
/// <param name="worker">The background worker used to report progress</param>
/// <param name="e">The worker event args (unused)</param>
/// <param name="args">[0]: the ExplodeThemeOptions describing the operation</param>
/// <returns>true on completion</returns>
private object ExplodeThemeWorker(BackgroundWorker worker, DoWorkEventArgs e, params object[] args)
{
    var options = (ExplodeThemeOptions)args[0];
    LengthyOperationProgressCallBack progress = (sender, progressArgs) =>
        worker.ReportProgress(progressArgs.Progress, progressArgs.StatusMessage);
    Utility.ExplodeThemeIntoFilteredLayers(_edSvc.CurrentConnection, options, progress);
    return true;
}
/// <summary>
/// Background worker routine that computes the upstream and downstream dependencies
/// of the given repository items.
/// </summary>
/// <param name="wrk">The background worker (no progress is reported by this routine)</param>
/// <param name="e">The worker event args (unused)</param>
/// <param name="args">[0]: the selected repository items (RepositoryItem[]); [1]: the server connection (IServerConnection)</param>
/// <returns>A DependencySet containing the selected ids, their downstream references and their upstream references</returns>
private static object DoBackgroundWorker(BackgroundWorker wrk, DoWorkEventArgs e, params object[] args)
{
    var items = (RepositoryItem[])args[0];
    var conn = (IServerConnection)args[1];
    //FIX: removed an unused progress-callback local that was previously declared here
    //but never invoked by this routine
    var upRefs = new HashSet<string>();
    var downRefs = new HashSet<string>();
    var selResources = new HashSet<string>();
    foreach (var ri in items)
    {
        selResources.Add(ri.ResourceId);
        ProcessUpstreamDependencies(upRefs, ri.ResourceId, conn.ResourceService);
        ProcessDownstreamDependencies(downRefs, ri.ResourceId, conn.ResourceService);
    }
    return new DependencySet(selResources, downRefs, upRefs);
}
/// <summary>
/// Migrates the given resource and its dependents from the source connection to the
/// target connection, running the operation on a background thread behind a progress
/// dialog.
/// </summary>
/// <param name="source">The source connection</param>
/// <param name="target">The target connection</param>
/// <param name="resourceId">The id of the resource to migrate</param>
/// <param name="dependentResourceIds">The ids of dependent resources to migrate alongside</param>
/// <param name="overwrite">If true, existing resources on the target are overwritten</param>
private void DoMigrate(IServerConnection source, IServerConnection target, string resourceId, string[] dependentResourceIds, bool overwrite)
{
    var diag = new ProgressDialog();
    diag.CancelAbortsThread = true;
    var method = new ProgressDialog.DoBackgroundWork((worker, e, args) =>
    {
        //Unpack the arguments passed to RunOperationAsync below
        var src = (IServerConnection)args[0];
        var dst = (IServerConnection)args[1];
        var resId = (string)args[2];
        var dependents = (string[])args[3];
        var overwriteExisting = (bool)args[4];
        var cb = new LengthyOperationProgressCallBack((sender, cbe) =>
        {
            worker.ReportProgress(cbe.Progress, cbe.StatusMessage);
        });
        //FIX: use the unpacked worker arguments (src/dst/dependents) instead of the
        //captured outer locals; previously these locals were assigned but never used
        var migrator = new ResourceMigrator(src, dst);
        migrator.MigrateResource(resId, dependents, overwriteExisting, cb);
        return true;
    });
    diag.RunOperationAsync(Workbench.Instance, method, source, target, resourceId, dependentResourceIds, overwrite);
}
/// <summary>
/// Copies or moves the resources selected in the given dialog from the source connection
/// to the target connection, running the operation behind a progress dialog.
/// </summary>
/// <param name="source">The source connection</param>
/// <param name="target">The target connection</param>
/// <param name="migrate">The dialog carrying the selected resource ids, target folder, overwrite flag and action</param>
/// <returns>The number of resources affected by the copy/move</returns>
private static int DoMigrate(OSGeo.MapGuide.MaestroAPI.IServerConnection source, OSGeo.MapGuide.MaestroAPI.IServerConnection target, CopyMoveToServerDialog migrate)
{
    var diag = new ProgressDialog();
    diag.CancelAbortsThread = true;
    var method = new ProgressDialog.DoBackgroundWork((worker, e, args) =>
    {
        //Unpack the arguments passed to RunOperationAsync below
        var src = (IServerConnection)args[0];
        var dst = (IServerConnection)args[1];
        var ids = (string[])args[2];
        var folder = (string)args[3];
        var overwrite = (bool)args[4];
        var act = (MigrationAction)args[5];
        var cb = new LengthyOperationProgressCallBack((sender, cbe) =>
        {
            worker.ReportProgress(cbe.Progress, cbe.StatusMessage);
        });
        //FIX: use the unpacked worker arguments (src/dst) instead of the captured outer
        //locals; previously src and dst were assigned but never used
        var migrator = new ResourceMigrator(src, dst);
        int affected = 0;
        switch (act)
        {
            case MigrationAction.Copy:
                affected = migrator.CopyResources(ids, folder, overwrite, cb);
                break;
            case MigrationAction.Move:
                affected = migrator.MoveResources(ids, folder, overwrite, cb);
                break;
        }
        return affected;
    });
    return (int)diag.RunOperationAsync(Workbench.Instance, method, source, target, migrate.SourceResourceIds, migrate.TargetFolder, migrate.OverwriteResources, migrate.SelectedAction);
}
/// <summary>
/// Executes a SHP load procedure: for each source .shp file this optionally creates a
/// Feature Source (uploading the .shp and its companion shx/dbf/idx/prj/cpg files as
/// resource data) and a matching 1.0.0 Layer Definition.
/// </summary>
/// <param name="cb">The progress callback (never null; the caller substitutes a no-op)</param>
/// <param name="shpl">The SHP load procedure to execute</param>
/// <param name="firstExecution">Set to true if the procedure had no previously generated resource id list, false otherwise</param>
/// <returns>The ids of the resources that were created or updated</returns>
private string[] ExecuteShpLoadProcedure(LengthyOperationProgressCallBack cb, IShpLoadProcedure shpl, ref bool firstExecution)
{
    List<string> resCreatedOrUpdated = new List<string>();
    var shpFiles = shpl.SourceFile;
    //FIX: guard against an empty source file list; (100 / shpFiles.Count) below would
    //otherwise throw DivideByZeroException
    if (shpFiles.Count == 0)
    {
        firstExecution = (shpl.ResourceId == null);
        return resCreatedOrUpdated.ToArray();
    }
    int pcPerFile = (int)(100 / shpFiles.Count);
    int current = 0;

    //Normalize all configured folder paths to carry a trailing slash
    string root = shpl.RootPath;
    if (!root.EndsWith("/")) //NOXLATE
        root += "/"; //NOXLATE
    string sdp = shpl.SpatialDataSourcesPath;
    string lp = shpl.LayersPath;
    if (!string.IsNullOrEmpty(sdp))
    {
        if (!sdp.EndsWith("/")) //NOXLATE
            sdp += "/"; //NOXLATE
    }
    if (!string.IsNullOrEmpty(lp))
    {
        if (!lp.EndsWith("/")) //NOXLATE
            lp += "/"; //NOXLATE
    }
    string fsRoot = (string.IsNullOrEmpty(sdp) ? root : sdp) + shpl.SpatialDataSourcesFolder;
    string layerRoot = (string.IsNullOrEmpty(lp) ? root : lp) + shpl.LayersFolder;
    if (!fsRoot.EndsWith("/")) //NOXLATE
        fsRoot += "/"; //NOXLATE
    if (!layerRoot.EndsWith("/")) //NOXLATE
        layerRoot += "/"; //NOXLATE

    //If the procedure carries a previously generated resource id list, only those
    //resources are updated on this run
    List<string> resToUpdate = new List<string>();
    if (shpl.ResourceId != null)
    {
        resToUpdate.AddRange(shpl.ResourceId);
        firstExecution = false;
    }
    else
    {
        firstExecution = true;
    }

    Dictionary<string, List<string>> extraFiles = new Dictionary<string, List<string>>();
    //Unlike SDF, a SHP file actually consists of multiple files
    foreach (string shp in shpFiles)
    {
        if (!extraFiles.ContainsKey(shp))
            extraFiles[shp] = new List<string>();

        //we want to preserve casing for everything before the extension
        string prefix = shp.Substring(0, shp.LastIndexOf(".") + 1); //NOXLATE
        extraFiles[shp].Add(prefix + "shx"); //NOXLATE
        extraFiles[shp].Add(prefix + "dbf"); //NOXLATE
        extraFiles[shp].Add(prefix + "idx"); //NOXLATE
        extraFiles[shp].Add(prefix + "prj"); //NOXLATE
        extraFiles[shp].Add(prefix + "cpg"); //NOXLATE
        //TODO: Are we missing anything else?
    }

    foreach (string file in shpFiles)
    {
        bool success = false;
        if (System.IO.File.Exists(file))
        {
            string resName = System.IO.Path.GetFileNameWithoutExtension(file);
            string dataName = System.IO.Path.GetFileName(file);
            string fsId = fsRoot + resName + ".FeatureSource"; //NOXLATE
            string lyrId = layerRoot + resName + ".LayerDefinition"; //NOXLATE

            if (shpl.GenerateSpatialDataSources)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(fsId));
                if (!skip)
                {
                    //Process is as follows:
                    //
                    // 1. Create and save feature source document.
                    // 2. Upload sdf file as resource data for this document.
                    // 3. Test the connection, it should check out.
                    // 4. If no spatial contexts are detected, assign a default one from the load procedure and save the modified feature source.

                    //Step 1: Create feature source document
                    var conp = new NameValueCollection();
                    conp["DefaultFileLocation"] = StringConstants.MgDataFilePath + dataName; //NOXLATE
                    var fs = ObjectFactory.CreateFeatureSource(this.Parent, "OSGeo.SHP", conp); //NOXLATE
                    fs.ResourceID = fsId;

                    this.Parent.ResourceService.SaveResource(fs);
                    resCreatedOrUpdated.Add(fsId);
                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, fsId), current));

                    //TODO: When the infrastructure is available to us (ie. A portable .net FDO/MG Feature Service API wrapper)
                    //Maybe then we can actually implement the generalization and conversion properties. Until then, we skip
                    //these options

                    //Step 2: Load resource data for document
                    //FIX: dispose the file stream once uploaded (it was previously leaked)
                    using (var shpStream = System.IO.File.OpenRead(file))
                    {
                        this.Parent.ResourceService.SetResourceData(fsId, dataName, ResourceDataType.File, shpStream);
                    }
                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, file), current));

                    //Load supplementary files
                    foreach (string extraFile in extraFiles[file])
                    {
                        string dn = System.IO.Path.GetFileName(extraFile);
                        if (System.IO.File.Exists(extraFile))
                        {
                            //FIX: dispose each supplementary file stream as well
                            using (var extraStream = System.IO.File.OpenRead(extraFile))
                            {
                                this.Parent.ResourceService.SetResourceData(fsId, dn, ResourceDataType.File, extraStream);
                            }
                            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, extraFile), current));
                        }
                    }

                    //Step 3: Test to make sure we're all good so far
                    string result = this.Parent.FeatureService.TestConnection(fsId);
                    if (Utility.IsSuccessfulConnectionTestResult(result))
                    {
                        //Step 4: Test to see if default cs needs to be specified
                        FdoSpatialContextList spatialContexts = this.Parent.FeatureService.GetSpatialContextInfo(fsId, false);
                        if (!string.IsNullOrEmpty(shpl.CoordinateSystem))
                        {
                            bool hasPrj = false;
                            //If there is no prj file, we can just upload one with the specified WKT
                            foreach (var resd in fs.EnumerateResourceData())
                            {
                                if (resd.Name == resName + ".prj") //NOXLATE
                                {
                                    hasPrj = true;
                                    break;
                                }
                            }
                            //Case 1: No .prj file. Most probable
                            if (!hasPrj)
                            {
                                string tmp = System.IO.Path.GetTempFileName();
                                System.IO.File.WriteAllText(tmp, shpl.CoordinateSystem);
                                using (var fsr = System.IO.File.OpenRead(tmp))
                                {
                                    fs.SetResourceData(resName + ".prj", ResourceDataType.File, fsr); //NOXLATE
                                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateUploadedPrj, resName), current));
                                }
                                //Best-effort cleanup of the temp file; failure is non-fatal
                                try
                                {
                                    System.IO.File.Delete(tmp);
                                }
                                catch { }
                            }
                            else if (spatialContexts.SpatialContext.Count == 0) //Case 2: No Spatial contexts. Declare one using SupplementalContextInfo
                            {
                                //Register the default CS from the load procedure
                                fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource_1_0_0.SpatialContextType()
                                {
                                    Name = "Default", //NOXLATE
                                    CoordinateSystem = shpl.CoordinateSystem
                                });

                                //Update this feature source
                                this.Parent.ResourceService.SaveResource(fs);
                                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                            }
                            else if (spatialContexts.SpatialContext.Count == 1) //Case 3: One spatial context with blank WKT. Override it using the SupplementalContextInfo
                            {
                                var sc = spatialContexts.SpatialContext[0];
                                if (string.IsNullOrEmpty(sc.CoordinateSystemWkt))
                                {
                                    //Register the default CS from the load procedure
                                    fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource_1_0_0.SpatialContextType()
                                    {
                                        Name = sc.Name,
                                        CoordinateSystem = shpl.CoordinateSystem
                                    });

                                    //Update this feature source
                                    this.Parent.ResourceService.SaveResource(fs);
                                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                                }
                            }
                        }
                    }
                }
            }

            if (shpl.GenerateLayers)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(lyrId));
                if (!skip)
                {
                    //NOTE: Because we are working against 1.0.0 object types this will always create 1.0.0 Layer Definition
                    //resources

                    //Process is as follows
                    //
                    // 1. Describe the schema of the feature source
                    // 2. If it contains at least one feature class, create a layer definition
                    // 3. Set the following layer definition properties:
                    //    - Feature Source: the feature source id
                    //    - Feature Class: the first feature class in the schema
                    //    - Geometry: the first geometry property in the first feature class
                    // 4. Infer the supported geometry types for this feature class. Toggle supported styles accordingly.

                    //Step 1: Describe the schema
                    //
                    //NOTE: I think we can get away with the full schema walk here. It's very unlikely we will be uploading a flat
                    //file with hundreds of classes. Even then, flat-file schema walk performance blows RDBMS walking performance
                    //out of the water anyway.
                    FeatureSourceDescription desc = this.Parent.FeatureService.DescribeFeatureSource(fsId);

                    //Step 2: Find the first feature class with a geometry property
                    ClassDefinition clsDef = null;
                    GeometricPropertyDefinition geom = null;
                    bool done = false;
                    foreach (ClassDefinition cls in desc.AllClasses)
                    {
                        if (done)
                            break;
                        foreach (PropertyDefinition prop in cls.Properties)
                        {
                            if (done)
                                break;
                            if (prop.Type == OSGeo.MapGuide.MaestroAPI.Schema.PropertyDefinitionType.Geometry)
                            {
                                clsDef = cls;
                                geom = (GeometricPropertyDefinition)prop;
                                done = true;
                            }
                        }
                    }

                    if (clsDef != null && geom != null)
                    {
                        var ld = ObjectFactory.CreateDefaultLayer(this.Parent, LayerType.Vector, new Version(1, 0, 0));

                        //Step 3: Assign default properties
                        ld.ResourceID = lyrId;
                        var vld = ld.SubLayer as IVectorLayerDefinition;
                        vld.ResourceId = fsId;
                        vld.FeatureName = clsDef.QualifiedName;
                        vld.Geometry = geom.Name;

                        //Step 4: Infer geometry storage support and remove unsupported styles
                        var scale = vld.GetScaleRangeAt(0);
                        var geomTypes = geom.GetIndividualGeometricTypes();
                        var remove = new List<string>();
                        if (Array.IndexOf(geomTypes, FeatureGeometricType.Point) < 0)
                        {
                            remove.Add(FeatureGeometricType.Point.ToString().ToLower());
                        }
                        if (Array.IndexOf(geomTypes, FeatureGeometricType.Curve) < 0)
                        {
                            remove.Add(FeatureGeometricType.Curve.ToString().ToLower());
                        }
                        if (Array.IndexOf(geomTypes, FeatureGeometricType.Surface) < 0)
                        {
                            remove.Add(FeatureGeometricType.Surface.ToString().ToLower());
                        }
                        scale.RemoveStyles(remove);

                        this.Parent.ResourceService.SaveResource(ld);
                        resCreatedOrUpdated.Add(lyrId);
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, lyrId), current));
                    }
                }
            }
            success = true;
        }
        else
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateFileNotFound, file), current));
        }

        //This file is now fully processed, so increment progress
        current += pcPerFile;
        if (success)
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSuccess, file), current));
        }
    }
    return resCreatedOrUpdated.ToArray();
}
/// <summary>
/// Executes a non-SHP load procedure (SDF/SQLite/DWF): for each source file this
/// optionally creates a Drawing Source (DWF) or Feature Source (SDF/SQLite), uploads
/// the file as resource data, and creates a matching 1.0.0 Layer Definition.
/// </summary>
/// <param name="cb">The progress callback (never null; the caller substitutes a no-op)</param>
/// <param name="proc">The load procedure to execute</param>
/// <param name="firstExecution">Set to true if the procedure had no previously generated resource id list, false otherwise</param>
/// <returns>The ids of the resources that were created or updated</returns>
private string[] ExecuteBaseProcedure(LengthyOperationProgressCallBack cb, IBaseLoadProcedure proc, ref bool firstExecution)
{
    List<string> resCreatedOrUpdated = new List<string>();
    var files = proc.SourceFile;
    //FIX: guard against an empty source file list; (100 / files.Count) below would
    //otherwise throw DivideByZeroException
    if (files.Count == 0)
    {
        firstExecution = !(proc.ResourceId != null && proc.ResourceId.Count > 0);
        return resCreatedOrUpdated.ToArray();
    }
    int pcPerFile = (int)(100 / files.Count);
    int current = 0;

    //Normalize all configured folder paths to carry a trailing slash
    string root = proc.RootPath;
    if (!root.EndsWith("/")) //NOXLATE
        root += "/"; //NOXLATE
    string sdp = proc.SpatialDataSourcesPath;
    string lp = proc.LayersPath;
    if (!string.IsNullOrEmpty(sdp))
    {
        if (!sdp.EndsWith("/")) //NOXLATE
            sdp += "/"; //NOXLATE
    }
    if (!string.IsNullOrEmpty(lp))
    {
        if (!lp.EndsWith("/")) //NOXLATE
            lp += "/"; //NOXLATE
    }
    string fsRoot = (string.IsNullOrEmpty(sdp) ? root : sdp) + proc.SpatialDataSourcesFolder;
    string layerRoot = (string.IsNullOrEmpty(lp) ? root : lp) + proc.LayersFolder;
    if (!fsRoot.EndsWith("/")) //NOXLATE
        fsRoot += "/"; //NOXLATE
    if (!layerRoot.EndsWith("/")) //NOXLATE
        layerRoot += "/"; //NOXLATE

    //If the procedure carries a previously generated resource id list, only those
    //resources are updated on this run
    List<string> resToUpdate = new List<string>();
    if (proc.ResourceId != null && proc.ResourceId.Count > 0)
    {
        resToUpdate.AddRange(proc.ResourceId);
        firstExecution = false;
    }
    else
    {
        firstExecution = true;
    }

    foreach (string file in files)
    {
        bool success = false;
        if (System.IO.File.Exists(file))
        {
            //GOTCHA: We are assuming these SDF files are not SDF2 files. This is
            //because there is no multi-platform solution to convert SDF2 files to SDF3
            string resName = System.IO.Path.GetFileNameWithoutExtension(file);
            string dataName = System.IO.Path.GetFileName(file);
            string dsId = fsRoot + resName + ".DrawingSource"; //NOXLATE
            string fsId = fsRoot + resName + ".FeatureSource"; //NOXLATE
            string lyrId = layerRoot + resName + ".LayerDefinition"; //NOXLATE

            if (proc.GenerateSpatialDataSources)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(fsId));
                if (!skip)
                {
                    if (proc.Type == LoadType.Dwf)
                    {
                        //Process is as follows:
                        //
                        // 1. Create and save drawing source document.
                        // 2. Upload dwf file as resource data for this document.

                        //Step 1: Create and save drawing source document.
                        IDrawingSource ds = ObjectFactory.CreateDrawingSource(this.Parent);
                        ds.SourceName = dataName;
                        ds.CoordinateSpace = proc.CoordinateSystem;
                        ds.ResourceID = dsId;
                        this.Parent.ResourceService.SaveResource(ds);
                        resCreatedOrUpdated.Add(dsId);
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, dsId), current));

                        //Step 2: Load resource data for document
                        //FIX: dispose the file stream once uploaded (it was previously leaked)
                        using (var dwfStream = System.IO.File.OpenRead(file))
                        {
                            this.Parent.ResourceService.SetResourceData(dsId, dataName, ResourceDataType.File, dwfStream);
                        }
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, file), current));

                        ds.RegenerateSheetList();
                        this.Parent.ResourceService.SaveResource(ds);
                        ds.UpdateExtents();
                        this.Parent.ResourceService.SaveResource(ds);
                    }
                    else
                    {
                        //Process is as follows:
                        //
                        // 1. Create and save feature source document.
                        // 2. Upload sdf file as resource data for this document.
                        // 3. Test the connection, it should check out.
                        // 4. If no spatial contexts are detected, assign a default one from the load procedure and save the modified feature source.

                        //Step 1: Create feature source document
                        string provider = "OSGeo.SDF"; //NOXLATE
                        switch (proc.Type)
                        {
                            case LoadType.Sqlite:
                                provider = "OSGeo.SQLite"; //NOXLATE
                                break;
                        }
                        var conp = new NameValueCollection();
                        conp["File"] = StringConstants.MgDataFilePath + dataName;
                        var fs = ObjectFactory.CreateFeatureSource(this.Parent, provider, conp);
                        fs.ResourceID = fsId;
                        this.Parent.ResourceService.SaveResource(fs);
                        resCreatedOrUpdated.Add(fsId);
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, fsId), current));

                        //TODO: When the infrastructure is available to us (ie. A portable .net FDO/MG Feature Service API wrapper)
                        //Maybe then we can actually implement the generalization and duplicate record handling properties. Until then, we skip
                        //these options

                        //Step 2: Load resource data for document
                        //FIX: dispose the file stream once uploaded (it was previously leaked)
                        using (var dataStream = System.IO.File.OpenRead(file))
                        {
                            this.Parent.ResourceService.SetResourceData(fsId, dataName, ResourceDataType.File, dataStream);
                        }
                        //FIX: report the "loaded" message here rather than "created"
                        //(matches the SHP code path; the "created" message was already
                        //reported above when the document was saved)
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, file), current));

                        //Step 3: Test to make sure we're all good so far
                        //FIX: use the shared success-test helper instead of hand-rolled
                        //string comparisons (consistent with ExecuteShpLoadProcedure)
                        string result = this.Parent.FeatureService.TestConnection(fsId);
                        if (Utility.IsSuccessfulConnectionTestResult(result))
                        {
                            //Step 4: Test to see if default cs needs to be specified
                            FdoSpatialContextList spatialContexts = this.Parent.FeatureService.GetSpatialContextInfo(fsId, false);
                            if (!string.IsNullOrEmpty(proc.CoordinateSystem))
                            {
                                //Case 1: No spatial contexts. Register one using SupplementalContextInfo
                                if (spatialContexts.SpatialContext.Count == 0)
                                {
                                    //Register the default CS from the load procedure
                                    fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource_1_0_0.SpatialContextType()
                                    {
                                        Name = "Default", //NOXLATE
                                        CoordinateSystem = proc.CoordinateSystem
                                    });

                                    //Update this feature source
                                    this.Parent.ResourceService.SaveResource(fs);
                                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                                }
                                else if (spatialContexts.SpatialContext.Count == 1) //Case 2: One spatial context, but its WKT is blank. Override using SupplementalContextInfo
                                {
                                    var sc = spatialContexts.SpatialContext[0];
                                    if (string.IsNullOrEmpty(sc.CoordinateSystemWkt))
                                    {
                                        //Register the default CS from the load procedure
                                        fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource_1_0_0.SpatialContextType()
                                        {
                                            Name = sc.Name,
                                            CoordinateSystem = proc.CoordinateSystem
                                        });

                                        //Update this feature source
                                        this.Parent.ResourceService.SaveResource(fs);
                                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                                    }
                                }
                            }
                        }
                    }
                }
            }

            if (proc.GenerateLayers)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(lyrId));
                if (!skip)
                {
                    if (proc.Type == LoadType.Dwf)
                    {
                        //Process is as follows
                        //
                        // 1. Enumerate the sheets on the drawing source
                        // 2. Set the referenced sheet to the first known sheet
                        var dwSvc = (IDrawingService)Parent.GetService((int)ServiceType.Drawing);
                        var list = dwSvc.EnumerateDrawingSections(dsId);
                        if (list.Section.Count > 0)
                        {
                            //Create drawing layer
                            var ld = ObjectFactory.CreateDefaultLayer(this.Parent, LayerType.Drawing, new Version(1, 0, 0));
                            var dl = ld.SubLayer as IDrawingLayerDefinition;
                            dl.ResourceId = dsId;
                            //Use the first one
                            dl.Sheet = list.Section[0].Name;
                            ld.ResourceID = lyrId;
                            this.Parent.ResourceService.SaveResource(ld);
                            resCreatedOrUpdated.Add(lyrId);
                            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, lyrId), current));
                        }
                    }
                    else
                    {
                        //NOTE: Because we are working against 1.0.0 object types this will always create 1.0.0 Layer Definition
                        //resources

                        //Process is as follows
                        //
                        // 1. Describe the schema of the feature source
                        // 2. If it contains at least one feature class, create a layer definition
                        // 3. Set the following layer definition properties:
                        //    - Feature Source: the feature source id
                        //    - Feature Class: the first feature class in the schema
                        //    - Geometry: the first geometry property in the first feature class
                        // 4. Infer the supported geometry types for this feature class. Toggle supported styles accordingly.

                        //Step 1: Describe the schema
                        //
                        //NOTE: I think we can get away with the full schema walk here. It's very unlikely we will be uploading a flat
                        //file with hundreds of classes. Even then, flat-file schema walk performance blows RDBMS walking performance
                        //out of the water anyway.
                        FeatureSourceDescription desc = this.Parent.FeatureService.DescribeFeatureSource(fsId);
                        if (desc.HasClasses())
                        {
                            //Step 2: Find the first feature class with a geometry property
                            ClassDefinition clsDef = null;
                            GeometricPropertyDefinition geom = null;
                            bool done = false;
                            foreach (ClassDefinition cls in desc.AllClasses)
                            {
                                if (done)
                                    break;
                                foreach (PropertyDefinition prop in cls.Properties)
                                {
                                    if (done)
                                        break;
                                    if (prop.Type == OSGeo.MapGuide.MaestroAPI.Schema.PropertyDefinitionType.Geometry)
                                    {
                                        clsDef = cls;
                                        geom = (GeometricPropertyDefinition)prop;
                                        done = true;
                                    }
                                }
                            }

                            if (clsDef != null && geom != null)
                            {
                                var ld = ObjectFactory.CreateDefaultLayer(this.Parent, LayerType.Vector, new Version(1, 0, 0));

                                //Step 3: Assign default properties
                                ld.ResourceID = lyrId;
                                var vld = ld.SubLayer as IVectorLayerDefinition;
                                vld.ResourceId = fsId;
                                vld.FeatureName = clsDef.QualifiedName;
                                vld.Geometry = geom.Name;

                                //Step 4: Infer geometry storage support and remove unsupported styles
                                var geomTypes = geom.GetIndividualGeometricTypes();
                                var scale = vld.GetScaleRangeAt(0);
                                var remove = new List<string>();
                                if (Array.IndexOf(geomTypes, FeatureGeometricType.Point) < 0)
                                {
                                    remove.Add(FeatureGeometricType.Point.ToString().ToLower());
                                }
                                if (Array.IndexOf(geomTypes, FeatureGeometricType.Curve) < 0)
                                {
                                    remove.Add(FeatureGeometricType.Curve.ToString().ToLower());
                                }
                                if (Array.IndexOf(geomTypes, FeatureGeometricType.Surface) < 0)
                                {
                                    remove.Add(FeatureGeometricType.Surface.ToString().ToLower());
                                }
                                scale.RemoveStyles(remove);

                                this.Parent.ResourceService.SaveResource(ld);
                                resCreatedOrUpdated.Add(lyrId);
                                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, lyrId), current));
                            }
                        }
                    }
                }
            }
            success = true;
        }
        else
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateFileNotFound, file), current));
        }

        //This file is now fully processed, so increment progress
        current += pcPerFile;
        if (success)
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateFileProcessed, file), current));
        }
    }
    return resCreatedOrUpdated.ToArray();
}
/// <summary>
/// Executes the specified load procedure. Only SDF and SHP load procedures are supported.
/// Also note that the following load procedure features are ignored during execution:
/// - Generalization of data
/// - Conversion from SHP to SDF
/// - SDF2 to SDF3 conversion
/// - SDF3 duplicate key handling
/// </summary>
/// <param name="proc">The load procedure to execute</param>
/// <param name="callback">A progress callback; may be null, in which case progress is discarded</param>
/// <returns>
/// A list of resource IDs that were created from the execution of this load procedure
/// </returns>
public string[] Execute(ILoadProcedure proc, LengthyOperationProgressCallBack callback)
{
    //TODO: Localize callback messages
    //TODO: Localize exception messages
    //TODO: This would currently overwrite everything. In reality, the load procedure has
    //a list of resource ids which are overwritable, anything not on the list is untouchable.
    //I presume if this list is empty, then everything is overwritten and the resource list
    //list is then assigned to the load procedure, which is then updated so that on subsequent runs,
    //only resources in the list are overwritten instead of everything.

    //Substitute a no-op progress callback if none was supplied
    LengthyOperationProgressCallBack cb = callback;
    if (cb == null)
        cb = delegate { };

    //TODO: SDF and SHP load procedures share lots of common logic. Merge the two
    //once everything's all good.
    var type = proc.SubType.Type;
    if (type == LoadType.Dwg || type == LoadType.Raster)
        throw new NotSupportedException(Strings.UnsupportedLoadProcedureType);

    var sproc = (IBaseLoadProcedure)proc.SubType;
    bool firstExecute = true;
    string[] resourcesCreatedOrUpdated;

    if (type == LoadType.Shp)
    {
        var shpl = (IShpLoadProcedure)sproc;
        if (!this.IgnoreUnsupportedFeatures)
        {
            //Anything less than 100% implies use of generalization
            if (shpl.Generalization < 100.0)
                throw new NotSupportedException(Strings.LPROC_GeneralizationNotSupported);
            //Can't do this because we don't have a portable .net FDO/MG Feature Service
            if (shpl.ConvertToSdf)
                throw new NotSupportedException(Strings.LPROC_ConvertToSdf3NotSupported);
        }
        resourcesCreatedOrUpdated = ExecuteShpLoadProcedure(cb, shpl, ref firstExecute);
    }
    else
    {
        if (!this.IgnoreUnsupportedFeatures)
            CheckUnsupportedFeatures(sproc);
        resourcesCreatedOrUpdated = ExecuteBaseProcedure(cb, sproc, ref firstExecute);
    }

    //On the first execution, record the generated resource ids on the procedure so
    //subsequent runs only update those resources. The caller is responsible for saving
    //the now-dirty load procedure.
    if (firstExecute)
    {
        sproc.ResourceId.Clear();
        foreach (var id in resourcesCreatedOrUpdated)
        {
            sproc.ResourceId.Add(id);
        }
    }
    return resourcesCreatedOrUpdated;
}
/// <summary>
/// Executes a SHP load procedure: for each source .shp file, optionally creates a
/// feature source (uploading the .shp and its companion files as resource data) and
/// a v1.0.0 layer definition pointing at the first geometry-bearing feature class.
/// </summary>
/// <param name="cb">Progress callback; the caller (Execute) guarantees this is non-null</param>
/// <param name="shpl">The SHP load procedure to execute</param>
/// <param name="firstExecution">Set to true when the procedure has no recorded resource id list, false otherwise</param>
/// <returns>The ids of all resources created or updated by this run</returns>
private string[] ExecuteShpLoadProcedure(LengthyOperationProgressCallBack cb, IShpLoadProcedure shpl, ref bool firstExecution)
{
    List<string> resCreatedOrUpdated = new List<string>();

    var shpFiles = shpl.SourceFile;
    //NOTE(review): integer division; also throws DivideByZeroException if the procedure
    //has no source files — TODO confirm callers guarantee at least one file
    int pcPerFile = (int)(100 / shpFiles.Count);
    int current = 0;

    //Normalize all configured folder paths to end with a trailing slash
    string root = shpl.RootPath;
    if (!root.EndsWith("/")) //NOXLATE
    {
        root += "/"; //NOXLATE
    }
    string sdp = shpl.SpatialDataSourcesPath;
    string lp = shpl.LayersPath;
    if (!string.IsNullOrEmpty(sdp))
    {
        if (!sdp.EndsWith("/")) //NOXLATE
        {
            sdp += "/"; //NOXLATE
        }
    }
    if (!string.IsNullOrEmpty(lp))
    {
        if (!lp.EndsWith("/")) //NOXLATE
        {
            lp += "/"; //NOXLATE
        }
    }

    //Feature sources and layers land in their dedicated paths when configured,
    //otherwise under the procedure's root path
    string fsRoot = (string.IsNullOrEmpty(sdp) ? root : sdp) + shpl.SpatialDataSourcesFolder;
    string layerRoot = (string.IsNullOrEmpty(lp) ? root : lp) + shpl.LayersFolder;
    if (!fsRoot.EndsWith("/")) //NOXLATE
    {
        fsRoot += "/"; //NOXLATE
    }
    if (!layerRoot.EndsWith("/")) //NOXLATE
    {
        layerRoot += "/"; //NOXLATE
    }

    //A previously recorded resource id list marks a re-run: only resources on that
    //list are eligible to be updated.
    //NOTE(review): this only checks for null, while ExecuteBaseProcedure also requires
    //Count > 0 — an empty (non-null) list here yields firstExecution == false. Confirm
    //whether that asymmetry is intentional.
    List<string> resToUpdate = new List<string>();
    if (shpl.ResourceId != null)
    {
        resToUpdate.AddRange(shpl.ResourceId);
        firstExecution = false;
    }
    else
    {
        firstExecution = true;
    }

    Dictionary<string, List<string>> extraFiles = new Dictionary<string, List<string>>();
    //Unlike SDF, a SHP file actually consists of multiple files
    foreach (string shp in shpFiles)
    {
        if (!extraFiles.ContainsKey(shp))
        {
            extraFiles[shp] = new List<string>();
        }
        //we want to preserve casing for everything before the extension
        string prefix = shp.Substring(0, shp.LastIndexOf(".") + 1); //NOXLATE
        extraFiles[shp].Add(prefix + "shx"); //NOXLATE
        extraFiles[shp].Add(prefix + "dbf"); //NOXLATE
        extraFiles[shp].Add(prefix + "idx"); //NOXLATE
        extraFiles[shp].Add(prefix + "prj"); //NOXLATE
        extraFiles[shp].Add(prefix + "cpg"); //NOXLATE
        //TODO: Are we missing anything else?
    }

    foreach (string file in shpFiles)
    {
        bool success = false;
        if (System.IO.File.Exists(file))
        {
            string resName = System.IO.Path.GetFileNameWithoutExtension(file);
            string dataName = System.IO.Path.GetFileName(file);
            string fsId = fsRoot + resName + ".FeatureSource"; //NOXLATE
            string lyrId = layerRoot + resName + ".LayerDefinition"; //NOXLATE

            if (shpl.GenerateSpatialDataSources)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(fsId));
                if (!skip)
                {
                    //Process is as follows:
                    //
                    // 1. Create and save feature source document.
                    // 2. Upload sdf file as resource data for this document.
                    // 3. Test the connection, it should check out.
                    // 4. If no spatial contexts are detected, assign a default one from the load procedure and save the modified feature source.

                    //Step 1: Create feature source document
                    var conp = new NameValueCollection();
                    conp["DefaultFileLocation"] = StringConstants.MgDataFilePath + dataName; //NOXLATE
                    var fs = ObjectFactory.CreateFeatureSource("OSGeo.SHP", conp); //NOXLATE
                    fs.ResourceID = fsId;
                    this.Parent.ResourceService.SaveResource(fs);
                    resCreatedOrUpdated.Add(fsId);
                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, fsId), current));

                    //TODO: When the infrastructure is available to us (ie. A portable .net FDO/MG Feature Service API wrapper)
                    //Maybe then we can actually implement the generalization and conversion properties. Until then, we skip
                    //these options

                    //Step 2: Load resource data for document
                    this.Parent.ResourceService.SetResourceData(fsId, dataName, ResourceDataType.File, System.IO.File.OpenRead(file));
                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, file), current));

                    //Load supplementary files (shx/dbf/idx/prj/cpg) that exist on disk
                    foreach (string extraFile in extraFiles[file])
                    {
                        string dn = System.IO.Path.GetFileName(extraFile);
                        if (System.IO.File.Exists(extraFile))
                        {
                            this.Parent.ResourceService.SetResourceData(fsId, dn, ResourceDataType.File, System.IO.File.OpenRead(extraFile));
                            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, extraFile), current));
                        }
                    }

                    //Step 3: Test to make sure we're all good so far
                    string result = this.Parent.FeatureService.TestConnection(fsId);
                    if (Utility.IsSuccessfulConnectionTestResult(result))
                    {
                        //Step 4: Test to see if default cs needs to be specified
                        FdoSpatialContextList spatialContexts = this.Parent.FeatureService.GetSpatialContextInfo(fsId, false);
                        if (!string.IsNullOrEmpty(shpl.CoordinateSystem))
                        {
                            bool hasPrj = false;
                            //If there is no prj file, we can just upload one with the specified WKT
                            var resData = this.Parent.ResourceService.EnumerateResourceData(fs.ResourceID);
                            foreach (var resd in resData.ResourceData)
                            {
                                if (resd.Name == resName + ".prj") //NOXLATE
                                {
                                    hasPrj = true;
                                    break;
                                }
                            }
                            //Case 1: No .prj file. Most probable
                            if (!hasPrj)
                            {
                                //Write the WKT to a temp file and upload it as the .prj resource data
                                string tmp = System.IO.Path.GetTempFileName();
                                System.IO.File.WriteAllText(tmp, shpl.CoordinateSystem);
                                using (var fsr = System.IO.File.OpenRead(tmp))
                                {
                                    this.Parent.ResourceService.SetResourceData(fs.ResourceID, resName + ".prj", ResourceDataType.File, fsr); //NOXLATE
                                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateUploadedPrj, resName), current));
                                }
                                //Best-effort cleanup of the temp file; failure here is harmless
                                try
                                {
                                    System.IO.File.Delete(tmp);
                                }
                                catch { }
                            }
                            else if (spatialContexts.SpatialContext.Count == 0) //Case 2: No Spatial contexts. Declare one using SupplementalContextInfo
                            {
                                //Register the default CS from the load procedure
                                fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource.v1_0_0.SpatialContextType()
                                {
                                    Name = "Default", //NOXLATE
                                    CoordinateSystem = shpl.CoordinateSystem
                                });
                                //Update this feature source
                                this.Parent.ResourceService.SaveResource(fs);
                                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                            }
                            else if (spatialContexts.SpatialContext.Count == 1) //Case 3: One spatial context with blank WKT. Override it using the SupplementalContextInfo
                            {
                                var sc = spatialContexts.SpatialContext[0];
                                if (string.IsNullOrEmpty(sc.CoordinateSystemWkt))
                                {
                                    //Register the default CS from the load procedure
                                    fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource.v1_0_0.SpatialContextType()
                                    {
                                        Name = sc.Name,
                                        CoordinateSystem = shpl.CoordinateSystem
                                    });
                                    //Update this feature source
                                    this.Parent.ResourceService.SaveResource(fs);
                                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                                }
                            }
                        }
                    }
                }
            }

            if (shpl.GenerateLayers)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(lyrId));
                if (!skip)
                {
                    //NOTE: Because we are working against 1.0.0 object types this will always create 1.0.0 Layer Definition
                    //resources

                    //Process is as follows
                    //
                    // 1. Describe the schema of the feature source
                    // 2. If it contains at least one feature class, create a layer definition
                    // 3. Set the following layer definition properties:
                    //   - Feature Source: the feature source id
                    //   - Feature Class: the first feature class in the schema
                    //   - Geometry: the first geometry property in the first feature class
                    // 4. Infer the supported geometry types for this feature class. Toggle supported styles accordingly.

                    //Step 1: Describe the schema
                    //
                    //NOTE: I think we can get away with the full schema walk here. It's very unlikely we will be uploading a flat
                    //file with hundreds of classes. Even then, flat-file schema walk performance blows RDBMS walking performance
                    //out of the water anyway.
                    FeatureSourceDescription desc = this.Parent.FeatureService.DescribeFeatureSource(fsId);

                    //Step 2: Find the first feature class with a geometry property
                    ClassDefinition clsDef = null;
                    GeometricPropertyDefinition geom = null;
                    bool done = false;
                    foreach (ClassDefinition cls in desc.AllClasses)
                    {
                        if (done)
                        {
                            break;
                        }
                        foreach (PropertyDefinition prop in cls.Properties)
                        {
                            if (done)
                            {
                                break;
                            }
                            if (prop.Type == OSGeo.MapGuide.MaestroAPI.Schema.PropertyDefinitionType.Geometry)
                            {
                                clsDef = cls;
                                geom = (GeometricPropertyDefinition)prop;
                                done = true;
                            }
                        }
                    }

                    if (clsDef != null && geom != null)
                    {
                        var ld = ObjectFactory.CreateDefaultLayer(LayerType.Vector, new Version(1, 0, 0));

                        //Step 3: Assign default properties
                        ld.ResourceID = lyrId;
                        var vld = ld.SubLayer as IVectorLayerDefinition;
                        vld.ResourceId = fsId;
                        vld.FeatureName = clsDef.QualifiedName;
                        vld.Geometry = geom.Name;

                        //Step 4: Infer geometry storage support and remove unsupported styles
                        var scale = vld.GetScaleRangeAt(0);
                        var geomTypes = geom.GetIndividualGeometricTypes();
                        var remove = new List<string>();
                        if (Array.IndexOf(geomTypes, FeatureGeometricType.Point) < 0)
                        {
                            remove.Add(FeatureGeometricType.Point.ToString().ToLower());
                        }
                        if (Array.IndexOf(geomTypes, FeatureGeometricType.Curve) < 0)
                        {
                            remove.Add(FeatureGeometricType.Curve.ToString().ToLower());
                        }
                        if (Array.IndexOf(geomTypes, FeatureGeometricType.Surface) < 0)
                        {
                            remove.Add(FeatureGeometricType.Surface.ToString().ToLower());
                        }
                        scale.RemoveStyles(remove);

                        this.Parent.ResourceService.SaveResource(ld);
                        resCreatedOrUpdated.Add(lyrId);
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, lyrId), current));
                    }
                }
            }
            success = true;
        }
        else
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateFileNotFound, file), current));
        }
        //This file is now fully processed, so increment progress
        current += pcPerFile;
        if (success)
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSuccess, file), current));
        }
    }

    return (resCreatedOrUpdated.ToArray());
}
/// <summary>
/// Migrates a specific resource (and its dependent resources) to the target connection
/// </summary>
/// <param name="resourceId">The id of the resource to migrate</param>
/// <param name="dependentResourceIds">The array of dependent resource ids</param>
/// <param name="overwrite">If true, all dependent resources that already exist in the target connection are overwritten, otherwise these are not copied over</param>
/// <param name="callback">A callback method to indicate progress. May be null</param>
/// <returns>An array of resource ids that were succesfully migrated</returns>
public string[] MigrateResource(string resourceId, string[] dependentResourceIds, bool overwrite, LengthyOperationProgressCallBack callback)
{
    Check.ArgumentNotEmpty(resourceId, nameof(resourceId));
    Check.ArgumentNotNull(dependentResourceIds, nameof(dependentResourceIds));

    //TODO: Figure out a more elegant strategy of handling saving resources
    //to older versions (downgrading?)

    //TODO: This should not return a string array, it should return an array
    //of migration results. This requires a new API (Capability?) to test whether a resource
    //can be saved to this connection

    List<string> migrated = new List<string>();
    LengthyOperationProgressCallBack cb = callback;
    //Substitute a no-op callback so progress can be reported unconditionally
    if (cb == null)
    {
        cb = new LengthyOperationProgressCallBack((o, a) => { });
    }
    var targetCaps = _target.Capabilities;

    //Progress is apportioned evenly across the root resource plus each dependent.
    //NOTE(review): integer division means reported progress may never reach exactly 100
    int total = dependentResourceIds.Length + 1;
    int unit = 100 / total;
    int progress = 0;
    try
    {
        //Copy the specified resource
        IResource res = _source.ResourceService.GetResource(resourceId);

        //Check if downgrading is required
        var maxVer = targetCaps.GetMaxSupportedResourceVersion(res.ResourceType);
        if (res.ResourceVersion > maxVer)
        {
            res = _converter.Convert(res, maxVer);
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.DowngradedResource, resourceId, maxVer), progress));
        }
        _target.ResourceService.SaveResource(res);

        //Copy its resource data
        var resData = _source.ResourceService.EnumerateResourceData(res.ResourceID);
        foreach (var data in resData.ResourceData)
        {
            using (var stream = _source.ResourceService.GetResourceData(res.ResourceID, data.Name))
            {
                if (!stream.CanSeek)
                {
                    //Buffer non-seekable source streams through a pooled memory stream before upload
                    using (var ms = MemoryStreamPool.GetStream())
                    {
                        Utility.CopyStream(stream, ms, false);
                        ms.Position = 0L;
                        _target.ResourceService.SetResourceData(resourceId, data.Name, data.Type, ms);
                    }
                }
                else
                {
                    stream.Position = 0L;
                    _target.ResourceService.SetResourceData(resourceId, data.Name, data.Type, stream);
                }
            }
        }
        migrated.Add(resourceId);
    }
    catch //This happens if we're saving a resource to an older version where this resource version does not exist
    {
        //NOTE(review): this broad catch also hides unrelated failures (I/O, auth, network);
        //consider narrowing or at least logging — confirm intended behavior before changing
    }

    //If the first one failed, abort early. Don't bother with the rest
    if (migrated.Count == 1)
    {
        progress += unit;
        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.CopiedResource, resourceId), progress));

        //Now copy dependents
        foreach (var resId in dependentResourceIds)
        {
            //Copy unless the target already has it and overwrite was not requested
            bool existsOnTarget = _target.ResourceService.ResourceExists(resId);
            if ((existsOnTarget && overwrite) || !existsOnTarget)
            {
                try
                {
                    //Copy the specified resource
                    IResource res = _source.ResourceService.GetResource(resId);
                    _target.ResourceService.SaveResource(res);

                    //Copy its resource data
                    var resData = _source.ResourceService.EnumerateResourceData(res.ResourceID);
                    foreach (var data in resData.ResourceData)
                    {
                        using (var stream = _source.ResourceService.GetResourceData(res.ResourceID, data.Name))
                        {
                            if (!stream.CanSeek)
                            {
                                using (var ms = MemoryStreamPool.GetStream())
                                {
                                    Utility.CopyStream(stream, ms, false);
                                    ms.Position = 0L;
                                    _target.ResourceService.SetResourceData(resId, data.Name, data.Type, ms);
                                }
                            }
                            else
                            {
                                stream.Position = 0L;
                                _target.ResourceService.SetResourceData(resId, data.Name, data.Type, stream);
                            }
                        }
                    }
                    migrated.Add(resId);
                }
                catch //This happens if we're saving a resource to an older version where this resource version does not exist
                {
                }
                progress += unit;
                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.CopiedResource, resId), progress));
            }
        }
    }
    return (migrated.ToArray());
}
/// <summary>
/// Copies resource from the source connection to another connection.
/// </summary>
/// <param name="sourceResourceIds">The array of source resource ids</param>
/// <param name="targetResourceIds">The array of target resource ids to copy to. Each resource id in the source array will be copied to the corresponding resource id in the target array</param>
/// <param name="overwrite">Indicates whether to overwrite target resources that already exist; when false, existing targets are skipped</param>
/// <param name="options">Re-base options. When not null, references within each copied resource are re-based from the source folder to the target folder</param>
/// <param name="callback">A callback method to indicate progress. May be null</param>
/// <returns>The source resource ids that were actually copied</returns>
public string[] CopyResources(string[] sourceResourceIds, string[] targetResourceIds, bool overwrite, RebaseOptions options, LengthyOperationProgressCallBack callback)
{
    Check.ArgumentNotNull(sourceResourceIds, nameof(sourceResourceIds));
    Check.ArgumentNotNull(targetResourceIds, nameof(targetResourceIds));
    Check.ThatPreconditionIsMet(sourceResourceIds.Length == targetResourceIds.Length, $"{nameof(sourceResourceIds)}.Length == {nameof(targetResourceIds)}.Length");

    //FIX: bail out early for empty (but valid, equal-length) input; the per-resource
    //progress unit computation below would otherwise throw a DivideByZeroException
    if (sourceResourceIds.Length == 0)
    {
        return Array.Empty<string>();
    }

    var copiedItems = new List<string>();
    var cb = callback;
    //Substitute a no-op callback so progress can be reported unconditionally
    if (cb == null)
    {
        cb = new LengthyOperationProgressCallBack((s, e) =>
        {
            //Do nothing
        });
    }
    var targetCaps = _target.Capabilities;
    int copied = 0;
    //Progress is apportioned evenly across all resources (integer division)
    int unit = 100 / sourceResourceIds.Length;
    int progress = 0;
    string message = string.Empty;
    for (int i = 0; i < sourceResourceIds.Length; i++)
    {
        var srcResId = sourceResourceIds[i];
        var dstResId = targetResourceIds[i];

        //Get the source resource object
        IResource res = _source.ResourceService.GetResource(srcResId);

        //Skip if target exists and overwrite is not specified
        if (!overwrite && _target.ResourceService.ResourceExists(dstResId))
        {
            progress += unit;
            continue;
        }
        else
        {
            //Check if downgrading is required
            var maxVer = targetCaps.GetMaxSupportedResourceVersion(res.ResourceType);
            if (res.ResourceVersion > maxVer)
            {
                res = _converter.Convert(res, maxVer);
                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.DowngradedResource, srcResId, maxVer), progress));
            }

            //Now rebase if rebase options supplied
            if (options != null)
            {
                var rebaser = new ResourceRebaser(res);
                res = rebaser.Rebase(options.SourceFolder, options.TargetFolder);
            }

            //Save resource
            _target.ResourceService.SaveResourceAs(res, dstResId);

            //Copy resource data
            var resData = _source.ResourceService.EnumerateResourceData(res.ResourceID);
            foreach (var data in resData.ResourceData)
            {
                using (var stream = _source.ResourceService.GetResourceData(res.ResourceID, data.Name))
                {
                    if (!stream.CanSeek)
                    {
                        //Buffer non-seekable source streams through a pooled memory stream before upload
                        using (var ms = MemoryStreamPool.GetStream())
                        {
                            Utility.CopyStream(stream, ms, false);
                            ms.Position = 0L;
                            _target.ResourceService.SetResourceData(dstResId, data.Name, data.Type, ms);
                        }
                    }
                    else
                    {
                        stream.Position = 0L;
                        _target.ResourceService.SetResourceData(dstResId, data.Name, data.Type, stream);
                    }
                }
            }
            copied++;
            message = string.Format(Strings.CopiedResourceToTarget, srcResId, dstResId);
        }
        copiedItems.Add(srcResId);
        progress += unit;
        cb(this, new LengthyOperationProgressArgs(message, progress));
    }
    return copiedItems.ToArray();
}
/// <summary>
/// Not implemented on this connection implementation.
/// </summary>
/// <param name="loadProc">The load procedure to execute</param>
/// <param name="callback">A callback method to indicate progress</param>
/// <param name="ignoreUnsupportedFeatures">If true, unsupported load procedure features are ignored rather than rejected</param>
/// <returns>Never returns</returns>
/// <exception cref="NotImplementedException">Always thrown</exception>
public string[] ExecuteLoadProcedure(OSGeo.MapGuide.ObjectModels.LoadProcedure.ILoadProcedure loadProc, LengthyOperationProgressCallBack callback, bool ignoreUnsupportedFeatures)
{
    throw new NotImplementedException();
}
/// <summary>
/// Not implemented on this connection implementation.
/// </summary>
/// <param name="resourceID">The id of the load procedure resource to execute</param>
/// <param name="callback">A callback method to indicate progress</param>
/// <param name="ignoreUnsupportedFeatures">If true, unsupported load procedure features are ignored rather than rejected</param>
/// <returns>Never returns</returns>
/// <exception cref="NotImplementedException">Always thrown</exception>
public string[] ExecuteLoadProcedure(string resourceID, LengthyOperationProgressCallBack callback, bool ignoreUnsupportedFeatures)
{
    throw new NotImplementedException();
}
/// <summary>
/// Shortcut API to migrate a specific resource to the target connection. Dependent resources are automatically
/// migrated as well. This copies all dependent resources of the specified resource.
/// </summary>
/// <param name="resourceId">The id of the resource to migrate</param>
/// <param name="overwrite">If true, all dependent resources that already exist in the target connection are overwritten, otherwise these are not copied over</param>
/// <param name="callback">A callback method to indicate progress</param>
/// <returns>An array of resource ids that were succesfully migrated</returns>
public string[] MigrateResource(string resourceId, bool overwrite, LengthyOperationProgressCallBack callback)
{
    Check.NotEmpty(resourceId, "resourceId"); //NOXLATE

    //Walk the reverse reference graph to gather the full set of dependent resource ids
    Dictionary<string, string> dependencySet = new Dictionary<string, string>();
    var references = GetReverseReferences(resourceId);
    BuildFullDependencyList(dependencySet, references);

    //Snapshot the collected keys into an array for the overload below
    string[] dependentIds = new string[dependencySet.Count];
    dependencySet.Keys.CopyTo(dependentIds, 0);

    //Delegate to the overload that takes an explicit dependent resource id list
    return MigrateResource(resourceId, dependentIds, overwrite, callback);
}
/// <summary>
/// Moves resources from the source connection to the specified folder on the target connection. Folder structure of the source is discarded
/// </summary>
/// <param name="resourceIds">The ids of the resources to move. Each is deleted from the source after a successful copy</param>
/// <param name="folderId">The target folder id the resources are moved into</param>
/// <param name="overwrite">If true, existing resources on the target are overwritten; otherwise they are skipped (and NOT deleted from the source)</param>
/// <param name="callback">A callback method to indicate progress. May be null</param>
/// <returns>The number of resources moved</returns>
public int MoveResources(string[] resourceIds, string folderId, bool overwrite, LengthyOperationProgressCallBack callback)
{
    Check.NotNull(resourceIds, "resourceIds"); //NOXLATE
    Check.NotEmpty(folderId, "folderId"); //NOXLATE

    //FIX: bail out early on an empty id list; the per-resource progress unit
    //computation below would otherwise throw a DivideByZeroException
    if (resourceIds.Length == 0)
    {
        return 0;
    }

    var cb = callback;
    //Substitute a no-op callback so progress can be reported unconditionally
    if (cb == null)
    {
        cb = new LengthyOperationProgressCallBack((s, e) =>
        {
            //Do nothing
        });
    }
    var targetCaps = _target.Capabilities;
    int moved = 0;
    //Progress is apportioned evenly across all resources (integer division)
    int unit = 100 / resourceIds.Length;
    int progress = 0;
    foreach (var resId in resourceIds)
    {
        //The target id keeps the resource's name and type but is re-rooted under folderId
        string targetId = folderId + ResourceIdentifier.GetName(resId) + "." + ResourceIdentifier.GetResourceType(resId); //NOXLATE
        string message = string.Empty;
        IResource res = _source.ResourceService.GetResource(resId);

        //Skip if target exists and overwrite is not specified
        if (!overwrite && _target.ResourceService.ResourceExists(targetId))
        {
            progress += unit;
            continue;
        }
        else
        {
            //Check if downgrading is required
            var maxVer = targetCaps.GetMaxSupportedResourceVersion(res.ResourceType);
            if (res.ResourceVersion > maxVer)
            {
                res = _converter.Convert(res, maxVer);
                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.DowngradedResource, resId, maxVer), progress));
            }

            //Save resource
            _target.ResourceService.SaveResourceAs(res, targetId);

            //Copy resource data
            foreach (var data in res.EnumerateResourceData())
            {
                using (var stream = res.GetResourceData(data.Name))
                {
                    if (!stream.CanSeek)
                    {
                        //Buffer non-seekable source streams through memory before upload
                        using (var ms = new MemoryStream())
                        {
                            Utility.CopyStream(stream, ms, false);
                            ms.Position = 0L;
                            _target.ResourceService.SetResourceData(targetId, data.Name, data.Type, ms);
                        }
                    }
                    else
                    {
                        stream.Position = 0L;
                        _target.ResourceService.SetResourceData(targetId, data.Name, data.Type, stream);
                    }
                }
            }
            moved++;
            //The source copy is only deleted once the target copy (and its data) has been written
            _source.ResourceService.DeleteResource(resId);
            message = string.Format(Strings.CopiedResource, resId);
        }
        progress += unit;
        cb(this, new LengthyOperationProgressArgs(message, progress));
    }
    return moved;
}
/// <summary>
/// Migrates a specific resource (and its dependent resources) to the target connection
/// </summary>
/// <param name="resourceId">The id of the resource to migrate</param>
/// <param name="dependentResourceIds">The array of dependent resource ids</param>
/// <param name="overwrite">If true, all dependent resources that already exist in the target connection are overwritten, otherwise these are not copied over</param>
/// <param name="callback">A callback method to indicate progress. May be null</param>
/// <returns>An array of resource ids that were succesfully migrated</returns>
public string[] MigrateResource(string resourceId, string[] dependentResourceIds, bool overwrite, LengthyOperationProgressCallBack callback)
{
    Check.NotEmpty(resourceId, "resourceId"); //NOXLATE
    Check.NotNull(dependentResourceIds, "dependentResourceIds"); //NOXLATE

    //TODO: Figure out a more elegant strategy of handling saving resources
    //to older versions (downgrading?)

    //TODO: This should not return a string array, it should return an array
    //of migration results. This requires a new API (Capability?) to test whether a resource
    //can be saved to this connection

    List<string> migrated = new List<string>();
    LengthyOperationProgressCallBack cb = callback;
    //Substitute a no-op callback so progress can be reported unconditionally
    if (cb == null)
    {
        cb = new LengthyOperationProgressCallBack((o, a) => { });
    }
    var targetCaps = _target.Capabilities;

    //Progress is apportioned evenly across the root resource plus each dependent.
    //NOTE(review): integer division means reported progress may never reach exactly 100
    int total = dependentResourceIds.Length + 1;
    int unit = 100 / total;
    int progress = 0;
    try
    {
        //Copy the specified resource
        IResource res = _source.ResourceService.GetResource(resourceId);

        //Check if downgrading is required
        var maxVer = targetCaps.GetMaxSupportedResourceVersion(res.ResourceType);
        if (res.ResourceVersion > maxVer)
        {
            res = _converter.Convert(res, maxVer);
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.DowngradedResource, resourceId, maxVer), progress));
        }
        _target.ResourceService.SaveResource(res);

        //Copy its resource data
        foreach (var data in res.EnumerateResourceData())
        {
            using (var stream = res.GetResourceData(data.Name))
            {
                if (!stream.CanSeek)
                {
                    //Buffer non-seekable source streams through memory before upload
                    using (var ms = new MemoryStream())
                    {
                        Utility.CopyStream(stream, ms, false);
                        ms.Position = 0L;
                        _target.ResourceService.SetResourceData(resourceId, data.Name, data.Type, ms);
                    }
                }
                else
                {
                    stream.Position = 0L;
                    _target.ResourceService.SetResourceData(resourceId, data.Name, data.Type, stream);
                }
            }
        }
        migrated.Add(resourceId);
    }
    catch //This happens if we're saving a resource to an older version where this resource version does not exist
    {
        //NOTE(review): this broad catch also hides unrelated failures (I/O, auth, network);
        //consider narrowing or at least logging — confirm intended behavior before changing
    }

    //If the first one failed, abort early. Don't bother with the rest
    if (migrated.Count == 1)
    {
        progress += unit;
        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.CopiedResource, resourceId), progress));

        //Now copy dependents
        foreach (var resId in dependentResourceIds)
        {
            //Copy unless the target already has it and overwrite was not requested
            bool existsOnTarget = _target.ResourceService.ResourceExists(resId);
            if ((existsOnTarget && overwrite) || !existsOnTarget)
            {
                try
                {
                    //Copy the specified resource
                    IResource res = _source.ResourceService.GetResource(resId);
                    _target.ResourceService.SaveResource(res);

                    //Copy its resource data
                    foreach (var data in res.EnumerateResourceData())
                    {
                        using (var stream = res.GetResourceData(data.Name))
                        {
                            if (!stream.CanSeek)
                            {
                                using (var ms = new MemoryStream())
                                {
                                    Utility.CopyStream(stream, ms, false);
                                    ms.Position = 0L;
                                    _target.ResourceService.SetResourceData(resId, data.Name, data.Type, ms);
                                }
                            }
                            else
                            {
                                stream.Position = 0L;
                                _target.ResourceService.SetResourceData(resId, data.Name, data.Type, stream);
                            }
                        }
                    }
                    migrated.Add(resId);
                }
                catch //This happens if we're saving a resource to an older version where this resource version does not exist
                {
                }
                progress += unit;
                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.CopiedResource, resId), progress));
            }
        }
    }
    return migrated.ToArray();
}
/// <summary>
/// Copies resource from the source connection to another connection.
/// </summary>
/// <param name="sourceResourceIds">The array of source resource ids</param>
/// <param name="targetResourceIds">The array of target resource ids to copy to. Each resource id in the source array will be copied to the corresponding resource id in the target array</param>
/// <param name="overwrite">Indicates whether to overwrite target resources that already exist; when false, existing targets are skipped</param>
/// <param name="options">Re-base options. When not null, references within each copied resource are re-based from the source folder to the target folder</param>
/// <param name="callback">A callback method to indicate progress. May be null</param>
/// <returns>The source resource ids that were actually copied</returns>
public string[] CopyResources(string[] sourceResourceIds, string[] targetResourceIds, bool overwrite, RebaseOptions options, LengthyOperationProgressCallBack callback)
{
    Check.NotNull(sourceResourceIds, "sourceResourceIds"); //NOXLATE
    Check.NotNull(targetResourceIds, "targetResourceIds"); //NOXLATE
    //FIX: the precondition message previously said "resourceIds.Length", which does not
    //match the actual parameter name being validated
    Check.Precondition(sourceResourceIds.Length == targetResourceIds.Length, "sourceResourceIds.Length == targetResourceIds.Length"); //NOXLATE

    //FIX: bail out early for empty (but valid, equal-length) input; the per-resource
    //progress unit computation below would otherwise throw a DivideByZeroException
    if (sourceResourceIds.Length == 0)
    {
        return new string[0];
    }

    var copiedItems = new List<string>();
    var cb = callback;
    //Substitute a no-op callback so progress can be reported unconditionally
    if (cb == null)
    {
        cb = new LengthyOperationProgressCallBack((s, e) =>
        {
            //Do nothing
        });
    }
    var targetCaps = _target.Capabilities;
    int copied = 0;
    //Progress is apportioned evenly across all resources (integer division)
    int unit = 100 / sourceResourceIds.Length;
    int progress = 0;
    string message = string.Empty;
    for (int i = 0; i < sourceResourceIds.Length; i++)
    {
        var srcResId = sourceResourceIds[i];
        var dstResId = targetResourceIds[i];

        //Get the source resource object
        IResource res = _source.ResourceService.GetResource(srcResId);

        //Skip if target exists and overwrite is not specified
        if (!overwrite && _target.ResourceService.ResourceExists(dstResId))
        {
            progress += unit;
            continue;
        }
        else
        {
            //Check if downgrading is required
            var maxVer = targetCaps.GetMaxSupportedResourceVersion(res.ResourceType);
            if (res.ResourceVersion > maxVer)
            {
                res = _converter.Convert(res, maxVer);
                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.DowngradedResource, srcResId, maxVer), progress));
            }

            //Now rebase if rebase options supplied
            if (options != null)
            {
                var rebaser = new ResourceRebaser(res);
                res = rebaser.Rebase(options.SourceFolder, options.TargetFolder);
            }

            //Save resource
            _target.ResourceService.SaveResourceAs(res, dstResId);

            //Copy resource data
            foreach (var data in res.EnumerateResourceData())
            {
                using (var stream = res.GetResourceData(data.Name))
                {
                    if (!stream.CanSeek)
                    {
                        //Buffer non-seekable source streams through memory before upload
                        using (var ms = new MemoryStream())
                        {
                            Utility.CopyStream(stream, ms, false);
                            ms.Position = 0L;
                            _target.ResourceService.SetResourceData(dstResId, data.Name, data.Type, ms);
                        }
                    }
                    else
                    {
                        stream.Position = 0L;
                        _target.ResourceService.SetResourceData(dstResId, data.Name, data.Type, stream);
                    }
                }
            }
            copied++;
            message = string.Format(Strings.CopiedResourceToTarget, srcResId, dstResId);
        }
        copiedItems.Add(srcResId);
        progress += unit;
        cb(this, new LengthyOperationProgressArgs(message, progress));
    }
    return copiedItems.ToArray();
}
/// <summary>
/// Executes a file-based load procedure: for each source file on the procedure it
/// optionally creates a Drawing Source (DWF) or Feature Source (SDF/SQLite) plus a
/// matching Layer Definition under the configured folders, reporting progress via
/// <paramref name="cb"/>.
/// </summary>
/// <param name="cb">Progress callback, invoked as each resource is created or loaded</param>
/// <param name="proc">The load procedure being executed</param>
/// <param name="firstExecution">Set to false when the procedure already carries a non-empty
/// resource id list (only those ids are updated); set to true otherwise</param>
/// <returns>The ids of all resources created or updated by this run</returns>
private string[] ExecuteBaseProcedure(LengthyOperationProgressCallBack cb, IBaseLoadProcedure proc, ref bool firstExecution)
{
    List<string> resCreatedOrUpdated = new List<string>();

    var files = proc.SourceFile;
    //NOTE(review): files.Count == 0 would throw a DivideByZeroException here — confirm
    //callers guarantee at least one source file
    int pcPerFile = (int)(100 / files.Count);
    int current = 0;

    //Normalize all folder paths to end with a trailing slash before composing resource ids
    string root = proc.RootPath;
    if (!root.EndsWith("/")) //NOXLATE
    {
        root += "/"; //NOXLATE
    }

    string sdp = proc.SpatialDataSourcesPath;
    string lp = proc.LayersPath;

    if (!string.IsNullOrEmpty(sdp))
    {
        if (!sdp.EndsWith("/")) //NOXLATE
        {
            sdp += "/"; //NOXLATE
        }
    }
    if (!string.IsNullOrEmpty(lp))
    {
        if (!lp.EndsWith("/")) //NOXLATE
        {
            lp += "/"; //NOXLATE
        }
    }

    //An explicit spatial-data-sources/layers path overrides the procedure's root path
    string fsRoot = (string.IsNullOrEmpty(sdp) ? root : sdp) + proc.SpatialDataSourcesFolder;
    string layerRoot = (string.IsNullOrEmpty(lp) ? root : lp) + proc.LayersFolder;

    if (!fsRoot.EndsWith("/")) //NOXLATE
    {
        fsRoot += "/"; //NOXLATE
    }
    if (!layerRoot.EndsWith("/")) //NOXLATE
    {
        layerRoot += "/"; //NOXLATE
    }

    //If the procedure already lists resource ids, this is a re-run: only those ids are updated
    List<string> resToUpdate = new List<string>();
    if (proc.ResourceId != null && proc.ResourceId.Count > 0)
    {
        resToUpdate.AddRange(proc.ResourceId);
        firstExecution = false;
    }
    else
    {
        firstExecution = true;
    }

    foreach (string file in files)
    {
        bool success = false;
        if (System.IO.File.Exists(file))
        {
            //GOTCHA: We are assuming these SDF files are not SDF2 files. This is
            //because there is no multi-platform solution to convert SDF2 files to SDF3
            string resName = System.IO.Path.GetFileNameWithoutExtension(file);
            string dataName = System.IO.Path.GetFileName(file);
            string dsId = fsRoot + resName + ".DrawingSource"; //NOXLATE
            string fsId = fsRoot + resName + ".FeatureSource"; //NOXLATE
            string lyrId = layerRoot + resName + ".LayerDefinition"; //NOXLATE
            if (proc.GenerateSpatialDataSources)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(fsId));
                if (!skip)
                {
                    if (proc.Type == LoadType.Dwf)
                    {
                        //Process is as follows:
                        //
                        // 1. Create and save drawing source document.
                        // 2. Upload dwf file as resource data for this document.

                        //Step 1: Create and save drawing source document.
                        IDrawingSource ds = ObjectFactory.CreateDrawingSource();
                        ds.SourceName = dataName;
                        ds.CoordinateSpace = proc.CoordinateSystem;
                        ds.ResourceID = dsId;
                        this.Parent.ResourceService.SaveResource(ds);
                        resCreatedOrUpdated.Add(dsId);
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, dsId), current));

                        //Step 2: Load resource data for document
                        this.Parent.ResourceService.SetResourceData(dsId, dataName, ResourceDataType.File, System.IO.File.OpenRead(file));
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateLoaded, file), current));

                        //Re-save after regenerating the sheet list and extents from the uploaded DWF
                        ds.RegenerateSheetList(this.Parent);
                        this.Parent.ResourceService.SaveResource(ds);
                        ds.UpdateExtents(this.Parent);
                        this.Parent.ResourceService.SaveResource(ds);
                    }
                    else
                    {
                        //Process is as follows:
                        //
                        // 1. Create and save feature source document.
                        // 2. Upload sdf file as resource data for this document.
                        // 3. Test the connection, it should check out.
                        // 4. If no spatial contexts are detected, assign a default one from the load procedure and save the modified feature source.

                        //Step 1: Create feature source document
                        string provider = "OSGeo.SDF"; //NOXLATE
                        switch (proc.Type)
                        {
                            case LoadType.Sqlite:
                                provider = "OSGeo.SQLite"; //NOXLATE
                                break;
                        }
                        var conp = new NameValueCollection();
                        conp["File"] = StringConstants.MgDataFilePath + dataName;
                        var fs = ObjectFactory.CreateFeatureSource(provider, conp);
                        fs.ResourceID = fsId;
                        this.Parent.ResourceService.SaveResource(fs);
                        resCreatedOrUpdated.Add(fsId);
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, fsId), current));

                        //TODO: When the infrastructure is available to us (ie. A portable .net FDO/MG Feature Service API wrapper)
                        //Maybe then we can actually implement the generalization and duplicate record handling properties. Until then, we skip
                        //these options

                        //Step 2: Load resource data for document
                        this.Parent.ResourceService.SetResourceData(fsId, dataName, ResourceDataType.File, System.IO.File.OpenRead(file));
                        //NOTE(review): the DWF branch reports Strings.TemplateLoaded after uploading data;
                        //this branch reuses Strings.TemplateCreated — possibly unintended, confirm
                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, file), current));

                        //Step 3: Test to make sure we're all good so far
                        string result = this.Parent.FeatureService.TestConnection(fsId);

                        //LocalNativeConnection returns this string, so I'm assuming this is the "success" result
                        if (result == "No errors" || result.ToLower() == "true") //NOXLATE
                        {
                            //Step 4: Test to see if default cs needs to be specified
                            FdoSpatialContextList spatialContexts = this.Parent.FeatureService.GetSpatialContextInfo(fsId, false);
                            if (!string.IsNullOrEmpty(proc.CoordinateSystem))
                            {
                                //Case 1: No spatial contexts. Register one using SupplementalContextInfo
                                if (spatialContexts.SpatialContext.Count == 0)
                                {
                                    //Register the default CS from the load procedure
                                    fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource.v1_0_0.SpatialContextType()
                                    {
                                        Name = "Default", //NOXLATE
                                        CoordinateSystem = proc.CoordinateSystem
                                    });

                                    //Update this feature source
                                    this.Parent.ResourceService.SaveResource(fs);

                                    cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                                }
                                else if (spatialContexts.SpatialContext.Count == 1) //Case 2: One spatial context, but its WKT is blank. Override using SupplementalContextInfo
                                {
                                    var sc = spatialContexts.SpatialContext[0];
                                    if (string.IsNullOrEmpty(sc.CoordinateSystemWkt))
                                    {
                                        //Register the default CS from the load procedure
                                        fs.AddSpatialContextOverride(new OSGeo.MapGuide.ObjectModels.FeatureSource.v1_0_0.SpatialContextType()
                                        {
                                            Name = sc.Name,
                                            CoordinateSystem = proc.CoordinateSystem
                                        });

                                        //Update this feature source
                                        this.Parent.ResourceService.SaveResource(fs);

                                        cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateSetSpatialContext, fsId), current));
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if (proc.GenerateLayers)
            {
                //Skip only if we have an update list and this resource id is not in it
                bool skip = (resToUpdate.Count > 0 && !resToUpdate.Contains(lyrId));
                if (!skip)
                {
                    if (proc.Type == LoadType.Dwf)
                    {
                        //Process is as follows
                        //
                        // 1. Enumerate the sheets on the drawing source
                        // 2. Set the referenced sheet to the first known sheet
                        var dwSvc = (IDrawingService)Parent.GetService((int)ServiceType.Drawing);
                        var list = dwSvc.EnumerateDrawingSections(dsId);
                        if (list.Section.Count > 0)
                        {
                            //Create drawing layer
                            var ld = ObjectFactory.CreateDefaultLayer(LayerType.Drawing, new Version(1, 0, 0));
                            var dl = ld.SubLayer as IDrawingLayerDefinition;
                            dl.ResourceId = dsId;
                            //Use the first one
                            dl.Sheet = list.Section[0].Name;
                            ld.ResourceID = lyrId;
                            this.Parent.ResourceService.SaveResource(ld);
                            resCreatedOrUpdated.Add(lyrId);
                            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, lyrId), current));
                        }
                    }
                    else
                    {
                        //NOTE: Because we are working against 1.0.0 object types this will always create 1.0.0 Layer Definition
                        //resources

                        //Process is as follows
                        //
                        // 1. Describe the schema of the feature source
                        // 2. If it contains at least one feature class, create a layer definition
                        // 3. Set the following layer definition properties:
                        //    - Feature Source: the feature source id
                        //    - Feature Class: the first feature class in the schema
                        //    - Geometry: the first geometry property in the first feature class
                        // 4. Infer the supported geometry types for this feature class. Toggle supported styles accordingly.

                        //Step 1: Describe the schema
                        //
                        //NOTE: I think we can get away with the full schema walk here. It's very unlikely we will be uploading a flat
                        //file with hundreds of classes. Even then, flat-file schema walk performance blows RDBMS walking performance
                        //out of the water anyway.
                        FeatureSourceDescription desc = this.Parent.FeatureService.DescribeFeatureSource(fsId);
                        if (desc.HasClasses())
                        {
                            //Step 2: Find the first feature class with a geometry property
                            ClassDefinition clsDef = null;
                            GeometricPropertyDefinition geom = null;
                            bool done = false;
                            foreach (ClassDefinition cls in desc.AllClasses)
                            {
                                if (done)
                                {
                                    break;
                                }
                                foreach (PropertyDefinition prop in cls.Properties)
                                {
                                    if (done)
                                    {
                                        break;
                                    }
                                    if (prop.Type == OSGeo.MapGuide.MaestroAPI.Schema.PropertyDefinitionType.Geometry)
                                    {
                                        clsDef = cls;
                                        geom = (GeometricPropertyDefinition)prop;
                                        done = true;
                                    }
                                }
                            }
                            if (clsDef != null && geom != null)
                            {
                                var ld = ObjectFactory.CreateDefaultLayer(LayerType.Vector, new Version(1, 0, 0));

                                //Step 3: Assign default properties
                                ld.ResourceID = lyrId;
                                var vld = ld.SubLayer as IVectorLayerDefinition;
                                vld.ResourceId = fsId;
                                vld.FeatureName = clsDef.QualifiedName;
                                vld.Geometry = geom.Name;

                                //Step 4: Infer geometry storage support and remove unsupported styles
                                var geomTypes = geom.GetIndividualGeometricTypes();
                                var scale = vld.GetScaleRangeAt(0);
                                var remove = new List<string>();
                                if (Array.IndexOf(geomTypes, FeatureGeometricType.Point) < 0)
                                {
                                    remove.Add(FeatureGeometricType.Point.ToString().ToLower());
                                }
                                if (Array.IndexOf(geomTypes, FeatureGeometricType.Curve) < 0)
                                {
                                    remove.Add(FeatureGeometricType.Curve.ToString().ToLower());
                                }
                                if (Array.IndexOf(geomTypes, FeatureGeometricType.Surface) < 0)
                                {
                                    remove.Add(FeatureGeometricType.Surface.ToString().ToLower());
                                }
                                scale.RemoveStyles(remove);
                                this.Parent.ResourceService.SaveResource(ld);
                                resCreatedOrUpdated.Add(lyrId);
                                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateCreated, lyrId), current));
                            }
                        }
                    }
                }
            }
            success = true;
        }
        else
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateFileNotFound, file), current));
        }
        //This file is now fully processed, so increment progress
        current += pcPerFile;
        if (success)
        {
            cb(this, new LengthyOperationProgressArgs(string.Format(Strings.TemplateFileProcessed, file), current));
        }
    }
    return (resCreatedOrUpdated.ToArray());
}
public override bool MoveFolderWithReferences(string oldpath, string newpath, LengthyOperationCallBack callback, LengthyOperationProgressCallBack progress) { if (this.SiteVersion >= new Version(2, 2)) //new way { //Unfortunately because this is all batched server-side, there is no //meaningful way to track progress LengthyOperationProgressArgs la = new LengthyOperationProgressArgs("Moving resource...", -1); //LOCALIZEME if (progress != null) progress(this, la); oldpath = FixAndValidateFolderPath(oldpath); newpath = FixAndValidateFolderPath(newpath); string req = m_reqBuilder.MoveResource(oldpath, newpath, true); req += "&CASCADE=1"; using (System.IO.Stream resp = this.OpenRead(req)) resp.ReadByte(); return true; } else //old way { return base.MoveFolderWithReferences(oldpath, newpath, callback, progress); } }
/// <summary>
/// Explodes a themed layer into filtered sub-layers where each sub-layer is filtered on the individual theme rule's filter
/// </summary>
/// <param name="options">Options describing the source layer, target folder and naming format</param>
/// <param name="progress">Optional progress callback; receives the new resource id and percentage complete</param>
public static void ExplodeThemeIntoFilteredLayers(ExplodeThemeOptions options, LengthyOperationProgressCallBack progress)
{
    var conn = options.Layer.CurrentConnection;
    string layerPrefix = options.LayerPrefix;
    var origVl = (IVectorLayerDefinition)options.Layer.SubLayer;
    var origRange = options.Range;
    var origStyle = options.ActiveStyle;
    int processed = 0;
    for (int i = 0; i < origStyle.RuleCount; i++)
    {
        var currentRule = origStyle.GetRuleAt(i);
        var newLayer = ObjectFactory.CreateDefaultLayer(conn, LayerType.Vector, options.Layer.ResourceVersion);
        var vl = (IVectorLayerDefinition)newLayer.SubLayer;
        vl.ResourceId = origVl.ResourceId;
        vl.FeatureName = origVl.FeatureName;
        vl.Geometry = origVl.Geometry;
        //Set this layer's filter to be that of the current rule
        vl.Filter = currentRule.Filter;
        //A newly created Layer Definition will only have one scale range
        var range = vl.GetScaleRangeAt(0);
        range.MinScale = origRange.MinScale;
        range.MaxScale = origRange.MaxScale;
        //Composite styles aren't applicable, so remove them if they exist
        var range2 = range as IVectorScaleRange2;
        if (range2 != null)
            range2.CompositeStyle = null;
        //Invalidate geometry types not of the original style
        switch (origStyle.StyleType)
        {
            case StyleType.Area:
                range.LineStyle = null;
                range.PointStyle = null;
                IAreaRule ar = range.AreaStyle.GetRuleAt(0);
                IAreaRule oar = (IAreaRule)currentRule;
                if (oar.AreaSymbolization2D != null)
                    ar.AreaSymbolization2D = oar.AreaSymbolization2D.Clone();
                if (oar.Label != null)
                    ar.Label = oar.Label.Clone();
                break;

            case StyleType.Line:
                range.AreaStyle = null;
                range.PointStyle = null;
                ILineRule lr = range.LineStyle.GetRuleAt(0);
                ILineRule olr = (ILineRule)currentRule;
                if (olr.StrokeCount > 0)
                {
                    foreach (var stroke in olr.Strokes)
                    {
                        lr.AddStroke(stroke.Clone());
                    }
                }
                if (olr.Label != null)
                    lr.Label = olr.Label.Clone();
                break;

            case StyleType.Point:
                range.AreaStyle = null;
                range.LineStyle = null;
                IPointRule pr = range.PointStyle.GetRuleAt(0);
                IPointRule opr = (IPointRule)currentRule;
                if (opr.Label != null)
                    pr.Label = opr.Label.Clone();
                if (opr.PointSymbolization2D != null)
                    pr.PointSymbolization2D = opr.PointSymbolization2D.Clone();
                break;
        }
        string newResId = options.FolderId + GenerateLayerName(options.LayerNameFormat, layerPrefix, GetScaleRangeStr(options.Range), i, currentRule) + ".LayerDefinition";
        conn.ResourceService.SaveResourceAs(newLayer, newResId);
        processed++;
        //FIX: multiply before dividing. (processed / RuleCount) * 100 is integer
        //division and reported 0% until the final iteration
        if (progress != null)
            progress(null, new LengthyOperationProgressArgs(newResId, (processed * 100) / origStyle.RuleCount));
    }
}
/// <summary>
/// Moves resources from the source connection to the specified folder on the target connection. Folder structure of the source is discarded
/// </summary>
/// <param name="resourceIds">The ids of the resources to move</param>
/// <param name="folderId">The target folder id (must end with a trailing slash)</param>
/// <param name="overwrite">If true, resources that already exist on the target are overwritten; otherwise they are skipped (and not deleted from the source)</param>
/// <param name="callback">An optional callback to report progress to. May be null</param>
/// <returns>The number of resources moved</returns>
public int MoveResources(string[] resourceIds, string folderId, bool overwrite, LengthyOperationProgressCallBack callback)
{
    Check.ArgumentNotNull(resourceIds, nameof(resourceIds));
    Check.ArgumentNotEmpty(folderId, nameof(folderId));

    //FIX: an empty id array previously caused a DivideByZeroException on the
    //progress-unit computation below
    if (resourceIds.Length == 0)
    {
        return 0;
    }

    //Substitute a no-op callback if none was supplied so it can be invoked unconditionally
    var cb = callback;
    if (cb == null)
    {
        cb = new LengthyOperationProgressCallBack((s, e) =>
        {
            //Do nothing
        });
    }
    var targetCaps = _target.Capabilities;
    int moved = 0;
    int unit = 100 / resourceIds.Length;
    int progress = 0;
    foreach (var resId in resourceIds)
    {
        string targetId = folderId + ResourceIdentifier.GetName(resId) + "." + ResourceIdentifier.GetResourceTypeAsString(resId); //NOXLATE
        string message = string.Empty;

        //Skip if target exists and overwrite is not specified
        if (!overwrite && _target.ResourceService.ResourceExists(targetId))
        {
            progress += unit;
            continue;
        }
        else
        {
            IResource res = _source.ResourceService.GetResource(resId);

            //Check if downgrading is required for the target's supported schema version
            var maxVer = targetCaps.GetMaxSupportedResourceVersion(res.ResourceType);
            if (res.ResourceVersion > maxVer)
            {
                res = _converter.Convert(res, maxVer);
                cb(this, new LengthyOperationProgressArgs(string.Format(Strings.DowngradedResource, resId, maxVer), progress));
            }

            //Save resource
            _target.ResourceService.SaveResourceAs(res, targetId);

            //Copy resource data
            var resData = _source.ResourceService.EnumerateResourceData(res.ResourceID);
            foreach (var data in resData.ResourceData)
            {
                using (var stream = _source.ResourceService.GetResourceData(res.ResourceID, data.Name))
                {
                    if (!stream.CanSeek)
                    {
                        //Buffer non-seekable streams into memory first
                        using (var ms = MemoryStreamPool.GetStream())
                        {
                            Utility.CopyStream(stream, ms, false);
                            ms.Position = 0L;
                            _target.ResourceService.SetResourceData(targetId, data.Name, data.Type, ms);
                        }
                    }
                    else
                    {
                        stream.Position = 0L;
                        _target.ResourceService.SetResourceData(targetId, data.Name, data.Type, stream);
                    }
                }
            }
            moved++;
            //Only delete from the source once the target copy fully succeeded
            _source.ResourceService.DeleteResource(resId);
            message = string.Format(Strings.CopiedResource, resId);
        }
        progress += unit;
        cb(this, new LengthyOperationProgressArgs(message, progress));
    }
    return moved;
}
public string[] ExecuteLoadProcedure(string resourceID, LengthyOperationProgressCallBack callback, bool ignoreUnsupportedFeatures) { throw new NotImplementedException(); }
private static int DoMigrate(OSGeo.MapGuide.MaestroAPI.IServerConnection source, OSGeo.MapGuide.MaestroAPI.IServerConnection target, CopyMoveToServerDialog migrate) { var diag = new ProgressDialog(); diag.CancelAbortsThread = true; var method = new ProgressDialog.DoBackgroundWork((worker, e, args) => { var src = (IServerConnection)args[0]; var dst = (IServerConnection)args[1]; var ids = (string[])args[2]; var folder = (string)args[3]; var overwrite = (bool)args[4]; var act = (MigrationAction)args[5]; var cb = new LengthyOperationProgressCallBack((sender, cbe) => { worker.ReportProgress(cbe.Progress, cbe.StatusMessage); }); var migrator = new ResourceMigrator(source, target); int affected = 0; switch (act) { case MigrationAction.Copy: affected = migrator.CopyResources(ids, folder, overwrite, cb); break; case MigrationAction.Move: affected = migrator.MoveResources(ids, folder, overwrite, cb); break; } return affected; }); return (int)diag.RunOperationAsync(Workbench.Instance, method, source, target, migrate.SourceResourceIds, migrate.TargetFolder, migrate.OverwriteResources, migrate.SelectedAction); }
internal string[] CopyResourcesToFolder(RepositoryHandle[] data, string targetConnectionName, string folderId) { string rootSourceParent = GetCommonParent(data); //There is an implicit assumption here that all items dropped come from the same connection var sourceConn = data.First().Connection; var targetConn = _connManager.GetConnection(targetConnectionName); var migrator = new ResourceMigrator(sourceConn, targetConn); //Collect all source ids var sourceIds = new List <string>(); foreach (var resId in data.Select(x => x.ResourceId.ToString())) { if (ResourceIdentifier.IsFolderResource(resId)) { sourceIds.AddRange(GetFullResourceList(sourceConn, resId)); } else { sourceIds.Add(resId); } } var targets = new List <string>(); foreach (var resId in sourceIds) { var dstId = resId.Replace(rootSourceParent, folderId); System.Diagnostics.Trace.TraceInformation($"{resId} => {dstId}"); //NOXLATE targets.Add(dstId); } bool overwrite = true; var existing = new List <string>(); foreach (var resId in targets) { if (targetConn.ResourceService.ResourceExists(resId)) { existing.Add(resId); } } if (existing.Count > 0) { overwrite = MessageService.AskQuestion(string.Format(Strings.PromptOverwriteOnTargetConnection, existing.Count)); } var wb = Workbench.Instance; var dlg = new ProgressDialog(); var worker = new ProgressDialog.DoBackgroundWork((w, evt, args) => { LengthyOperationProgressCallBack cb = (s, cbe) => { w.ReportProgress(cbe.Progress, cbe.StatusMessage); }; return(migrator.CopyResources(sourceIds.ToArray(), targets.ToArray(), overwrite, new RebaseOptions(rootSourceParent, folderId), cb)); }); var result = (string[])dlg.RunOperationAsync(wb, worker); RefreshModel(targetConn.DisplayName, folderId); ExpandNode(targetConn.DisplayName, folderId); return(result); }
/// <summary>
/// Handles the rename command for the single selected site-explorer item:
/// prompts for a new name, refuses if the resource (or the rename target) is
/// currently open in an editor, then moves the resource — optionally fixing up
/// references — behind a progress dialog and refreshes the explorer model.
/// </summary>
public override void Run()
{
    var wb = Workbench.Instance;
    var exp = wb.ActiveSiteExplorer;
    var omgr = ServiceRegistry.GetService<OpenResourceManager>();
    var connMgr = ServiceRegistry.GetService<ServerConnectionManager>();
    var conn = connMgr.GetConnection(exp.ConnectionName);
    //Rename only applies to a single selected item
    if (exp.SelectedItems.Length == 1)
    {
        var current = exp.SelectedItems[0];
        var parent = current.Parent;
        //Collect sibling names so the dialog can reject duplicates
        List<string> names = new List<string>();
        foreach (var item in parent.Children)
        {
            if (item != exp.SelectedItems[0])
            {
                names.Add(item.Name);
            }
        }
        //Renaming an open resource would orphan its editor
        if (!current.IsFolder && omgr.IsOpen(current.ResourceId, conn))
        {
            MessageService.ShowMessage(Strings.CannotRenameAlreadyOpenedResource);
            return;
        }
        var dlg = new RenameItemDialog(current.Name, names);
        if (dlg.ShowDialog(wb) == System.Windows.Forms.DialogResult.OK)
        {
            //Folders end with a trailing slash; documents end with their resource type extension
            string oldid = string.Empty;
            string newid = string.Empty;
            if (current.IsFolder)
            {
                oldid = $"{parent.ResourceId + current.Name}/"; //NOXLATE
                newid = $"{parent.ResourceId + dlg.NewName}/"; //NOXLATE
            }
            else
            {
                oldid = $"{parent.ResourceId + current.Name}.{current.ResourceType}"; //NOXLATE
                newid = $"{parent.ResourceId + dlg.NewName}.{current.ResourceType}"; //NOXLATE
            }
            //Likewise refuse if the rename target is already open in an editor
            if (omgr.IsOpen(newid, conn))
            {
                MessageService.ShowMessage(string.Format(Strings.CannotRenameToResourceAlreadyOpened, newid));
                return;
            }
            var prog = new ProgressDialog();
            prog.RunOperationAsync(wb, (worker, e, args) =>
            {
                //Relay move progress to the dialog
                LengthyOperationProgressCallBack cb = (s, cbArgs) =>
                {
                    worker.ReportProgress(cbArgs.Progress, cbArgs.StatusMessage);
                };
                //Perform the operation
                if (dlg.UpdateReferences)
                {
                    conn.ResourceService.MoveResourceWithReferences(oldid, newid, null, cb);
                }
                else
                {
                    conn.ResourceService.MoveResource(oldid, newid, true); // dlg.Overwrite);
                }
                /*
                 * if (current.IsFolder)
                 * {
                 *     if (dlg.UpdateReferences)
                 *         conn.ResourceService.MoveFolderWithReferences(oldid, newid, null, cb);
                 *     else
                 *         conn.ResourceService.MoveFolder(oldid, newid, dlg.Overwrite);
                 * }
                 * else
                 * {
                 *     if (dlg.UpdateReferences)
                 *     {
                 *         conn.ResourceService.MoveResourceWithReferences(oldid, newid, null, cb);
                 *     }
                 *     else
                 *         conn.ResourceService.MoveResource(oldid, newid, dlg.Overwrite);
                 * }*/
                current.Name = dlg.NewName;
                return (true);
            });
            //Need to refresh the model because it still is called by the old name
            var folder = ResourceIdentifier.GetParentFolder(oldid);
            exp.RefreshModel(conn.DisplayName, folder);
        }
    }
}
/// <summary>
/// Moves the given resources (documents and folders) into a folder on the same
/// connection behind a progress dialog, skipping documents that are open in an
/// editor (on either end of the move) and reporting those to the user afterwards.
/// </summary>
/// <param name="connectionName">The name of the connection to operate on</param>
/// <param name="resIds">The ids of the resources to move</param>
/// <param name="folderId">The target folder id</param>
/// <returns>The ids of the folders affected by the move (target plus each source parent)</returns>
private string[] MoveResourcesWithinConnection(string connectionName, ICollection<string> resIds, string folderId)
{
    var wb = Workbench.Instance;
    var notMovedToTarget = new List<string>();
    var notMovedFromSource = new List<string>();
    var omgr = ServiceRegistry.GetService<OpenResourceManager>();
    var conn = _connManager.GetConnection(connectionName);

    var dlg = new ProgressDialog();
    var worker = new ProgressDialog.DoBackgroundWork((w, e, args) =>
    {
        //Relay move progress to the dialog
        LengthyOperationProgressCallBack cb = (sender, cbe) =>
        {
            w.ReportProgress(cbe.Progress, cbe.StatusMessage);
        };
        //FIX: the unpacked folder argument was previously assigned but never used
        //(the lambda silently relied on the captured outer variable instead). Use it.
        var f = (string)args[0];
        var resourceIds = (ICollection<string>)args[1];
        foreach (var r in resourceIds)
        {
            if (ResourceIdentifier.IsFolderResource(r))
            {
                //IMPORTANT: We need to tweak the target resource id
                //otherwise the content *inside* the source folder is
                //moved instead of the folder itself!
                var rid = new ResourceIdentifier(r);
                var target = $"{f + rid.Name}/"; //NOXLATE
                conn.ResourceService.MoveResourceWithReferences(r, target, null, cb);
            }
            else
            {
                var rid = new ResourceIdentifier(r);
                var target = $"{f + rid.Name}.{rid.Extension}"; //NOXLATE
                if (omgr.IsOpen(r, conn))
                {
                    notMovedFromSource.Add(r);
                    continue;
                }
                if (!omgr.IsOpen(target, conn))
                {
                    conn.ResourceService.MoveResourceWithReferences(r, target, null, cb);
                }
                else
                {
                    notMovedToTarget.Add(r);
                }
            }
        }
        //Collect affected folders and refresh them
        Dictionary<string, string> folders = new Dictionary<string, string>();
        folders.Add(f, f);
        foreach (var n in resourceIds)
        {
            var ri = new ResourceIdentifier(n);
            var parent = ri.ParentFolder;
            if (parent != null && !folders.ContainsKey(parent))
            {
                folders.Add(parent, parent);
            }
        }
        return folders.Keys;
    });
    var affectedFolders = (IEnumerable<string>)dlg.RunOperationAsync(wb, worker, folderId, resIds);

    //Tell the user which documents could not be moved because of open editors
    if (notMovedToTarget.Count > 0 || notMovedFromSource.Count > 0)
    {
        MessageService.ShowMessage(string.Format(
            Strings.NotCopiedOrMovedDueToOpenEditors,
            Environment.NewLine + string.Join(Environment.NewLine, notMovedToTarget.ToArray()) + Environment.NewLine,
            Environment.NewLine + string.Join(Environment.NewLine, notMovedFromSource.ToArray()) + Environment.NewLine));
    }
    return new List<string>(affectedFolders).ToArray();
}
public string[] ExecuteLoadProcedure(OSGeo.MapGuide.ObjectModels.LoadProcedure.ILoadProcedure loadProc, LengthyOperationProgressCallBack callback, bool ignoreUnsupportedFeatures) { throw new NotImplementedException(); }
/// <summary>
/// Executes the specified load procedure. Only SDF and SHP load procedures are supported.
/// Also note that the following load procedure features are ignored during execution:
///  - Generalization of data
///  - Conversion from SHP to SDF
///  - SDF2 to SDF3 conversion
///  - SDF3 duplicate key handling
/// </summary>
/// <param name="proc">The load procedure to execute</param>
/// <param name="callback">An optional progress callback. May be null</param>
/// <returns>A list of resource IDs that were created from the execution of this load procedure</returns>
public string[] Execute(ILoadProcedure proc, LengthyOperationProgressCallBack callback)
{
    //TODO: Localize callback messages
    //TODO: Localize exception messages

    //TODO: This would currently overwrite everything. In reality, the load procedure has
    //a list of resource ids which are overwritable, anything not on the list is untouchable.
    //I presume if this list is empty, then everything is overwritten and the resource list
    //list is then assigned to the load procedure, which is then updated so that on subsequent runs,
    //only resources in the list are overwritten instead of everything.

    //Fall back to a no-op callback so it can be invoked without null checks downstream
    LengthyOperationProgressCallBack cb = callback;
    if (cb == null)
    {
        cb = delegate { };
    }

    //TODO: SDF and SHP load procedures share lots of common logic. Merge the two
    //once everything's all good.
    var loadType = proc.SubType.Type;
    if (loadType == LoadType.Dwg || loadType == LoadType.Raster)
    {
        throw new NotSupportedException(Strings.UnsupportedLoadProcedureType);
    }

    var baseProc = (IBaseLoadProcedure)proc.SubType;
    bool isFirstRun = true;
    string[] createdOrUpdated;
    if (loadType == LoadType.Shp)
    {
        var shpProc = (IShpLoadProcedure)baseProc;
        if (!this.IgnoreUnsupportedFeatures)
        {
            //Anything less than 100% implies use of generalization
            if (shpProc.Generalization < 100.0)
            {
                throw new NotSupportedException(Strings.LPROC_GeneralizationNotSupported);
            }
            //Can't do this because we don't have a portable .net FDO/MG Feature Service
            if (shpProc.ConvertToSdf)
            {
                throw new NotSupportedException(Strings.LPROC_ConvertToSdf3NotSupported);
            }
        }
        createdOrUpdated = ExecuteShpLoadProcedure(cb, shpProc, ref isFirstRun);
    }
    else
    {
        if (!this.IgnoreUnsupportedFeatures)
        {
            CheckUnsupportedFeatures(baseProc);
        }
        createdOrUpdated = ExecuteBaseProcedure(cb, baseProc, ref isFirstRun);
    }

    //Update the generated resources list if this is the first execution, so subsequent
    //runs only touch the resources recorded here
    if (isFirstRun)
    {
        baseProc.ResourceId.Clear();
        foreach (var id in createdOrUpdated)
        {
            baseProc.ResourceId.Add(id);
        }
        //Before we'd save here, but instead let's mark the resource as dirty from the user side
    }
    return createdOrUpdated;
}