/// <summary>
/// Deletes the error objects matching the given filter from the underlying error table.
/// </summary>
/// <param name="filter">The query filter selecting the errors to delete.</param>
public void DeleteErrorObjects([NotNull] IQueryFilter filter)
{
    Assert.ArgumentNotNull(filter, nameof(filter));

    Stopwatch stopwatch = _msg.DebugStartTiming();

    // Translate the incoming filter into one that fits the error table's schema
    IQueryFilter adaptedFilter = AdaptFilterToErrorTable(filter);

    Table.DeleteSearchedRows(adaptedFilter);

    _msg.DebugStopTiming(stopwatch, "Errors deleted in {0}", DatasetName);
}
/// <summary>
/// Adds connectivity information for each of the provided line features to the graph.
/// </summary>
/// <param name="lineFeatures">The line features to add.</param>
/// <param name="respectLineOrientation">Whether the line orientation is significant.</param>
/// <param name="trackCancel">Optional cancel tracker; processing stops when cancelled.</param>
public void AddConnectivity([NotNull] IEnumerable<IFeature> lineFeatures,
                            bool respectLineOrientation,
                            [CanBeNull] ITrackCancel trackCancel = null)
{
    Stopwatch stopwatch =
        _msg.DebugStartTiming("Building / Enlarging connectivity graph...");

    var processedCount = 0;

    foreach (IFeature feature in lineFeatures)
    {
        bool cancelled = trackCancel != null && ! trackCancel.Continue();

        if (cancelled)
        {
            _msg.DebugStopTiming(stopwatch,
                                 "Building connectivity graph cancelled after {0} features",
                                 processedCount);
            return;
        }

        AddConnectivity(feature, respectLineOrientation);
        processedCount++;
    }

    _msg.DebugStopTiming(stopwatch,
                         "Added connectivity information for {0} features. Total node count: {1}",
                         processedCount, Nodes.Count);
}
/// <summary>
/// Navigates to the work item nearest to the reference geometry, preferring
/// unvisited items; if none is found, visited items after the current one
/// are considered as well. Clears the current item if nothing is found.
/// </summary>
/// <param name="reference">The reference geometry to search from.</param>
/// <param name="match">Predicate restricting candidate items.
/// NOTE(review): not used in this implementation — confirm whether it should
/// be passed to TryGoNearest or is handled by an override.</param>
/// <param name="contextPerimeters">Optional perimeters constraining the search.</param>
public virtual void GoNearest(Geometry reference,
                              Predicate<IWorkItem> match,
                              params Polygon[] contextPerimeters)
{
    Assert.ArgumentNotNull(reference, nameof(reference));

    Stopwatch watch = _msg.DebugStartTiming();

    // start after the current item
    int startIndex = CurrentIndex + 1;

    // first, try to go to an unvisited item
    bool found = TryGoNearest(contextPerimeters, reference,
                              VisitedSearchOption.ExcludeVisited, startIndex);

    if (! found)
    {
        // if none found, search also the visited ones, but
        // only those *after* the current item
        found = TryGoNearest(contextPerimeters, reference,
                             VisitedSearchOption.IncludeVisited, startIndex);
    }

    if (! found && HasCurrentItem && Current != null)
    {
        ClearCurrentItem(Current);
    }

    // Fixed: previously logged nameof(GetNearest), i.e. the wrong method name
    _msg.DebugStopTiming(watch, nameof(GoNearest));
}
/// <summary>
/// Completes the current edit operation by applying the network update rules,
/// optionally using a temporarily cached set of target feature candidates.
/// </summary>
public override void CompletingOperation()
{
    if (! HasEdits())
    {
        return;
    }

    Stopwatch stopwatch = _msg.DebugStartTiming();

    if (NoCaching)
    {
        ApplyNetworkUpdateRules();
    }
    else
    {
        IEnvelope areaOfInterest = GetCacheExtent();

        try
        {
            NetworkFeatureFinder.CacheTargetFeatureCandidates(areaOfInterest);

            ApplyNetworkUpdateRules();
        }
        finally
        {
            // Always drop the cache, even when applying the rules failed
            NetworkFeatureFinder.InvalidateTargetFeatureCache();
        }
    }

    _msg.DebugStopTiming(
        stopwatch, "CompletingOperation - processed {0} original network updates",
        _createdInOperation.Count + _updatedInOperation.Count);
}
/// <summary>
/// Reads the polyline and point features within the expanded envelope of the
/// search geometry and caches them as target feature candidates.
/// </summary>
/// <param name="searchGeometry">The geometry defining the cache area; ignored if empty.</param>
public void CacheTargetFeatureCandidates(IGeometry searchGeometry)
{
    if (searchGeometry.IsEmpty)
    {
        return;
    }

    double xyTolerance = GeometryUtils.GetXyTolerance(searchGeometry);

    // Expand by the larger of the search tolerance and the XY tolerance
    double expansion = xyTolerance >= SearchTolerance
                           ? xyTolerance
                           : SearchTolerance;

    // Searching by envelope does not use the tolerance!
    IEnvelope searchEnvelope =
        GeometryUtils.GetExpandedEnvelope(searchGeometry, expansion);

    Stopwatch stopwatch = _msg.DebugStartTiming();

    var relevantGeometryTypes = new[]
                                {
                                    esriGeometryType.esriGeometryPolyline,
                                    esriGeometryType.esriGeometryPoint
                                };

    TargetFeatureCandidates = ReadFeatures(searchEnvelope, relevantGeometryTypes);

    _msg.DebugStopTiming(stopwatch, "Cached network features.");
}
/// <summary>
/// Makes the reshaped geometry storable by simplifying it and ensuring all Z
/// values are defined, using the original geometry as the Z reference.
/// The EnsureZs/Simplify/EnsureZs order is deliberate (see NOTEs below).
/// </summary>
/// <param name="reshapedGeometry">The reshaped geometry to prepare; modified in place.</param>
/// <param name="originalGeometry">The original geometry; must be a different instance.</param>
/// <param name="feature">The feature being stored (used for logging and Z context).</param>
/// <param name="allowPolylineSplitAndUnsplit">Whether Simplify may split/unsplit polylines.</param>
public static void MakeGeometryStorable([NotNull] IGeometry reshapedGeometry,
                                        [NotNull] IGeometry originalGeometry,
                                        [NotNull] IFeature feature,
                                        bool allowPolylineSplitAndUnsplit = false)
{
    Assert.ArgumentCondition(originalGeometry != reshapedGeometry,
                             "Original and reshaped geometry are the same instance.");

    Stopwatch watch = _msg.DebugStartTiming(
        "Simplify/EnsureZs of geometry to store in {0}...",
        GdbObjectUtils.ToString(feature));

    // NOTE: Simplify fails (in some situations) with undefined Z values (e.g. from non-Z-aware targets) - EnsureZsAreNonNan first
    // NOTE: Simplify can result in undefined Z values - EnsureZsAreNonNan again afterwards
    // NOTE: Simplify in the smaller tolerance, otherwise untouched vertices can get changed!
    EnsuresZsAreNonNan(reshapedGeometry, originalGeometry, feature);

    // Simplify is also needed even if all segment orientation is ok
    // because a reshape with more than one CutSubcurve can result in paths
    // leading to the inside of the ring -> hard to detect.
    const bool allowReorder = false;

    GeometryUtils.Simplify(reshapedGeometry, allowReorder,
                           allowPolylineSplitAndUnsplit);

    // Simplify may have introduced new undefined Zs - fix them once more
    EnsuresZsAreNonNan(reshapedGeometry, originalGeometry, feature);

    Assert.False(GeometryUtils.HasUndefinedZValues(reshapedGeometry),
                 "Geometry has undefined Zs.");

    _msg.DebugStopTiming(watch, "Simplified and ensured Z values");
}
/// <summary>
/// Calculates the adjusted version of the specified path by connecting its
/// start and end to the source part using the provided connect-line calculator.
/// </summary>
/// <param name="adjustLine">The path to adjust.</param>
/// <param name="sourcePart">The source curve to connect to.</param>
/// <param name="connectCalculator">Calculator that finds connection lines
/// (and fallbacks) between the source part and the adjust line.</param>
/// <returns>The adjusted subcurve including connection lines to the target,
/// or null if no valid start/end connection was found.</returns>
public static AdjustedCutSubcurve CalculateAdjustedPath(
    [NotNull] IPath adjustLine,
    [NotNull] ICurve sourcePart,
    [NotNull] IConnectLineCalculator connectCalculator)
{
    Stopwatch watch = _msg.DebugStartTiming();

    AdjustedCutSubcurve adjustedSubcurve = null;

    IPath startFallback;
    IPath endFallback;

    IPath startSourceConnection = connectCalculator.FindConnection(
        sourcePart, adjustLine, true, out startFallback);

    IPath endSourceConnection = connectCalculator.FindConnection(
        sourcePart, adjustLine, false, out endFallback);

    if (_msg.IsVerboseDebugEnabled)
    {
        // Fixed log label: was 'CalculatedAdjustedPath start connection',
        // inconsistent with the end-connection message below
        _msg.DebugFormat("CalculateAdjustedPath: start connection: {0}",
                         GeometryUtils.ToString(startSourceConnection));
        _msg.DebugFormat("CalculateAdjustedPath: end connection: {0}",
                         GeometryUtils.ToString(endSourceConnection));
    }

    // Possible criteria to consider the path valid:
    // - length of connections vs. length of path
    // - angle between path and connections
    // - size of the reshape-area (difference before/after reshape) vs. length of path (sliver condition?)
    ValidateConnectLines(sourcePart, ref startSourceConnection,
                         ref endSourceConnection, startFallback, endFallback);

    // Only build the adjusted subcurve when both ends could be connected
    if (startSourceConnection != null && endSourceConnection != null)
    {
        adjustedSubcurve = CreateAdjustedCutSubcurve(adjustLine,
                                                     startSourceConnection,
                                                     endSourceConnection);
    }

    _msg.DebugStopTiming(watch,
                         "Calculated adjusted subcurve including connection lines to target");

    return adjustedSubcurve;
}
/// <summary>
/// gRPC entry point that calculates the overlaps for the requested source features,
/// executing the calculation on the STA task scheduler.
/// </summary>
/// <param name="request">The overlap calculation request.</param>
/// <param name="context">The server call context of the gRPC call.</param>
/// <returns>The calculated overlaps response (never null).</returns>
public override async Task<CalculateOverlapsResponse> CalculateOverlaps(
    CalculateOverlapsRequest request, ServerCallContext context)
{
    Stopwatch stopwatch = _msg.DebugStartTiming();

    Func<ITrackCancel, CalculateOverlapsResponse> calculation =
        cancelTracker => RemoveOverlapsServiceUtils.CalculateOverlaps(request, cancelTracker);

    CalculateOverlapsResponse result =
        await GrpcServerUtils.ExecuteServiceCall(calculation, context, _staTaskScheduler, true);

    if (result == null)
    {
        result = new CalculateOverlapsResponse();
    }

    _msg.DebugStopTiming(stopwatch, "Calculated overlaps for peer {0} ({1} source features)",
                         context.Peer, request.SourceFeatures.Count);

    return Assert.NotNull(result);
}
/// <summary>
/// Lazily yields the work items of all registered source classes that match
/// the given filter, refreshing each item's state from the state repository.
/// </summary>
/// <param name="filter">Optional query filter restricting the rows.</param>
/// <param name="recycle">Whether row recycling may be used when fetching.</param>
public IEnumerable<IWorkItem> GetItems(QueryFilter filter = null, bool recycle = true)
{
    foreach (ISourceClass sourceClass in GeodatabaseBySourceClasses.Keys)
    {
        var itemCount = 0;
        Stopwatch stopwatch = _msg.DebugStartTiming();

        foreach (Row row in GetRowsCore(sourceClass, filter, recycle))
        {
            IWorkItem workItem = CreateWorkItemCore(row, sourceClass);

            itemCount++;

            yield return WorkItemStateRepository.Refresh(workItem);
        }

        _msg.DebugStopTiming(stopwatch,
                             $"{nameof(GetItems)}() {sourceClass.Name}: {itemCount} items");
    }
}
/// <summary>
/// gRPC entry point that calculates the reshape lines for the requested
/// source and target features, executing on the STA task scheduler.
/// </summary>
/// <param name="request">The reshape-line calculation request.</param>
/// <param name="context">The server call context of the gRPC call.</param>
/// <returns>The calculated reshape lines response (never null).</returns>
public override async Task<CalculateReshapeLinesResponse> CalculateReshapeLines(
    [NotNull] CalculateReshapeLinesRequest request,
    [NotNull] ServerCallContext context)
{
    Stopwatch watch = _msg.DebugStartTiming();

    Func<ITrackCancel, CalculateReshapeLinesResponse> func =
        trackCancel => ChangeAlongServiceUtils.CalculateReshapeLines(request, trackCancel);

    CalculateReshapeLinesResponse response =
        await GrpcServerUtils.ExecuteServiceCall(func, context, _staTaskScheduler, true) ??
        new CalculateReshapeLinesResponse();

    // Fixed format string: was "... {2}, target features" (misplaced comma)
    _msg.DebugStopTiming(
        watch,
        "Calculated reshape lines for peer {0} ({1} source features, {2} target features)",
        context.Peer, request.SourceFeatures.Count, request.TargetFeatures.Count);

    return response;
}
/// <summary>
/// Searches the work list with the given filter and returns a cursor over the
/// matching work items' row values.
/// </summary>
/// <param name="queryFilter">The query filter to apply.</param>
/// <returns>A cursor over the value arrays of the matching items.</returns>
public override PluginCursorTemplate Search(QueryFilter queryFilter)
{
    Stopwatch stopwatch = _msg.DebugStartTiming();

    // TODO drop ToList, inline
    List<object[]> rows = _workList.GetItems(queryFilter, true)
                                   .Select(item => GetValues(item, _workList.Current))
                                   .ToList();

    _msg.DebugStopTiming(stopwatch,
                         $"{nameof(WorkItemTable)}.{nameof(Search)}(): {rows.Count} items");

    return new WorkItemCursor(rows);
}
/// <summary>
/// Initializes a new session provider, building the NHibernate session factory
/// from the configuration supplied by the given builder.
/// </summary>
/// <param name="configBuilder">Provides the NHibernate configuration.</param>
public SessionProvider(INHConfigurationBuilder configBuilder)
{
    Stopwatch stopwatch = _msg.DebugStartTiming();

    Configuration nhConfiguration = configBuilder.GetConfiguration();

    _sessionFactory = nhConfiguration.BuildSessionFactory();

    _msg.DebugStopTiming(
        stopwatch,
        "Configured NHibernate and created session factory using the following configuration: {0}",
        nhConfiguration);
}
/// <summary>
/// Saves the collected issues for the verified conditions, replacing existing
/// errors and deleting obsolete allowed errors.
/// </summary>
/// <param name="errorDeletion">How existing errors in the perimeter are deleted.</param>
/// <returns>The number of issues saved.</returns>
public int SaveIssues(ErrorDeletionInPerimeter errorDeletion)
{
    Assert.NotNull(_resultIssueCollector).ErrorDeletionInPerimeter = errorDeletion;

    Stopwatch stopwatch = _msg.DebugStartTiming(
        "Replacing existing errors with new issues, deleting obsolete allowed errors...");

    var verifiedConditions = GetVerifiedConditionIds(VerificationMsg).ToList();

    int savedIssueCount = _resultIssueCollector.SaveIssues(verifiedConditions);

    _msg.DebugStopTiming(stopwatch, "Updated issues in verified context");

    return savedIssueCount;
}
/// <summary>
/// gRPC entry point that performs an advanced reshape for the requested
/// features, executing on the STA task scheduler.
/// </summary>
/// <param name="request">The advanced reshape request.</param>
/// <param name="context">The server call context of the gRPC call.</param>
/// <returns>The reshape response (never null).</returns>
public override async Task<AdvancedReshapeResponse> AdvancedReshape(
    AdvancedReshapeRequest request, ServerCallContext context)
{
    Stopwatch watch = _msg.DebugStartTiming();

    // NOTE(review): unlike the sibling service calls, trackCancel is not
    // forwarded to AdvancedReshapeServiceUtils.Reshape - confirm whether
    // cancellation should be supported here.
    Func<ITrackCancel, AdvancedReshapeResponse> func =
        trackCancel => AdvancedReshapeServiceUtils.Reshape(request);

    AdvancedReshapeResponse response =
        await GrpcServerUtils.ExecuteServiceCall(func, context, _staTaskScheduler, true) ??
        new AdvancedReshapeResponse();

    _msg.DebugStopTiming(watch, "Reshaped for peer {0} ({1} source features)",
                         context.Peer, request.Features.Count);

    return (response);
}
/// <summary>
/// Calculates the segments shorter than the specified length per feature
/// within the provided perimeter.
/// </summary>
/// <param name="forFeatureVertexInfos">The vertex infos of the features to analyze.</param>
/// <param name="use2DLengthOnly">Whether only the 2D segment length is compared.</param>
/// <param name="perimeter">The perimeter restricting the analysis (optional).</param>
/// <param name="trackCancel">The cancel tracker (optional).</param>
/// <returns>The short segments per feature</returns>
public static void CalculateShortSegments(
    [NotNull] ICollection<FeatureVertexInfo> forFeatureVertexInfos,
    bool use2DLengthOnly,
    [CanBeNull] IGeometry perimeter,
    [CanBeNull] ITrackCancel trackCancel)
{
    Assert.ArgumentNotNull(forFeatureVertexInfos, nameof(forFeatureVertexInfos));

    Stopwatch watch = _msg.DebugStartTiming("Getting short segments for {0} features.",
                                            forFeatureVertexInfos.Count);

    var shortSegmentCount = 0;
    var shortFeatureCount = 0;

    foreach (FeatureVertexInfo vertexInfo in forFeatureVertexInfos)
    {
        if (trackCancel != null && ! trackCancel.Continue())
        {
            return;
        }

        try
        {
            IList<esriSegmentInfo> removableSegments =
                CalculateShortSegments(vertexInfo, use2DLengthOnly, perimeter);

            shortSegmentCount += removableSegments.Count;

            // NOTE(review): incremented for every processed feature, not only
            // for features that actually have short segments - confirm whether
            // the "{1} of {2} features" log below is intended to mean that.
            shortFeatureCount++;
        }
        catch (Exception)
        {
            // Fixed error message: previously said 'generalized points',
            // apparently copy-pasted from a different calculation
            _msg.ErrorFormat("Error calculating short segments for {0}",
                             GdbObjectUtils.ToString(vertexInfo.Feature));
            throw;
        }
    }

    _msg.DebugStopTiming(watch,
                         "Found {0} segments shorter than minimum segment length in {1} of {2} features.",
                         shortSegmentCount, shortFeatureCount,
                         forFeatureVertexInfos.Count);
}
/// <summary>
/// Gets the OIDs of the rows in <paramref name="table"/> that are related,
/// via any of the given relationship class chains, to features intersecting
/// the test perimeter.
/// </summary>
/// <param name="table">The table whose row OIDs are collected.</param>
/// <param name="relClassChains">The relationship class chains to follow.</param>
/// <param name="testPerimeter">The perimeter used for the spatial query.</param>
/// <param name="testWithTable">The test (used for logging only).</param>
/// <returns>The distinct OIDs of the related rows.</returns>
internal static HashSet<int> GetOidsByRelatedGeometry(
    [NotNull] ITable table,
    [NotNull] IEnumerable<IList<IRelationshipClass>> relClassChains,
    [NotNull] IGeometry testPerimeter,
    [NotNull] ITest testWithTable)
{
    Assert.ArgumentNotNull(table, nameof(table));
    Assert.ArgumentNotNull(relClassChains, nameof(relClassChains));
    Assert.ArgumentNotNull(testPerimeter, nameof(testPerimeter));

    string whereClause = string.Empty;
    string postfixClause = string.Empty;

    string tableName = DatasetUtils.GetName(table);

    Stopwatch watch = _msg.DebugStartTiming(
        "Getting row OIDs by related geometry for {0}", tableName);

    var result = new HashSet<int>();

    foreach (IList<IRelationshipClass> relClassChain in relClassChains)
    {
        // NOTE:
        // - if only the OID is in the subfields, then ArcMap crashes without
        //   catchable exception in RARE cases
        // - if only the OID plus the Shape field of the involved feature class are in the subfields, then
        //   in those same cases a "Shape Integrity Error" exception is thrown.
        foreach (FieldMappingRowProxy row in GdbQueryUtils.GetRowProxys(
                     (IObjectClass) table, testPerimeter, whereClause,
                     relClassChain, postfixClause,
                     subfields: null,
                     includeOnlyOIDFields: true,
                     recycle: true))
        {
            // HashSet.Add ignores duplicates across chains
            result.Add(row.OID);
        }
    }

    _msg.DebugStopTiming(watch, "GetOIDsByRelatedGeometry() table: {0} test: {1}",
                         tableName, testWithTable);

    return (result);
}
/// <summary>
/// Initializes a new session provider by building the NHibernate session
/// factory. A failure is recorded rather than thrown, because the DDX is
/// optional in some applications.
/// </summary>
/// <param name="configBuilder">Provides the NHibernate configuration.</param>
public SessionProvider(INHConfigurationBuilder configBuilder)
{
    Stopwatch stopwatch = _msg.DebugStartTiming();

    try
    {
        Configuration nhConfiguration = configBuilder.GetConfiguration();

        _sessionFactory = nhConfiguration.BuildSessionFactory();

        _msg.DebugStopTiming(
            stopwatch,
            "Successfully configured NHibernate and created session factory.");
    }
    catch (Exception e)
    {
        // Do not throw - in some applications the DDX is optional (such as field admin).
        _msg.Debug("Failed to create NHibernate session factory.", e);
        _sessionFactoryErrorMessage = e.Message;
    }
}
/// <summary>
/// Tries to find target intersection points for source intersection points that have
/// no target point defined yet and adds these points to the source-target pairs.
/// The automatically determined target points are at the intersection of the prolonged
/// shared boundary of polygons and the reshape path.
/// </summary>
/// <param name="reshapePath">The reshape path.</param>
/// <param name="geometriesToReshape">The geometries to reshape.</param>
/// <param name="sketchOriginalIntersectionPoints">The intersection points between the
/// sketch and the original geometries to reshape.</param>
/// <returns>The full list of source-target pairs, including the added automatic pairs.</returns>
public IList<KeyValuePair<IPoint, IPoint>> AddAutomaticSourceTargetPairs(
    [NotNull] IPath reshapePath,
    [NotNull] IList<IGeometry> geometriesToReshape,
    [NotNull] IPointCollection sketchOriginalIntersectionPoints)
{
    // TODO: Consider moving the automatic pair calculation elsewhere
    int origPairCount = SourceTargetPairs.Count;

    Stopwatch watch = _msg.DebugStartTiming("Calculating automatic source-target pairs.");

    IEnumerable<KeyValuePair<IPoint, IPoint>> automaticPairs =
        MatchSinglePointsWithExtendedSourceLines(reshapePath, geometriesToReshape,
                                                 sketchOriginalIntersectionPoints);

    SourceTargetPairs.AddRange(automaticPairs);

    // Fixed typo in log message: 'addtitional' -> 'additional'
    _msg.DebugStopTiming(watch,
                         "Found additional {0} source-target pairs by automatic matching. Total: {1}",
                         SourceTargetPairs.Count - origPairCount,
                         SourceTargetPairs.Count);

    return SourceTargetPairs;
}
/// <summary>
/// Calculates the Z differences between the base geometry and the compare
/// geometry and builds a polyline from the base segments adjacent to the
/// Z-different points. Only implemented for polylines/paths.
/// </summary>
/// <param name="zDifferentPoints">The points that are in the base geometry but
/// not in the target, keyed by 3D point with their vertex index.</param>
/// <returns>A polyline containing the base segments around the Z-different points.</returns>
public IPolyline GetBaseSegmentZDifferences(
    out IDictionary<WKSPointZ, VertexIndex> zDifferentPoints)
{
    Assert.True(_baseGeometry is IPolyline || _baseGeometry is IPath,
                "Not implemented for geometries other than lines.");

    Stopwatch watch = _msg.DebugStartTiming("Calculating Z difference on source...");

    // Get the points that are in the base but not in the target:
    zDifferentPoints = GetDifference(true);

    IDictionary<VertexIndex, ISegment> sourceSegmentsToAdd =
        new Dictionary<VertexIndex, ISegment>();

    foreach (KeyValuePair<WKSPointZ, VertexIndex> differentPointOnPath in zDifferentPoints)
    {
        // Only points the compare geometry does not contain in 3D are relevant
        bool targetContainsPoint =
            CompareGeometryContainsPoint3D(differentPointOnPath.Key);

        if (! targetContainsPoint)
        {
            // Take the respective segment before and after this point as difference
            VertexIndex vertexIndex = differentPointOnPath.Value;

            // The segment *before* the vertex has the previous index in the part
            int previousSegmentIdx = vertexIndex.VertexIndexInPart - 1;
            var previousVertexIdx = new VertexIndex(vertexIndex.PartIndex,
                                                    previousSegmentIdx, false);

            if (previousSegmentIdx >= 0 &&
                ! sourceSegmentsToAdd.ContainsKey(previousVertexIdx))
            {
                ISegment segment = GetBaseSegment(previousVertexIdx);
                sourceSegmentsToAdd.Add(previousVertexIdx, segment);
            }

            // The segment *after* the vertex only exists if the vertex is not the
            // last one in its part
            if (! vertexIndex.IsLastInPart &&
                ! sourceSegmentsToAdd.ContainsKey(vertexIndex))
            {
                ISegment segment = GetBaseSegment(vertexIndex);
                sourceSegmentsToAdd.Add(vertexIndex, segment);
            }
        }
    }

    IPolyline sourceDifferences =
        GeometryFactory.CreatePolyline(_baseGeometry.SpatialReference,
                                       true,
                                       GeometryUtils.IsMAware(_baseGeometry));

    var segmentArray = new ISegment[sourceSegmentsToAdd.Count];
    sourceSegmentsToAdd.Values.CopyTo(segmentArray, 0);

    GeometryUtils.GeometryBridge.SetSegments((ISegmentCollection) sourceDifferences,
                                             ref segmentArray);

    _msg.DebugStopTiming(watch,
                         "Calculated Z differences on source and built difference line");

    return (sourceDifferences);
}
/// <summary>
/// Calculates the DifferenceLines and adds the subcurves to the provided result list.
/// </summary>
/// <param name="sourceGeometry"></param>
/// <param name="targetPolyline"></param>
/// <param name="resultList">All resulting subcurves including the ones that cannot be used to reshape</param>
/// <param name="trackCancel"></param>
/// <returns>How usable the calculated subcurves are for reshaping.</returns>
public ReshapeAlongCurveUsability CalculateSubcurves(
    IGeometry sourceGeometry,
    IPolyline targetPolyline,
    IList<CutSubcurve> resultList,
    ITrackCancel trackCancel)
{
    Assert.ArgumentNotNull(sourceGeometry);
    Assert.ArgumentNotNull(targetPolyline);
    Assert.ArgumentNotNull(resultList);

    Stopwatch watch = _msg.DebugStartTiming();

    IPolyline preprocessedSourcePolyline =
        ChangeGeometryAlongUtils.GetPreprocessedGeometryForExtent(
            sourceGeometry, ClipExtent);

    if (preprocessedSourcePolyline.IsEmpty)
    {
        _msg.WarnFormat("Source feature is outside the processing extent.");
        return (ReshapeAlongCurveUsability.NoSource);
    }

    IPointCollection intersectionPoints;
    IGeometryCollection differences = CalculateDifferences(
        preprocessedSourcePolyline, targetPolyline, trackCancel,
        out intersectionPoints);

    // NOTE(review): the early returns below (and the one above) skip the
    // Marshal.ReleaseComObject calls at the end - confirm whether the COM
    // objects are intentionally left to the GC in those paths.
    if (trackCancel != null && ! trackCancel.Continue())
    {
        return (ReshapeAlongCurveUsability.Undefined);
    }

    if (differences == null)
    {
        // No differences: the source is already congruent with the target
        return (ReshapeAlongCurveUsability.AlreadyCongruent);
    }

    SubcurveFilter?.PrepareForSource(sourceGeometry);

    bool canReshape = CalculateReshapeSubcurves(
        preprocessedSourcePolyline, targetPolyline, differences,
        intersectionPoints, resultList, trackCancel);

    JoinNonForkingSubcurves(resultList);

    Marshal.ReleaseComObject(preprocessedSourcePolyline);
    Marshal.ReleaseComObject(differences);

    _msg.DebugStopTiming(
        watch, "RecalculateReshapableSubcurves: Total number of curves: {0}.",
        resultList.Count);

    if (canReshape)
    {
        return (ReshapeAlongCurveUsability.CanReshape);
    }

    return (resultList.Count == 0
                ? ReshapeAlongCurveUsability.NoReshapeCurves
                : ReshapeAlongCurveUsability.InsufficientOrAmbiguousReshapeCurves);
}
/// <summary>
/// Verifies the specified object classes.
/// </summary>
/// <param name="qualitySpecification">The quality specification to verify.</param>
/// <param name="datasetContext">The model context.</param>
/// <param name="datasetResolver">The resolver for getting the object dataset based on a table name, in the context of a quality condition</param>
/// <param name="issueRepository">The issue repository.</param>
/// <param name="exceptionObjectRepository">The exception object repository</param>
/// <param name="tileSize">Tile size for the quality verification.</param>
/// <param name="getKeyFieldName">Function for getting the key field name for an object dataset</param>
/// <param name="areaOfInterest">The test run perimeter (optional).</param>
/// <param name="trackCancel">The cancel tracker.</param>
/// <param name="errorCount">The number of (hard) errors.</param>
/// <param name="warningCount">The number of warnings.</param>
/// <param name="rowCountWithStopConditions">The number of rows for which a stop condition was violated - those rows may not be completely tested.</param>
/// <returns>True if no errors were found and the run was not cancelled.</returns>
public bool Verify([NotNull] QualitySpecification qualitySpecification,
                   [NotNull] IDatasetContext datasetContext,
                   [NotNull] IQualityConditionObjectDatasetResolver datasetResolver,
                   [CanBeNull] IIssueRepository issueRepository,
                   [CanBeNull] IExceptionObjectRepository exceptionObjectRepository,
                   double tileSize,
                   [CanBeNull] Func<IObjectDataset, string> getKeyFieldName,
                   [CanBeNull] AreaOfInterest areaOfInterest,
                   [CanBeNull] ITrackCancel trackCancel,
                   out int errorCount,
                   out int warningCount,
                   out int rowCountWithStopConditions)
{
    Assert.ArgumentNotNull(qualitySpecification, nameof(qualitySpecification));
    Assert.ArgumentNotNull(datasetContext, nameof(datasetContext));
    Assert.ArgumentNotNull(datasetResolver, nameof(datasetResolver));
    Assert.ArgumentCondition(tileSize > 0, "Invalid tile size: {0}", tileSize);

    _verificationReportBuilder.BeginVerification(areaOfInterest);

    // Materialize elements/tests once; both are enumerated multiple times below
    IEnumerable<QualitySpecificationElement> elements =
        GetOrderedElements(qualitySpecification).ToList();

    IDictionary<ITest, QualitySpecificationElement> elementsByTest;
    IEnumerable<ITest> tests =
        CreateTests(elements, datasetContext, out elementsByTest).ToList();

    // Report all verified quality conditions and count them
    var qualityConditionCount = 0;
    foreach (QualitySpecificationElement element in elements)
    {
        qualityConditionCount++;
        _verificationReportBuilder.AddVerifiedQualityCondition(element);
    }

    // Report all verified datasets and count them
    var datasetCount = 0;
    foreach (Dataset dataset in GetVerifiedDatasets(qualitySpecification, datasetContext))
    {
        datasetCount++;
        _verificationReportBuilder.AddVerifiedDataset(dataset);
    }

    Stopwatch watch = _msg.DebugStartTiming();

    LogTests(tests, elementsByTest);

    TestContainer testContainer = CreateTestContainer(tests, tileSize);

    LogBeginVerification(qualitySpecification, tileSize, areaOfInterest);

    IssueProcessor issueProcessor;
    ProgressProcessor progressProcessor;
    using (var issueWriter = new BufferedIssueWriter(_verificationReportBuilder,
                                                     datasetContext, datasetResolver,
                                                     issueRepository,
                                                     getKeyFieldName))
    {
        issueProcessor = CreateIssueProcessor(testContainer, issueWriter,
                                              areaOfInterest,
                                              exceptionObjectRepository,
                                              elementsByTest);

        progressProcessor = new ProgressProcessor(testContainer, elementsByTest,
                                                  trackCancel);

        // Wire up issue collection, stop conditions and progress reporting
        testContainer.QaError += (sender, args) => issueProcessor.Process(args);

        testContainer.TestingRow += delegate(object o, RowEventArgs args)
        {
            // Skip rows for which a stop condition was already violated
            if (issueProcessor.HasStopCondition(args.Row))
            {
                args.Cancel = true;
            }
        };

        testContainer.ProgressChanged +=
            (sender, args) => progressProcessor.Process(args);

        // run the tests
        TestExecutionUtils.Execute(testContainer, areaOfInterest);
    }

    _verificationReportBuilder.AddRowsWithStopConditions(
        issueProcessor.GetRowsWithStopConditions());

    if (exceptionObjectRepository != null)
    {
        _verificationReportBuilder.AddExceptionStatistics(
            exceptionObjectRepository.ExceptionStatistics);
    }

    _verificationReportBuilder.EndVerification(progressProcessor.Cancelled);

    _msg.DebugStopTiming(watch, "Verification");

    errorCount = issueProcessor.ErrorCount;
    warningCount = issueProcessor.WarningCount;
    rowCountWithStopConditions = issueProcessor.RowsWithStopConditionsCount;

    // The specification is fulfilled only when there are no errors and the
    // run was not cancelled
    bool fulfilled = errorCount == 0 && ! progressProcessor.Cancelled;

    LogResults(elements, issueProcessor,
               qualityConditionCount, datasetCount,
               fulfilled, progressProcessor.Cancelled,
               exceptionObjectRepository?.ExceptionStatistics);

    return (fulfilled);
}
/// <summary>
/// Calculates cut subcurves for the source geometry along the target polyline:
/// first the classic subcurves, then additional cut lines derived via the
/// topological operator, and adds the usable ones to the result list.
/// </summary>
/// <param name="sourceGeometry">The geometry to cut.</param>
/// <param name="targetPolyline">The target polyline to cut along.</param>
/// <param name="resultList">Receives the usable subcurves.</param>
/// <param name="trackCancel">Optional cancel tracker, forwarded to the classic calculation.</param>
/// <returns>Whether any usable subcurves were found.</returns>
public ReshapeAlongCurveUsability CalculateSubcurves(
    IGeometry sourceGeometry,
    IPolyline targetPolyline,
    IList<CutSubcurve> resultList,
    ITrackCancel trackCancel)
{
    Assert.ArgumentNotNull(sourceGeometry, nameof(sourceGeometry));
    Assert.ArgumentNotNull(targetPolyline, nameof(targetPolyline));
    Assert.ArgumentNotNull(resultList, nameof(resultList));

    // calculate classic subcurves
    var classicCurves = new List<CutSubcurve>();
    _standardSubcurveCalculator.CalculateSubcurves(sourceGeometry, targetPolyline,
                                                   classicCurves, trackCancel);

    // A classic curve is usable for cutting when it can reshape (or is a
    // candidate) and its path runs through the source's interior
    Predicate<CutSubcurve> canCut =
        cutSubcurve =>
            (cutSubcurve.CanReshape || cutSubcurve.IsReshapeMemberCandidate) &&
            GeometryUtils.InteriorIntersects(
                sourceGeometry, GeometryUtils.GetHighLevelGeometry(cutSubcurve.Path));

    List<CutSubcurve> usableClassicCurves =
        classicCurves.Where(cutSubcurve => canCut(cutSubcurve)).ToList();

    _msg.DebugFormat("Usable classic subcurves: {0} of {1}",
                     usableClassicCurves.Count, classicCurves.Count);

    // The topological operator needs a polygon; wrap non-polygon sources
    IPolygon sourcePolygon =
        sourceGeometry.GeometryType == esriGeometryType.esriGeometryPolygon
            ? (IPolygon) sourceGeometry
            : GeometryFactory.CreatePolygon(sourceGeometry);

    Stopwatch watch = _msg.DebugStartTiming(
        "Calculating additional cut lines using topological operator");

    List<CutSubcurve> usableCutLines = CalculateUsableTopoOpCutLines(
        sourcePolygon, targetPolyline, usableClassicCurves);

    // Only release the polygon if it was created here (not the caller's instance)
    if (sourcePolygon != sourceGeometry)
    {
        Marshal.ReleaseComObject(sourcePolygon);
    }

    _msg.DebugStopTiming(watch, "Calculated {0} additional cut lines",
                         usableCutLines.Count);

    foreach (CutSubcurve usableClassicCurve in usableClassicCurves)
    {
        resultList.Add(usableClassicCurve);
    }

    foreach (CutSubcurve usableTopoOpCutPath in usableCutLines)
    {
        resultList.Add(usableTopoOpCutPath);
    }

    return (usableClassicCurves.Count == 0 && usableCutLines.Count == 0
                ? ReshapeAlongCurveUsability.NoReshapeCurves
                : ReshapeAlongCurveUsability.CanReshape);
}
/// <summary>
/// Calculates the crack points between this feature and the specified target
/// feature and adds them (including non-crackable points) to this instance.
/// </summary>
/// <param name="targetFeature">The target feature to intersect with.</param>
/// <param name="crackPointCalculator">The calculator used to determine
/// intersection and crack points.</param>
public void AddCrackPoints([NotNull] IFeature targetFeature,
                           [NotNull] CrackPointCalculator crackPointCalculator)
{
    // TODO: consider moving this to CrackUtils
    Stopwatch watch = _msg.DebugStartTiming(
        "Calculating intersection points between {0} and {1}",
        GdbObjectUtils.ToString(Feature),
        GdbObjectUtils.ToString(targetFeature));

    IPointCollection intersectionPoints = null;

    try
    {
        IGeometry targetGeometry = targetFeature.ShapeCopy;
        IGeometry originalGeometry = Feature.Shape;

        IPolyline clippedSource = OriginalClippedPolyline;

        GeometryUtils.EnsureSpatialReference(targetGeometry,
                                             clippedSource.SpatialReference);

        crackPointCalculator.SetDataResolution(Feature);

        IGeometry intersectionTarget;
        intersectionPoints = crackPointCalculator.GetIntersectionPoints(
            clippedSource, targetGeometry, out intersectionTarget);

        // TODO: if the target has a vertex closish (wrt tolerance) to the actual intersection point
        // the intersection point is somewhere in between. Consider snapping intersection points
        // to target vertices (this might be the start of clustering!) or use minimal tolerance!
        AddIntersectionPoints(intersectionPoints);

        IList<CrackPoint> crackPoints = crackPointCalculator.DetermineCrackPoints(
            intersectionPoints, originalGeometry, clippedSource, intersectionTarget);

        // TODO: rename to AddNonCrackablePoints / sort out whether drawing can happen straight from List<CrackPoint>
        AddCrackPoints(crackPoints);

        if (intersectionTarget != null && intersectionTarget != targetGeometry)
        {
            Marshal.ReleaseComObject(intersectionTarget);
        }

        Marshal.ReleaseComObject(targetGeometry);
    }
    catch (Exception e)
    {
        // Fixed typo in error message: 'calculationg' -> 'calculating'
        string message =
            $"Error calculating crack points with target feature {RowFormat.Format(targetFeature)}: {e.Message}";

        _msg.Debug(message, e);

        if (crackPointCalculator.ContinueOnException)
        {
            crackPointCalculator.FailedOperations.Add(Feature.OID, message);
        }
        else
        {
            throw;
        }
    }

    _msg.DebugStopTiming(watch, "Calculated and processed {0} intersection points",
                         intersectionPoints?.PointCount);
}
/// <summary>
/// Reads all distinct key values of the given field from the table into a key
/// set. Null/DBNull values are skipped; duplicate keys are ignored (and logged
/// in verbose debug mode).
/// </summary>
/// <param name="table">The table to read from.</param>
/// <param name="keyField">The name of the key field.</param>
/// <param name="whereClause">Optional where clause restricting the rows.</param>
/// <param name="keyFieldType">The field type, used to create the matching key set.</param>
/// <param name="keyFieldIndex">The index of the key field in the row.</param>
/// <returns>The set of distinct keys.</returns>
internal static IKeySet ReadKeySet([NotNull] ITable table,
                                   [NotNull] string keyField,
                                   [CanBeNull] string whereClause,
                                   esriFieldType keyFieldType,
                                   int keyFieldIndex)
{
    Assert.ArgumentNotNull(table, nameof(table));
    Assert.ArgumentNotNullOrEmpty(keyField, nameof(keyField));

    // Timing and memory tracking only in verbose debug mode
    Stopwatch watch = null;
    MemoryUsageInfo memoryUsage = null;
    if (_msg.IsVerboseDebugEnabled)
    {
        watch = _msg.DebugStartTiming();
        memoryUsage = new MemoryUsageInfo();
        memoryUsage.Refresh();
    }

    IKeySet result = CreateKeySet(keyFieldType);

    // Only the key field is needed
    var queryFilter = new QueryFilterClass
                      {
                          SubFields = keyField,
                          WhereClause = whereClause
                      };

    string tableName = DatasetUtils.GetName(table);

    const bool recycle = true;
    foreach (IRow row in GdbQueryUtils.GetRows(table, queryFilter, recycle))
    {
        object key = row.Value[keyFieldIndex];

        if (key == DBNull.Value || key == null)
        {
            continue;
        }

        // TODO handle errors (e.g. invalid guid strings)
        bool added = result.Add(key);

        if (! added)
        {
            _msg.VerboseDebugFormat(
                "Ignored duplicate key found in field '{0}' in table '{1}': {2}",
                keyField, tableName, key);
        }
    }

    if (watch != null)
    {
        _msg.DebugStopTiming(watch,
                             "Reading {0:N0} {1} keys from field '{2}' in table '{3}'",
                             result.Count, keyFieldType, keyField,
                             DatasetUtils.GetName(table));
        _msg.DebugFormat("Memory usage of keys: {0}", memoryUsage);
    }

    return (result);
}
/// <summary>
/// Verifies the tests that operate on rows found via related geometry: for
/// each table, loads the related rows once and runs every test of that table
/// against them, with progress reporting and cancellation support.
/// </summary>
/// <param name="relGeomTests">The tests grouped per table with their relationship chains.</param>
private void VerifyByRelatedGeometry(
    [NotNull] IList<TestsWithRelatedGeometry> relGeomTests)
{
    int tableIndex = -1;
    int tableCount = relGeomTests.Count;

    Stopwatch watch = _msg.DebugStartTiming();

    var progressWatch =
        new ProgressWatch(args => container_OnProgressChanged(this, args));

    foreach (TestsWithRelatedGeometry relGeomTest in relGeomTests)
    {
        tableIndex++;
        if (Cancelled)
        {
            return;
        }

        if (! relGeomTest.HasAnyAssociationsToFeatureClasses)
        {
            continue;
        }

        ITable table = relGeomTest.Table;
        IList<ITest> testsForTable = relGeomTest.Tests;

        // Load the related rows once per table, with progress reporting
        IList<IRow> rows;
        using (progressWatch.MakeTransaction(
                   Step.DataLoading, Step.DataLoaded, tableIndex, tableCount, table))
        {
            rows = GetRowsByRelatedGeometry(
                table, Assert.NotNull(relGeomTest.ObjectDataset), testsForTable[0],
                Assert.NotNull(relGeomTest.RelClassChains));
        }

        if (rows.Count == 0)
        {
            continue;
        }

        // there are rows found by related geometry

        var testIndex = 0;
        int testCount = testsForTable.Count;

        foreach (ITest test in testsForTable)
        {
            try
            {
                if (Cancelled)
                {
                    return;
                }

                // Attach handlers only for the duration of this test run
                test.TestingRow += container_TestingRow;
                test.QaError += HandleError;

                using (progressWatch.MakeTransaction(
                           Step.ITestProcessing, Step.ITestProcessed,
                           testIndex, testCount, test))
                {
                    Verify(test, rows);
                }

                testIndex++;
            }
            finally
            {
                test.TestingRow -= container_TestingRow;
                test.QaError -= HandleError;
            }
        }

        // TODO: Verify this:
        // Because the _lastInvolvedRowsReferenceGeometry is tested for reference-equality
        // in GetReferenceGeometry() and for each error a new InvolvedRows list is created,
        // this should not really have an effect:
        //_lastInvolvedRowsReferenceGeometry = null;
        //_lastReferenceGeometry = null;
    }

    _msg.DebugStopTiming(watch, "VerifyByRelatedGeometry()");
}