/// <summary>
/// Builds a point from cyclorama coordinates and returns it expressed in the
/// active map's spatial reference, projecting only when the two systems differ.
/// </summary>
public static async Task<MapPoint> CycloramaToMapPointAsync(double x, double y, double z)
{
  MapView activeView = MapView.Active;
  SpatialReference mapSpatRef = activeView?.Map?.SpatialReference;
  SpatialReference cycloSpatRef = await CycloramaSpatialReferenceAsync();
  MapPoint result = null;

  await QueuedTask.Run(() =>
  {
    MapPoint cycloPoint = MapPointBuilder.CreateMapPoint(x, y, z, cycloSpatRef);

    if (mapSpatRef == null || cycloSpatRef == null || cycloSpatRef.Wkid == mapSpatRef.Wkid)
    {
      // Same (or unknown) coordinate system: hand back an untouched copy.
      result = (MapPoint) cycloPoint.Clone();
    }
    else
    {
      ProjectionTransformation transformation = ProjectionTransformation.Create(cycloSpatRef, mapSpatRef);
      result = GeometryEngine.Instance.ProjectEx(cycloPoint, transformation) as MapPoint;
    }
  });

  return result;
}
/// <summary>
/// Projects <paramref name="srcPoint"/> into the cyclorama coordinate system,
/// looks up the terrain height at that location and, when a height is found,
/// rebuilds the point with that Z and projects it back into the source
/// spatial reference.
/// </summary>
/// <param name="srcPoint">The point to enrich with a height value.</param>
/// <returns>The point with height applied, or the input unchanged when no height is available.</returns>
public async Task<MapPoint> AddHeightToMapPointAsync(MapPoint srcPoint)
{
  return await QueuedTask.Run(async () =>
  {
    // Guard against there being no active map view: the original code
    // dereferenced MapView.Active.Map unconditionally and could throw.
    SpatialReference srcSpatialReference = MapView.Active?.Map?.SpatialReference;

    if (srcSpatialReference == null)
    {
      return srcPoint;
    }

    SpatialReference dstSpatialReference = await CoordSystemUtils.CycloramaSpatialReferenceAsync();
    ProjectionTransformation dstProjection = ProjectionTransformation.Create(srcSpatialReference, dstSpatialReference);
    MapPoint dstPoint = GeometryEngine.Instance.ProjectEx(srcPoint, dstProjection) as MapPoint;

    if (dstPoint != null)
    {
      double? height = await _cycloMediaGroupLayer.GetHeightAsync(dstPoint.X, dstPoint.Y);

      if (height != null)
      {
        // Rebuild the point with the found height, then project it back.
        dstPoint = MapPointBuilder.CreateMapPoint(dstPoint.X, dstPoint.Y, (double) height, dstSpatialReference);
        ProjectionTransformation srcProjection = ProjectionTransformation.Create(dstSpatialReference, srcSpatialReference);
        srcPoint = GeometryEngine.Instance.ProjectEx(dstPoint, srcProjection) as MapPoint;
      }
    }

    return srcPoint;
  });
}
/// <summary>
/// Stores the cone parameters, converts the recording location into the map's
/// spatial reference, subscribes to camera changes and draws the cone once.
/// </summary>
protected async Task InitializeAsync(RecordingLocation location, double angle, double hFov, Color color)
{
  // Remember the cone parameters and mark this instance as initialized.
  _angle = angle;
  _hFov = hFov;
  Color = color;
  _isInitialized = true;

  double locX = location.X;
  double locY = location.Y;
  MySpatialReference viewerCoordSystem = Settings.Instance.CycloramaViewerCoordinateSystem;

  await QueuedTask.Run(() =>
  {
    SpatialReference mapSpatRef = MapView.Active?.Map?.SpatialReference;
    SpatialReference sourceSpatRef = viewerCoordSystem?.ArcGisSpatialReference ?? mapSpatRef;
    MapPoint sourcePoint = MapPointBuilder.CreateMapPoint(locX, locY, sourceSpatRef);

    if (mapSpatRef == null || sourceSpatRef.Wkid == mapSpatRef.Wkid)
    {
      // Already in the map's coordinate system (or no map available): keep a copy.
      _mapPoint = (MapPoint) sourcePoint.Clone();
    }
    else
    {
      ProjectionTransformation transformation = ProjectionTransformation.Create(sourceSpatRef, mapSpatRef);
      _mapPoint = GeometryEngine.Instance.ProjectEx(sourcePoint, transformation) as MapPoint;
    }
  });

  // Keep the cone aligned with the camera and render it for the first time.
  MapViewCameraChangedEvent.Subscribe(OnMapViewCameraChanged);
  await RedrawConeAsync();
}
/// <summary>
/// Create sample polyline features using the geometries from the point feature layer.
/// Every 5 consecutive points become the vertices of one new polyline; any trailing
/// group of fewer than 5 points is ignored (original behavior).
/// </summary>
/// <param name="polylineLayer">Polyline geometry feature layer used to add the new features.</param>
/// <param name="pointLayer">The geometries from the point layer are used as vertices for the new line features.</param>
/// <returns>True when the edit (create) operation succeeds.</returns>
private Task<bool> constructSamplePolylines(FeatureLayer polylineLayer, FeatureLayer pointLayer)
{
  // execute the fine grained API calls on the CIM main thread
  return QueuedTask.Run(() =>
  {
    // get the underlying feature class for each layer
    var polylineFeatureClass = polylineLayer.GetTable() as FeatureClass;
    var pointFeatureClass = pointLayer.GetTable() as FeatureClass;

    // retrieve the feature class schema information for the feature classes
    var polylineDefinition = polylineFeatureClass.GetDefinition() as FeatureClassDefinition;
    var pointDefinition = pointFeatureClass.GetDefinition() as FeatureClassDefinition;

    // set up the edit operation for the feature creation
    var createOperation = new EditOperation();
    createOperation.Name = "Create polylines";
    createOperation.SelectNewFeatures = false;

    // set up the datum transformation to be used in the projection
    ProjectionTransformation transformation = ProjectionTransformation.CreateFromEnvironment(
      pointDefinition.GetSpatialReference(), polylineDefinition.GetSpatialReference());

    // initialize a counter variable
    int pointCounter = 0;
    // initialize a list to hold 5 coordinates that are used as vertices for the polyline
    var lineCoordinates = new List<Coordinate>(5);

    // construct a cursor for all point features (no QueryFilter = all features);
    // FIX: the cursor and each returned row are IDisposable and were leaked before
    using (var pointCursor = pointFeatureClass.Search(null, false))
    {
      // loop through the point features
      while (pointCursor.MoveNext())
      {
        pointCounter++;

        using (var pointFeature = pointCursor.Current as Feature)
        {
          // add the feature point geometry as a coordinate into the vertex list of
          // the line, projecting it to match the spatial reference of the line layer
          // with a datum transformation considering the different spheroids
          lineCoordinates.Add(((MapPoint) GeometryEngine.ProjectEx(pointFeature.GetShape(), transformation)).Coordinate);
        }

        // for every 5 geometries, construct a new polyline and queue a feature create
        if (pointCounter % 5 == 0)
        {
          // construct a new polyline by using the 5 point coordinates in the current list
          var newPolyline = PolylineBuilder.CreatePolyline(lineCoordinates, polylineDefinition.GetSpatialReference());

          // queue the create operation as part of the edit operation
          createOperation.Create(polylineLayer, newPolyline);

          // reset the list of coordinates
          lineCoordinates = new List<Coordinate>(5);
        }
      }
    }

    // execute the edit (create) operation
    return createOperation.ExecuteAsync();
  });
}
/// <summary>
/// Reprojects a geometry from one spatial reference (given by well-known id)
/// to another, letting ProjectEx choose an appropriate transformation.
/// </summary>
/// <param name="inGeometry">The geometry to reproject.</param>
/// <param name="inSRID">Well-known id of the geometry's current spatial reference.</param>
/// <param name="outSRID">Well-known id of the target spatial reference.</param>
/// <returns>The reprojected geometry.</returns>
private static ArcGIS.Core.Geometry.Geometry SRTransform(ArcGIS.Core.Geometry.Geometry inGeometry, int inSRID, int outSRID)
{
  SpatialReference sourceSR = SpatialReferenceBuilder.CreateSpatialReference(inSRID);
  SpatialReference targetSR = SpatialReferenceBuilder.CreateSpatialReference(outSRID);
  ProjectionTransformation transformation = ProjectionTransformation.Create(sourceSR, targetSR);
  return GeometryEngine.Instance.ProjectEx(inGeometry, transformation);
}
/// <summary>
/// Projects the camera point with the supplied transformation and appends a
/// fixed-arc camera keyframe at the given time offset on the camera track.
/// </summary>
/// <param name="orig_cameraPoint">Camera location in the source spatial reference.</param>
/// <param name="transformation">Transformation into the map's spatial reference.</param>
/// <param name="cameraTrack">The animation track receiving the keyframe.</param>
/// <param name="currentTimespanValue">Time offset of the keyframe within the animation.</param>
private static async Task CreateCameraKeyframe(MapPoint orig_cameraPoint, ProjectionTransformation transformation, CameraTrack cameraTrack, TimeSpan currentTimespanValue)
{
  await QueuedTask.Run(() =>
  {
    MapPoint projected_cameraPoint = (MapPoint) GeometryEngine.Instance.ProjectEx(orig_cameraPoint, transformation);

    // _cameraZOffset lifts the camera above the path; _keyframeHeading is the
    // heading computed elsewhere in this class for the current keyframe.
    var camera = new Camera(projected_cameraPoint.X, projected_cameraPoint.Y, _cameraZOffset,
      _keyframeHeading, null, CameraViewpoint.LookAt);

    // The keyframe is owned by the track; the original kept an unused local for it.
    cameraTrack.CreateKeyframe(camera, currentTimespanValue, AnimationTransition.FixedArc, .5);
  });
}
/// <summary>
/// Reprojects the given point from the supplied spatial reference into WGS84.
/// </summary>
/// <param name="p">The point to reproject.</param>
/// <param name="sr">The spatial reference whose latest WKID describes the point.</param>
/// <returns>The point expressed in WGS84.</returns>
private Task<MapPoint> Project(MapPoint p, SpatialReference sr)
{
  return QueuedTask.Run(() =>
  {
    // Rebuild the spatial reference from its latest WKID before projecting.
    SpatialReference sourceSR = SpatialReferenceBuilder.CreateSpatialReference(sr.LatestWkid);
    ProjectionTransformation toWgs84 = ProjectionTransformation.Create(sourceSR, SpatialReferences.WGS84);
    return GeometryEngine.Instance.ProjectEx(p, toWgs84) as MapPoint;
  });
}
/// <summary>
/// Opens a cyclorama image for the location stored on the (dynamic) DataContext.
/// </summary>
/// <param name="replace">True to replace the image in the active viewer instead of opening a new one.</param>
private async Task OpenImageAsync(bool replace)
{
  if (_api != null)
  {
    // Location is an "x,y" string maintained by the view model bound to this pane.
    string location = ((dynamic)DataContext).Location;
    bool nearest = ((dynamic)DataContext).Nearest;
    _api.SetActiveViewerReplaceMode(replace);

    if (nearest)
    {
      // "Nearest" mode: the stored location may still be expressed in the spatial
      // reference used for the previous request; reproject it to the currently
      // configured cyclorama viewer coordinate system before querying.
      MySpatialReference spatialReference = _settings.CycloramaViewerCoordinateSystem;
      SpatialReference thisSpatialReference = spatialReference.ArcGisSpatialReference ??
        await spatialReference.CreateArcGisSpatialReferenceAsync();

      if ((_lastSpatialReference != null) && (thisSpatialReference.Wkid != _lastSpatialReference.Wkid))
      {
        // Parse "x,y" with the invariant culture — the string is machine-generated.
        string[] splitLoc = location.Split(',');
        CultureInfo ci = CultureInfo.InvariantCulture;
        double x = double.Parse(splitLoc.Length >= 1 ? splitLoc[0] : "0.0", ci);
        double y = double.Parse(splitLoc.Length >= 2 ? splitLoc[1] : "0.0", ci);
        MapPoint point = null;

        await QueuedTask.Run(() =>
        {
          point = MapPointBuilder.CreateMapPoint(x, y, _lastSpatialReference);
          ProjectionTransformation projection = ProjectionTransformation.Create(_lastSpatialReference, thisSpatialReference);
          point = GeometryEngine.Instance.ProjectEx(point, projection) as MapPoint;
        });

        if (point != null)
        {
          location = string.Format(ci, "{0},{1}", point.X, point.Y);

          // Write the reprojected location back WITHOUT re-triggering our own
          // PropertyChanged handler: unsubscribe, set, resubscribe.
          DockPaneGlobeSpotter globeSpotter = ((dynamic)DataContext);
          globeSpotter.PropertyChanged -= OnGlobeSpotterPropertyChanged;
          ((dynamic)DataContext).Location = location;
          globeSpotter.PropertyChanged += OnGlobeSpotterPropertyChanged;
        }
      }

      _startOpenNearest = true;
      _api.OpenNearestImage(location, _settings.CtrlClickHashTag * _settings.CtrlClickDelta);
    }
    else
    {
      _api.OpenImage(location);
    }

    // Remember the coordinate system used for this request so the next call can
    // detect a change and reproject the stored location accordingly.
    MySpatialReference cycloSpatialReference = _settings.CycloramaViewerCoordinateSystem;
    _lastSpatialReference = cycloSpatialReference.ArcGisSpatialReference ??
      await cycloSpatialReference.CreateArcGisSpatialReferenceAsync();
  }
}
/// <summary>
/// Creates a new camera offset from the provided camera around an ellipse.
/// </summary>
/// <param name="camera">The starting camera.</param>
/// <param name="ellipse">The ellipse around which the camera will rotate.</param>
/// <param name="centerPoint">The center point of the ellipse.</param>
/// <param name="percentAlong">The percentage around the ellipse to create the camera.</param>
private Camera OffsetCamera(Camera camera, Polyline ellipse, MapPoint centerPoint, double percentAlong)
{
  // Work on a copy so the caller's camera is not mutated.
  camera = CloneCamera(camera);

  // Position on the ellipse at the requested fraction along its length.
  var fromPoint = GeometryEngine.MovePointAlongLine(ellipse, percentAlong, true, 0);

  // Geodesic length of the purely east-west segment between the ellipse point and
  // the center, negated when the ellipse point lies east of the center so the
  // offset is signed.
  var segment = LineBuilder.CreateLineSegment(new Coordinate2D(centerPoint.X, centerPoint.Y), new Coordinate2D(fromPoint.X, centerPoint.Y), centerPoint.SpatialReference);
  var difX = GeometryEngine.GeodesicLength(PolylineBuilder.CreatePolyline(segment, segment.SpatialReference));

  if (centerPoint.X - fromPoint.X < 0)
  {
    difX *= -1;
  }

  // Same signed geodesic offset for the north-south component.
  segment = LineBuilder.CreateLineSegment(new Coordinate2D(centerPoint.X, centerPoint.Y), new Coordinate2D(centerPoint.X, fromPoint.Y), centerPoint.SpatialReference);
  var difY = GeometryEngine.GeodesicLength(PolylineBuilder.CreatePolyline(segment, segment.SpatialReference));

  if (centerPoint.Y - fromPoint.Y < 0)
  {
    difY *= -1;
  }

  // Heading from the signed offsets (degrees, sign flipped) — presumably so the
  // camera faces the center point; TODO confirm against the map heading convention.
  var radian = Math.Atan2(difX, difY);
  var heading = radian * -180 / Math.PI;
  camera.Heading = heading;

  // Height difference between center and camera. The camera Z is scaled by the
  // unit conversion factor when its spatial reference is projected (non-geographic).
  var difZ = centerPoint.Z - (camera.Z * ((camera.SpatialReference.IsGeographic) ? 1.0 : camera.SpatialReference.Unit.ConversionFactor));
  var hypotenuse = GeometryEngine.GeodesicDistance(fromPoint, centerPoint);
  radian = Math.Atan2(difZ, hypotenuse);
  var pitch = radian * 180 / Math.PI;
  camera.Pitch = pitch;

  // Move the camera to the ellipse point, projecting into the camera's spatial
  // reference first when the two differ.
  if (fromPoint.SpatialReference.Wkid != camera.SpatialReference.Wkid)
  {
    var transformation = ProjectionTransformation.Create(fromPoint.SpatialReference, camera.SpatialReference);
    fromPoint = GeometryEngine.ProjectEx(fromPoint, transformation) as MapPoint;
  }

  camera.X = fromPoint.X;
  camera.Y = fromPoint.Y;
  return (camera);
}
// Builds a camera animation following the "MyRide" polyline layer: measures the
// total 3D path length and segment count, then creates keyframes at the vertices.
// NOTE: async void is only acceptable here because OnClick is a framework-invoked
// top-level event handler.
protected override async void OnClick()
{
  FeatureLayer ftrLayer = (MapView.Active.Map.Layers.First(layer => layer.Name.Equals("MyRide")) as FeatureLayer);

  // Transformation from the layer's spatial reference into the map's.
  ProjectionTransformation transformation = await QueuedTask.Run(() => ProjectionTransformation.Create(ftrLayer.GetSpatialReference(), MapView.Active.Map.SpatialReference));
  SpatialReference layerSpatRef = await QueuedTask.Run(() => ftrLayer.GetSpatialReference());
  Polyline lineGeom = await GetPolyFineFromLayer(ftrLayer);

  //couldn't get the selected feature
  if (lineGeom == null)
  {
    return;
  }

  var animation = MapView.Active.Map.Animation;
  var cameraTrack = animation.Tracks.OfType<CameraTrack>().First();
  var keyframes = cameraTrack.Keyframes;

  //Get segment list for line
  ReadOnlyPartCollection polylineParts = lineGeom.Parts;

  //get total segment count and determine path length
  double pathLength = 0;
  int segmentCount = 0;
  IEnumerator<ReadOnlySegmentCollection> segments = polylineParts.GetEnumerator();

  while (segments.MoveNext())
  {
    ReadOnlySegmentCollection seg = segments.Current;

    foreach (Segment s in seg)
    {
      // Straight-line 3D length of the segment (XYZ Euclidean distance).
      double length3D = Math.Sqrt((s.EndPoint.X - s.StartPoint.X) * (s.EndPoint.X - s.StartPoint.X) + (s.EndPoint.Y - s.StartPoint.Y) * (s.EndPoint.Y - s.StartPoint.Y) + (s.EndPoint.Z - s.StartPoint.Z) * (s.EndPoint.Z - s.StartPoint.Z));
      pathLength += length3D;
      segmentCount += 1;
    }
  }

  // The enumerator is handed over exhausted; the callee is expected to Reset() it.
  // NOTE(review): the CreateKeyframes_AtVertices declared elsewhere in this file
  // also takes the Polyline itself — confirm this call matches the intended overload.
  await CreateKeyframes_AtVertices(MapView.Active, layerSpatRef, transformation, cameraTrack, segments, segmentCount, pathLength);
}
/// <summary>
/// Draws (or moves) a black cross marker at the given cyclorama location on the
/// active map view, replacing the previously drawn marker overlay.
/// </summary>
/// <param name="x">X coordinate in the cyclorama viewer coordinate system.</param>
/// <param name="y">Y coordinate in the cyclorama viewer coordinate system.</param>
/// <param name="size">Marker size.</param>
public async Task UpdateAsync(double x, double y, double size)
{
  MySpatialReference viewerCoordSystem = Settings.Instance.CycloramaViewerCoordinateSystem;

  await QueuedTask.Run(() =>
  {
    MapView activeView = MapView.Active;
    SpatialReference mapSpatRef = activeView?.Map?.SpatialReference;
    SpatialReference sourceSpatRef = viewerCoordSystem?.ArcGisSpatialReference ?? mapSpatRef;
    MapPoint sourcePoint = MapPointBuilder.CreateMapPoint(x, y, sourceSpatRef);
    MapPoint mapPoint;

    if (mapSpatRef == null || sourceSpatRef.Wkid == mapSpatRef.Wkid)
    {
      // Already in the map's coordinate system (or no map): keep a copy as-is.
      mapPoint = (MapPoint) sourcePoint.Clone();
    }
    else
    {
      ProjectionTransformation transformation = ProjectionTransformation.Create(sourceSpatRef, mapSpatRef);
      mapPoint = GeometryEngine.Instance.ProjectEx(sourcePoint, transformation) as MapPoint;
    }

    if (mapPoint != null && !mapPoint.IsEmpty)
    {
      // Build a cross marker symbol and add it as a map overlay; dispose the
      // previous overlay only after the new one is in place.
      CIMColor markerColor = ColorFactory.Instance.CreateColor(Color.Black);
      CIMMarker marker = SymbolFactory.Instance.ConstructMarker(markerColor, size, SimpleMarkerStyle.Cross);
      CIMPointSymbol pointSymbol = SymbolFactory.Instance.ConstructPointSymbol(marker);
      IDisposable newCross = activeView.AddOverlay(mapPoint, pointSymbol.MakeSymbolReference());
      _disposeCross?.Dispose();
      _disposeCross = newCross;
    }
  });
}
/// <summary>
/// Returns the active map view's extent reprojected to WGS84 and clamped to
/// valid latitude/longitude bounds; the full extent when no view is active.
/// </summary>
private Task<GeospatialBox> GetCurrentViewport()
{
  Debug.Assert(MapView.Active != null);

  if (MapView.Active == null)
  {
    // No active map view: fall back to the whole world.
    return Task.FromResult(GeospatialBox.FullExtent);
  }

  return QueuedTask.Run(() =>
  {
    var extent = MapView.Active.Extent;
    var extentEnvelope = EnvelopeBuilder.CreateEnvelope(extent.XMin, extent.YMin, extent.XMax, extent.YMax, extent.SpatialReference);

    // Project the envelope into WGS84 before clamping.
    ProjectionTransformation toWgs84 = ProjectionTransformation.Create(extent.SpatialReference, SpatialReferences.WGS84);
    var wgsEnvelope = GeometryEngine.Instance.ProjectEx(extentEnvelope, toWgs84) as Envelope;

    return new GeospatialBox()
    {
      North = Math.Min(wgsEnvelope.YMax, 90.0),   // north up to +90 deg
      South = Math.Max(wgsEnvelope.YMin, -90.0),  // south up to -90 deg
      West = Math.Max(wgsEnvelope.XMin, -180.0),  // west up to -180 deg
      East = Math.Min(wgsEnvelope.XMax, 180.0)    // east up to +180 deg
    };
  });
}
/// <summary>
/// Create keyframes centered around a point: rotates the camera around a geodesic
/// ellipse in steps of 10 degrees (plus one keyframe for any remainder).
/// </summary>
/// <param name="point">The center point around which the keyframes are created.</param>
internal Task CreateKeyframesAroundPoint(MapPoint point)
{
  return QueuedTask.Run(() =>
  {
    var mapView = MapView.Active;
    var degrees = Animation.Settings.Degrees;

    if (mapView == null || degrees == 0)
    {
      return;
    }

    //Get the camera track from the active map's animation.
    //There will always be only one camera track in the animation.
    var cameraTrack = mapView.Map.Animation.Tracks.OfType<CameraTrack>().First();
    var camera = mapView.Camera;

    //Calculate the number of keys to create: one every 10 degrees, signed by direction.
    var keyEvery = (degrees < 0) ? -10 : 10;
    var numOfKeys = Math.Floor(degrees / keyEvery);
    var remainder = degrees % keyEvery;

    //To maintain a constant speed, divide the total animation time by the degrees of rotation.
    var duration = Animation.Settings.Duration;
    double timeInterval = duration / Math.Abs(degrees);
    double currentTimeSeconds = GetInsertTime(mapView.Map.Animation);

    //Distance from the current camera location to the rotation point gives the radius.
    var cameraPoint = MapPointBuilder.CreateMapPoint(camera.X, camera.Y, camera.SpatialReference);
    var radius = GeometryEngine.GeodesicDistance(cameraPoint, point);
    var radian = ((camera.Heading - 90) / 180.0) * Math.PI;

    //If the point's spatial reference is projected with non-meter units, convert Z to meters.
    if (!point.SpatialReference.IsGeographic && point.SpatialReference.Unit.ConversionFactor != 1.0)
    {
      point = MapPointBuilder.CreateMapPoint(point.X, point.Y, point.Z * point.SpatialReference.Unit.ConversionFactor, point.SpatialReference);
    }

    //All geodesic calculations use WGS84, so project the point if it is not already.
    if (point.SpatialReference.Wkid != SpatialReferences.WGS84.Wkid)
    {
      var transformation = ProjectionTransformation.Create(point.SpatialReference, SpatialReferences.WGS84);
      point = GeometryEngine.ProjectEx(point, transformation) as MapPoint;
    }

    //Create an ellipse (circle: both semi-axes = radius) around the center point.
    var parameter = new GeometryEngine.GeodesicEllipseParameter();
    parameter.Center = point.Coordinate;
    parameter.SemiAxis1Length = radius;
    parameter.SemiAxis2Length = radius;
    parameter.AxisDirection = radian;
    parameter.LinearUnit = LinearUnit.Meters;
    parameter.OutGeometryType = GeometryType.Polyline;
    parameter.VertexCount = 36;
    var ellipse = GeometryEngine.GeodesicEllipse(parameter, point.SpatialReference) as Polyline;

    //For each key, progressively rotate around the ellipse and compute the camera position.
    for (int i = 0; i <= numOfKeys; i++)
    {
      var percentAlong = ((Math.Abs(keyEvery) * i) % 360) / 360.0;

      if (keyEvery > 0)
      {
        percentAlong = 1 - percentAlong;
      }

      //Get the camera at the position around the ellipse.
      camera = OffsetCamera(camera, ellipse, point, percentAlong);

      //Increment the time by the amount of time per key.
      if (i != 0)
      {
        currentTimeSeconds += (timeInterval * Math.Abs(keyEvery));
      }

      //Create a new keyframe for the camera.
      cameraTrack.CreateKeyframe(camera, TimeSpan.FromSeconds(currentTimeSeconds), AnimationTransition.FixedArc);
    }

    //For any leftover rotation create one more keyframe. For example 155 degrees gets
    //a keyframe every 10 degrees and then one for the final 5 degrees.
    if (remainder != 0.0)
    {
      var percentAlong = ((Math.Abs(keyEvery) * numOfKeys + Math.Abs(remainder)) % 360) / 360.0;

      if (remainder > 0)
      {
        percentAlong = 1 - percentAlong;
      }

      //FIX: the offset camera was previously computed but discarded, so the final
      //keyframe reused the previous camera position; keep the returned camera.
      camera = OffsetCamera(camera, ellipse, point, percentAlong);

      //Increment the time and create the keyframe.
      currentTimeSeconds += (timeInterval * Math.Abs(remainder));
      cameraTrack.CreateKeyframe(camera, TimeSpan.FromSeconds(currentTimeSeconds), AnimationTransition.FixedArc);
    }
  });
}
/// <summary>
/// Synchronizes the measurement points held by the cyclorama API with the vertices
/// of the given sketch geometry: points that no longer match a vertex are removed,
/// and new vertices are added as measurement points.
/// </summary>
/// <param name="geometry">The sketch geometry whose vertices drive the update.</param>
public async Task UpdateMeasurementPointsAsync(Geometry geometry)
{
  // _updateMeasurement acts as a re-entrancy guard while an update is in progress.
  if ((geometry != null) && (!_updateMeasurement))
  {
    _updateMeasurement = true;
    List<MapPoint> ptColl = await ToPointCollectionAsync(geometry);

    if (ptColl != null)
    {
      int msPoints = Count;
      // Existing measurement points, initially all flagged for removal (true);
      // points matched against a sketch vertex below are flipped back to false.
      var toRemove = new Dictionary<MeasurementPoint, bool>();
      var toAdd = new List<MapPoint>();
      bool toRemoveFrom = false;

      for (int i = 0; i < msPoints; i++)
      {
        MeasurementPoint measurementPoint = GetPointByNr(i);

        // Candidates for removal: created points of a multi-point measurement, or
        // (for a point measurement) any point once at least one vertex exists.
        if ((measurementPoint != null) && (((!measurementPoint.NotCreated) && (!IsPointMeasurement)) || (IsPointMeasurement && (PointNr >= 1))))
        {
          toRemove.Add(measurementPoint, true);
        }
      }

      // Walk the sketch vertices in order; once one unmatched vertex is found,
      // every later vertex is treated as new (toRemoveFrom) to preserve ordering.
      for (int j = 0; j < PointNr; j++)
      {
        MapPoint point = ptColl[j];
        var measurementPoint = GetPoint(point);

        if (measurementPoint == null)
        {
          toAdd.Add(point);
          toRemoveFrom = true;
        }
        else
        {
          if (!toRemoveFrom)
          {
            // Vertex still matches an existing point: keep it (unflag removal).
            if (toRemove.ContainsKey(measurementPoint))
            {
              toRemove[measurementPoint] = false;
            }
          }
          else
          {
            toAdd.Add(point);
          }
        }
      }

      // Only touch the API when at least one point must be removed or added.
      if (toRemove.Aggregate(false, (current, remove) => remove.Value || current) || (toAdd.Count >= 1))
      {
        if (!IsPointMeasurement)
        {
          DisableMeasurementSeries();
        }

        foreach (var elem in toRemove)
        {
          if (elem.Value && GlobeSpotterConfiguration.MeasurePermissions)
          {
            MeasurementPoint msPoint = elem.Key;
            int pointId = msPoint.PointId;
            _api?.RemoveMeasurementPoint(EntityId, pointId);
          }
        }

        foreach (var point in toAdd)
        {
          // Project each new vertex from its own spatial reference (falling back
          // to the cyclorama system) into the cyclorama coordinate system.
          MapView mapView = MapView.Active;
          Map map = mapView?.Map;
          SpatialReference mapSpatRef = map?.SpatialReference;
          MySpatialReference myCyclSpatRef = _settings.CycloramaViewerCoordinateSystem;
          SpatialReference cyclSpatRef = (myCyclSpatRef == null) ? mapSpatRef : (myCyclSpatRef.ArcGisSpatialReference ?? (await myCyclSpatRef.CreateArcGisSpatialReferenceAsync()));
          SpatialReference layerSpatRef = point.SpatialReference ?? cyclSpatRef;
          MapPoint copyGsPoint = null;

          await QueuedTask.Run(() =>
          {
            ProjectionTransformation projection = ProjectionTransformation.Create(layerSpatRef, cyclSpatRef);
            copyGsPoint = GeometryEngine.Instance.ProjectEx(point, projection) as MapPoint;
          });

          CreateMeasurementPoint(copyGsPoint);
        }

        if (!IsPointMeasurement)
        {
          EnableMeasurementSeries();
        }
      }
    }

    _updateMeasurement = false;
  }
}
/// <summary> /// asynchronous function to request or this spatial reference exists in the current area /// </summary> public async Task <bool> ExistsInAreaAsync() { await QueuedTask.Run(() => { if (ArcGisSpatialReference == null) { CreateSpatialReference(); } if (ArcGisSpatialReference != null) { MapView activeView = MapView.Active; Envelope envelope = null; if (activeView == null) { FileSettings settings = FileSettings.Instance; SpatialReference spatialReference = settings.CycloramaViewerCoordinateSystem; if (spatialReference != null) { if (spatialReference.ArcGisSpatialReference == null) { spatialReference.CreateSpatialReference(); } if (spatialReference.ArcGisSpatialReference != null) { Bounds bounds = spatialReference.NativeBounds; var minCoordinate = new Coordinate2D(bounds.MinX, bounds.MinY); var maxCoordinate = new Coordinate2D(bounds.MaxX, bounds.MaxY); envelope = EnvelopeBuilder.CreateEnvelope(minCoordinate, maxCoordinate, spatialReference.ArcGisSpatialReference); } } } else { envelope = activeView.Extent; } if (envelope != null) { ArcGISSpatialReference spatEnv = envelope.SpatialReference; int spatEnvFactoryCode = spatEnv?.Wkid ?? 0; if (spatEnv != null && spatEnvFactoryCode != ArcGisSpatialReference.Wkid) { try { ProjectionTransformation projection = ProjectionTransformation.Create(envelope.SpatialReference, ArcGisSpatialReference); if (!(GeometryEngine.Instance.ProjectEx(envelope, projection) is Envelope copyEnvelope) || copyEnvelope.IsEmpty) { ArcGisSpatialReference = null; } else { if (NativeBounds != null) { double xMin = NativeBounds.MinX; double yMin = NativeBounds.MinY; double xMax = NativeBounds.MaxX; double yMax = NativeBounds.MaxY; if (copyEnvelope.XMin < xMin || copyEnvelope.XMax > xMax || copyEnvelope.YMin < yMin || copyEnvelope.YMax > yMax) { ArcGisSpatialReference = null; } } } } catch (ArgumentException) { ArcGisSpatialReference = null; } }
/// <summary>
/// Redraws the viewing cone (a filled triangle plus a border polyline) for this
/// recording on the active map view, replacing the previously drawn overlays.
/// </summary>
private async Task RedrawConeAsync()
{
  await QueuedTask.Run(() =>
  {
    GlobeSpotter globeSpotter = GlobeSpotter.Current;

    // Only draw when zoomed inside the configured scale and we have a location and color.
    if ((globeSpotter.InsideScale()) && (!_mapPoint.IsEmpty) && (Color != null))
    {
      var thisColor = (SystCol)Color;
      MapView thisView = MapView.Active;
      Map map = thisView.Map;
      SpatialReference mapSpat = map.SpatialReference;
      SpatialReference mapPointSpat = _mapPoint.SpatialReference;

      // Re-project _mapPoint into the map's spatial reference (the map may have changed).
      ProjectionTransformation projection = ProjectionTransformation.Create(mapPointSpat, mapSpat);
      _mapPoint = GeometryEngine.Instance.ProjectEx(_mapPoint, projection) as MapPoint;

      // Compute the two cone edge endpoints in screen space: half the field of
      // view on either side of the (rotated) view direction, angles in radians.
      WinPoint point = thisView.MapToScreen(_mapPoint);
      double angleh = (_hFov * Math.PI) / 360;
      double angle = (((270 + _angle) % 360) * Math.PI) / 180;
      double angle1 = angle - angleh;
      double angle2 = angle + angleh;
      double x = point.X;
      double y = point.Y;
      double size = Size / 2;
      WinPoint screenPoint1 = new WinPoint((x + (size * Math.Cos(angle1))), (y + (size * Math.Sin(angle1))));
      WinPoint screenPoint2 = new WinPoint((x + (size * Math.Cos(angle2))), (y + (size * Math.Sin(angle2))));
      MapPoint point1 = thisView.ScreenToMap(screenPoint1);
      MapPoint point2 = thisView.ScreenToMap(screenPoint2);

      // Filled triangle: apex at the recording location; alpha depends on blink state.
      IList<MapPoint> polygonPointList = new List<MapPoint>();
      polygonPointList.Add(_mapPoint);
      polygonPointList.Add(point1);
      polygonPointList.Add(point2);
      polygonPointList.Add(_mapPoint);
      Polygon polygon = PolygonBuilder.CreatePolygon(polygonPointList);
      Color colorPolygon = SystCol.FromArgb(_blinking ? BlinkAlpha : NormalAlpha, thisColor);
      CIMColor cimColorPolygon = ColorFactory.Instance.CreateColor(colorPolygon);
      CIMPolygonSymbol polygonSymbol = SymbolFactory.Instance.DefaultPolygonSymbol;
      polygonSymbol.SetColor(cimColorPolygon);
      polygonSymbol.SetOutlineColor(null);
      CIMSymbolReference polygonSymbolReference = polygonSymbol.MakeSymbolReference();
      IDisposable disposePolygon = thisView.AddOverlay(polygon, polygonSymbolReference);

      // Border polyline: yellow when active, gray otherwise; thicker while blinking.
      IList<MapPoint> linePointList = new List<MapPoint>();
      linePointList.Add(point1);
      linePointList.Add(_mapPoint);
      linePointList.Add(point2);
      Polyline polyline = PolylineBuilder.CreatePolyline(linePointList);
      Color colorLine = _active ? SystCol.Yellow : SystCol.Gray;
      CIMColor cimColorLine = ColorFactory.Instance.CreateColor(colorLine);
      CIMLineSymbol cimLineSymbol = SymbolFactory.Instance.DefaultLineSymbol;
      cimLineSymbol.SetColor(cimColorLine);
      cimLineSymbol.SetSize(_blinking ? BorderSizeBlinking : BorderSize);
      CIMSymbolReference lineSymbolReference = cimLineSymbol.MakeSymbolReference();
      IDisposable disposePolyLine = thisView.AddOverlay(polyline, lineSymbolReference);

      // Swap in the new overlays before disposing the old ones.
      _disposePolygon?.Dispose();
      _disposePolygon = disposePolygon;
      _disposePolyLine?.Dispose();
      _disposePolyLine = disposePolyLine;

      if (_blinking)
      {
        // One-shot timer (-1 period) that calls ResetBlinking after BlinkTime.
        var blinkEvent = new AutoResetEvent(true);
        var blinkTimerCallBack = new TimerCallback(ResetBlinking);
        _blinkTimer = new Timer(blinkTimerCallBack, blinkEvent, BlinkTime, -1);
      }
    }
    else
    {
      // Out of scale or nothing to draw: remove any existing overlays.
      _disposePolygon?.Dispose();
      _disposePolyLine?.Dispose();
    }
  });
}
/// <summary>
/// Projects the camera point and appends a fixed-arc keyframe to the camera track.
/// In 2D ("Map") viewing mode the camera has no pitch and uses a fixed Z offset;
/// otherwise the point's own Z plus the offset is used together with the pitch.
/// </summary>
/// <param name="mapView">The map view, used to determine 2D vs 3D viewing mode.</param>
/// <param name="orig_cameraPoint">Camera location in the source spatial reference.</param>
/// <param name="transformation">Transformation into the map's spatial reference.</param>
/// <param name="cameraTrack">The animation track receiving the keyframe.</param>
/// <param name="currentTimespanValue">Time offset of the keyframe within the animation.</param>
/// <param name="pitch">Camera pitch (3D mode only).</param>
/// <param name="heading">Camera heading.</param>
/// <param name="ignoreRotation">When true, disables rotation transitions on the keyframe.</param>
/// <param name="ignoreTranslation">When true, disables translation transitions on the keyframe.</param>
private static async Task CreateCameraKeyframe(MapView mapView, MapPoint orig_cameraPoint, ProjectionTransformation transformation, CameraTrack cameraTrack, TimeSpan currentTimespanValue, double pitch, double heading, bool ignoreRotation = false, bool ignoreTranslation = false)
{
  await QueuedTask.Run(() =>
  {
    var projectedPoint = (MapPoint) GeometryEngine.Instance.ProjectEx(orig_cameraPoint, transformation);

    Camera camera = mapView.ViewingMode == MapViewingMode.Map
      ? new Camera(projectedPoint.X, projectedPoint.Y, CameraZOffset, heading, null, CameraViewpoint.LookAt)
      : new Camera(projectedPoint.X, projectedPoint.Y, (projectedPoint.Z + CameraZOffset), pitch, heading, null, CameraViewpoint.LookAt);

    Keyframe keyFrame = cameraTrack.CreateKeyframe(camera, currentTimespanValue, AnimationTransition.FixedArc, .5);

    if (ignoreRotation)
    {
      // Freeze all rotation transitions for this keyframe.
      CameraKeyframe camKey = keyFrame as CameraKeyframe;
      camKey.HeadingTransition = AnimationTransition.None;
      camKey.RollTransition = AnimationTransition.None;
      camKey.PitchTransition = AnimationTransition.None;
    }

    if (ignoreTranslation)
    {
      // Freeze all translation transitions for this keyframe.
      CameraKeyframe camKey = keyFrame as CameraKeyframe;
      camKey.XTransition = AnimationTransition.None;
      camKey.YTransition = AnimationTransition.None;
      camKey.ZTransition = AnimationTransition.None;
    }
  });
}
//Use this method if you want keyframes ONLY at line vertices. This is good if the line is highly densified.
//However, you will get sharp turns at corners because there is no attempt to smooth the animation
//NOTE: lineGeom is currently unused; it is kept for interface compatibility with callers.
public static async Task CreateKeyframes_AtVertices(MapView mapView, SpatialReference layerSpatRef, ProjectionTransformation transformation, CameraTrack cameraTrack, Polyline lineGeom, IEnumerator<ReadOnlySegmentCollection> segments, int segmentCount, double pathLength)
{
  double segmentLength = 0;
  int num_iterations = 0;

  //Start where any existing animation ends (plus a small append gap), otherwise at 0.
  //FIX: this was previously re-initialized inside the while loop for every part of a
  //multi-part line, which restarted the clock and produced overlapping keyframe times.
  double accumulatedDuration = mapView.Map.Animation.Duration.TotalSeconds + ((mapView.Map.Animation.Duration.TotalSeconds > 0) ? ANIMATION_APPEND_TIME : 0);

  segments.Reset();

  //process each segment depending upon its type - straight line or arc
  while (segments.MoveNext())
  {
    ReadOnlySegmentCollection seg = segments.Current;

    foreach (Segment s in seg)
    {
      //3D length of this segment; its duration is proportional to its share of the path
      segmentLength = Math.Sqrt((s.EndPoint.X - s.StartPoint.X) * (s.EndPoint.X - s.StartPoint.X) + (s.EndPoint.Y - s.StartPoint.Y) * (s.EndPoint.Y - s.StartPoint.Y) + (s.EndPoint.Z - s.StartPoint.Z) * (s.EndPoint.Z - s.StartPoint.Z));
      double segmentDuration = (TotalDuration / pathLength) * segmentLength;

      MapPoint startPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.StartPoint.X, s.StartPoint.Y, s.StartPoint.Z, layerSpatRef));
      MapPoint endPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.EndPoint.X, s.EndPoint.Y, s.EndPoint.Z, layerSpatRef));

      //create keyframe at start vertex of path in map space
      double timeSpanValue = accumulatedDuration;
      TimeSpan keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
      SetPitchAndHeadingForLine(startPt, endPt);
      await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

      //Create a keyframe at end point of segment only for the end point of last segment
      //Otherwise we will get duplicate keyframes at end of one segment and start of the next one
      if (num_iterations == segmentCount - 1)
      {
        timeSpanValue = accumulatedDuration + segmentDuration;
        keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);

        if (SelectedCameraView == "Face target")
        {
          SetPitchAndHeadingForLine(endPt, TargetPoint);
        }

        await CreateCameraKeyframe(mapView, endPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
      }

      accumulatedDuration += segmentDuration;
      num_iterations++;
    }
  }
}
//Use this method to create a keyframe at every n-second of the specified animation duration
/// <summary>
/// Creates keyframes spaced (approximately) every <paramref name="keyEveryNSecond"/> seconds of
/// the animation. First pass walks the segments and collects evenly spaced points along the path
/// (straight lines and elliptic arcs handled separately); second pass turns the collected points
/// into camera keyframes at n-second intervals.
/// </summary>
/// <param name="mapView">Active map view whose animation is being extended.</param>
/// <param name="layerSpatRef">Spatial reference of the line layer.</param>
/// <param name="transformation">Layer-to-map projection passed through to CreateCameraKeyframe.</param>
/// <param name="cameraTrack">Camera track that receives the keyframes.</param>
/// <param name="segments">Enumerator over the polyline's segment collections; Reset() and re-enumerated here.</param>
/// <param name="segmentCount">Total number of segments across all parts.</param>
/// <param name="pathLength">Total 3D length of the path.</param>
/// <param name="keyEveryNSecond">Desired spacing between keyframes, in seconds of animation time.</param>
public static async Task CreateKeyframes_EveryNSeconds(MapView mapView, SpatialReference layerSpatRef, ProjectionTransformation transformation, CameraTrack cameraTrack, IEnumerator<ReadOnlySegmentCollection> segments, int segmentCount, double pathLength, double keyEveryNSecond = 1)
{
  double segmentLength = 0;
  int numKeysToCreate = (int)(TotalDuration / keyEveryNSecond); //approximately
  //Constant speed is assumed, so an even time spacing maps to an even distance spacing.
  double createKeyAtDist = pathLength / numKeysToCreate;
  //Path distance carried over from segments too short to host their own keyframe.
  double skippedDistance = 0;
  //Append after any existing animation on the track (plus a small gap).
  double accumulatedDuration = mapView.Map.Animation.Duration.TotalSeconds + ((mapView.Map.Animation.Duration.TotalSeconds > 0) ? ANIMATION_APPEND_TIME : 0); // 0;
  int num_iterations = 0;
  segments.Reset();
  List<MapPoint> pointsForKeyframes = new List<MapPoint>();
  MapPoint pathEndPt = null;

  //process each segment depending upon its type - straight line or arc
  while (segments.MoveNext())
  {
    ReadOnlySegmentCollection seg = segments.Current;
    foreach (Segment s in seg)
    {
      //3D length of the segment (Segment.Length would only give the 2D length).
      segmentLength = Math.Sqrt((s.EndPoint.X - s.StartPoint.X) * (s.EndPoint.X - s.StartPoint.X) +
                                (s.EndPoint.Y - s.StartPoint.Y) * (s.EndPoint.Y - s.StartPoint.Y) +
                                (s.EndPoint.Z - s.StartPoint.Z) * (s.EndPoint.Z - s.StartPoint.Z));
      double segmentDuration = (TotalDuration / pathLength) * segmentLength;

      //straight line segments
      if (s.SegmentType == SegmentType.Line)
      {
        MapPoint startPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.StartPoint.X, s.StartPoint.Y, s.StartPoint.Z, layerSpatRef));
        MapPoint endPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.EndPoint.X, s.EndPoint.Y, s.EndPoint.Z, layerSpatRef));

        //add start of path to points collection
        if (num_iterations == 0)
        {
          pointsForKeyframes.Add(startPt);
        }
        if (num_iterations == segmentCount - 1 || segmentCount == 1)
        {
          pathEndPt = endPt; //store path end pt. This will be the last keyframe.
        }

        //we are accouunting for skipped distances from previous segments
        double distCoveredAlongSeg = Math.Abs(createKeyAtDist - skippedDistance);
        if (distCoveredAlongSeg < segmentLength)
        {
          MapPoint keyPt = await CreatePointAlongSegment(startPt, endPt, distCoveredAlongSeg, layerSpatRef);
          //add point to collection
          pointsForKeyframes.Add(keyPt);
          //skipped distance is used now, reset to zero
          skippedDistance = 0;
          //are more keyframes possible for this segment
          bool moreKeysPossible = ((segmentLength - distCoveredAlongSeg) >= createKeyAtDist);
          while (moreKeysPossible)
          {
            double keyAtDistAlongSeg = distCoveredAlongSeg + createKeyAtDist;
            keyPt = await CreatePointAlongSegment(startPt, endPt, keyAtDistAlongSeg, layerSpatRef);
            //add point to collection
            pointsForKeyframes.Add(keyPt);
            distCoveredAlongSeg += createKeyAtDist;
            moreKeysPossible = ((segmentLength - distCoveredAlongSeg) > createKeyAtDist);
          }
          //if any segment length left then add to skipped distance
          skippedDistance += (segmentLength - distCoveredAlongSeg);
        }
        else
        {
          //add this segment's length to skipped distance as no keyframe could be created along it
          skippedDistance += segmentLength;
        }
      }
      else if (s.SegmentType == SegmentType.EllipticArc)
      {
        EllipticArcSegment ellipArc = s as EllipticArcSegment;
        MapPoint startPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.StartPoint.X, s.StartPoint.Y, s.StartPoint.Z, layerSpatRef));
        MapPoint endPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.EndPoint.X, s.EndPoint.Y, s.EndPoint.Z, layerSpatRef));
        //2D radius from the arc center to the segment start.
        double radius = Math.Sqrt((ellipArc.CenterPoint.X - startPt.X) * (ellipArc.CenterPoint.X - startPt.X) +
                                  (ellipArc.CenterPoint.Y - startPt.Y) * (ellipArc.CenterPoint.Y - startPt.Y));
        double angle = ellipArc.CentralAngle;
        //Center point gets the average of the endpoint Z values.
        MapPoint centerPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(ellipArc.CenterPoint.X, ellipArc.CenterPoint.Y, (s.StartPoint.Z + s.EndPoint.Z) / 2, layerSpatRef));

        //add start of path to points collection
        if (num_iterations == 0)
        {
          pointsForKeyframes.Add(startPt);
        }
        if (num_iterations == segmentCount - 1 || segmentCount == 1)
        {
          pathEndPt = endPt; //store path end pt. This will be the last keyframe.
        }

        //we are accouunting for skipped distances from previous segments
        double distCoveredAlongSeg = Math.Abs(createKeyAtDist - skippedDistance);
        if (distCoveredAlongSeg < segmentLength)
        {
          //Distance along the arc is converted to a fraction of the central angle.
          MapPoint keyPt = await CreatePointAlongArc(startPt, endPt, centerPt, angle * distCoveredAlongSeg / segmentLength, radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
          //add point to collection
          pointsForKeyframes.Add(keyPt);
          //skipped distance is used now, reset to zero
          skippedDistance = 0;
          //are more keyframes possible for this segment
          bool moreKeysPossible = ((segmentLength - distCoveredAlongSeg) >= createKeyAtDist);
          while (moreKeysPossible)
          {
            double keyAtDistAlongSeg = distCoveredAlongSeg + createKeyAtDist;
            keyPt = await CreatePointAlongArc(startPt, endPt, centerPt, angle * keyAtDistAlongSeg / segmentLength, radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
            //add point to collection
            pointsForKeyframes.Add(keyPt);
            distCoveredAlongSeg += createKeyAtDist;
            moreKeysPossible = ((segmentLength - distCoveredAlongSeg) > createKeyAtDist);
          }
          //if any segment length left then add to skipped distance
          skippedDistance += (segmentLength - distCoveredAlongSeg);
        }
        else
        {
          //add this segment's length to skipped distance as no keyframe could be created along it
          skippedDistance += segmentLength;
        }
      }
      num_iterations++;
    }
  }

  //now iterate over the points list and create keyframes
  double timeSpanValue = accumulatedDuration;
  TimeSpan keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
  for (int i = 0; i < pointsForKeyframes.Count; i++)
  {
    MapPoint currentPt = pointsForKeyframes[i];
    //Heading/pitch at each keyframe looks toward the next keyframe (or the path end for the last one).
    MapPoint nextPt = null;
    if (i + 1 < pointsForKeyframes.Count)
    {
      nextPt = pointsForKeyframes[i + 1];
    }
    else
    {
      nextPt = pathEndPt;
    }
    timeSpanValue = i * keyEveryNSecond + accumulatedDuration;
    keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
    SetPitchAndHeadingForLine(currentPt, nextPt);
    await CreateCameraKeyframe(mapView, currentPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
    //If some trailing distance never produced a point, close the animation with an explicit
    //keyframe at the path end point at the full duration.
    if (i == pointsForKeyframes.Count - 1 && skippedDistance > 0)
    {
      keyframeTimespan = TimeSpan.FromSeconds(TotalDuration + accumulatedDuration);
      await CreateCameraKeyframe(mapView, pathEndPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
    }
  }
}
//Use this method for smoother turns at corners. Additionally this method processes straight line segments and arc segments separately
//For arc segments a keyframe is created at every second. However a minimum of 5 keyframes are created for arcs.
//So if arc segment length is less than 5 then we default to at least 5 keyframes. This is an attempt to stick to the path as much as possible.
//For straight line segments, rotation is ignored at end point of each segment except for the end point of the path itself. Two keyframes with rotation
//are created at certain distance (determined by LINE_CONSTRAINT_FACTOR) before and after the end point of each segment. This is an attempt to avoid
//sharp turns at corners along the path.
/// <summary>
/// Creates keyframes along the path with extra intermediate keyframes so turns animate smoothly.
/// Straight segments get up to three intermediate points (at LINE_CONSTRAINT_FACTOR, 0.5 and
/// 1 - LINE_CONSTRAINT_FACTOR of the segment); long arcs get roughly one keyframe per second of
/// segment duration; short arcs get five fixed intermediate keyframes.
/// </summary>
/// <param name="mapView">Active map view whose animation is being extended.</param>
/// <param name="layerSpatRef">Spatial reference of the line layer.</param>
/// <param name="transformation">Layer-to-map projection passed through to CreateCameraKeyframe.</param>
/// <param name="cameraTrack">Camera track that receives the keyframes.</param>
/// <param name="segments">Enumerator over the polyline's segment collections; Reset() and re-enumerated here.</param>
/// <param name="segmentCount">Total number of segments across all parts.</param>
/// <param name="pathLength">Total 3D length of the path.</param>
public static async Task CreateKeyframes_AlongPath(MapView mapView, SpatialReference layerSpatRef, ProjectionTransformation transformation, CameraTrack cameraTrack, IEnumerator<ReadOnlySegmentCollection> segments, int segmentCount, double pathLength)
{
  double segmentLength = 0;
  int num_iterations = 0;
  segments.Reset();
  //process each segment depending upon its type - straight line or arc
  while (segments.MoveNext())
  {
    ReadOnlySegmentCollection seg = segments.Current;
    //NOTE(review): accumulatedDuration is re-initialized for every part of a multipart
    //polyline here, unlike CreateKeyframes_EveryNSeconds where it is computed once before the
    //loop. For multipart lines this looks like it would restart the timeline per part —
    //TODO confirm intent for multipart input.
    double accumulatedDuration = mapView.Map.Animation.Duration.TotalSeconds + ((mapView.Map.Animation.Duration.TotalSeconds > 0) ? ANIMATION_APPEND_TIME : 0); // 0;
    foreach (Segment s in seg)
    {
      //3D length of the segment (Segment.Length would only give the 2D length).
      double length3D = Math.Sqrt((s.EndPoint.X - s.StartPoint.X) * (s.EndPoint.X - s.StartPoint.X) +
                                  (s.EndPoint.Y - s.StartPoint.Y) * (s.EndPoint.Y - s.StartPoint.Y) +
                                  (s.EndPoint.Z - s.StartPoint.Z) * (s.EndPoint.Z - s.StartPoint.Z));
      double segmentDuration = (TotalDuration / pathLength) * length3D;
      segmentLength = length3D;

      //straight line segments
      if (s.SegmentType == SegmentType.Line)
      {
        //Z is scaled by Z_CONVERSION_FACTOR (set from the layer unit in CreateKeyframes) for
        //non-degree coordinate systems. Only this branch applies the factor.
        MapPoint startPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.StartPoint.X, s.StartPoint.Y, s.StartPoint.Z * Z_CONVERSION_FACTOR, layerSpatRef));
        MapPoint endPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.EndPoint.X, s.EndPoint.Y, s.EndPoint.Z * Z_CONVERSION_FACTOR, layerSpatRef));

        //we will be creating three intermediate keyframes for staright segments only if segment length is more than a set threshold
        //the threshold is just a guess and might have to be altered depending upon the path geometry. Should work for most cases though
        MapPoint firstIntPoint = null;
        MapPoint midIntPoint = null;
        MapPoint lastIntPoint = null;
        if (segmentLength >= STRAIGHT_SEGMENT_LENGTH_THRESHOLD)
        {
          //first intermediate point
          firstIntPoint = await CreatePointAlongSegment(startPt, endPt, LINE_CONSTRAINT_FACTOR * segmentLength, layerSpatRef);
          //mid point
          midIntPoint = await CreatePointAlongSegment(startPt, endPt, 0.5 * segmentLength, layerSpatRef);
          //last intermediate point
          lastIntPoint = await CreatePointAlongSegment(startPt, endPt, (1 - LINE_CONSTRAINT_FACTOR) * segmentLength, layerSpatRef);
        }

        //create keyframe at start vertex of path in map space
        double timeSpanValue = accumulatedDuration;
        TimeSpan keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        if (segmentLength >= STRAIGHT_SEGMENT_LENGTH_THRESHOLD)
        {
          SetPitchAndHeadingForLine(startPt, firstIntPoint);
        }
        else
        {
          SetPitchAndHeadingForLine(startPt, endPt);
        }
        //ignore rotation for all start vertices (which would also be end vertices of previous segments) EXCEPT for the first vertex of path
        if (num_iterations == 0 || segmentLength < STRAIGHT_SEGMENT_LENGTH_THRESHOLD)
        {
          await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }
        else
        {
          await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading, true, false);
        }

        //NOTE(review): strict '>' here vs '>=' above — a segment exactly at the threshold gets
        //intermediate points computed but no intermediate keyframes. TODO confirm intended.
        if (segmentLength > STRAIGHT_SEGMENT_LENGTH_THRESHOLD)
        {
          //Create a keyframe at PATH_CONSTRAINT_FACTOR distance along the segment from start point
          //NOTE(review): distanceAlong is assigned in this section but never read.
          double distanceAlong = LINE_CONSTRAINT_FACTOR * segmentLength;
          timeSpanValue = accumulatedDuration + LINE_CONSTRAINT_FACTOR * segmentDuration;
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          SetPitchAndHeadingForLine(firstIntPoint, midIntPoint);
          await CreateCameraKeyframe(mapView, firstIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

          //Create a keyframe at middle of segment
          distanceAlong = 0.5 * segmentLength;
          timeSpanValue = accumulatedDuration + 0.5 * segmentDuration;
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          SetPitchAndHeadingForLine(midIntPoint, lastIntPoint);
          //await CreateCameraKeyframe(mapView, midIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

          //Create a keyframe at (1 - PATH_CONSTRAINT_FACTOR) distance along the segment from start point
          distanceAlong = (1 - LINE_CONSTRAINT_FACTOR) * segmentLength;
          timeSpanValue = accumulatedDuration + (1 - LINE_CONSTRAINT_FACTOR) * segmentDuration;
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          SetPitchAndHeadingForLine(lastIntPoint, endPt);
          await CreateCameraKeyframe(mapView, lastIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }

        //Create a keyframe at end point of segment only for the end point of last segment
        //Otherwise we will get duplicate keyframes at end of one segment and start of the next one
        if (num_iterations == segmentCount - 1)
        {
          timeSpanValue = accumulatedDuration + segmentDuration;
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          if (SelectedCameraView == "Face target")
          {
            SetPitchAndHeadingForLine(endPt, TargetPoint);
          }
          await CreateCameraKeyframe(mapView, endPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }
      }
      //processing for arcs - create a keyframe every second for arcs
      //we will create a minimum of 5 keyframes along the arc
      else if (s.SegmentType == SegmentType.EllipticArc && segmentDuration > 5)
      {
        EllipticArcSegment ellipArc = s as EllipticArcSegment;
        MapPoint startPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.StartPoint.X, s.StartPoint.Y, s.StartPoint.Z, layerSpatRef));
        MapPoint endPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.EndPoint.X, s.EndPoint.Y, s.EndPoint.Z, layerSpatRef));
        //2D radius from the arc center to the segment start.
        double radius = Math.Sqrt((ellipArc.CenterPoint.X - startPt.X) * (ellipArc.CenterPoint.X - startPt.X) +
                                  (ellipArc.CenterPoint.Y - startPt.Y) * (ellipArc.CenterPoint.Y - startPt.Y));
        double angle = ellipArc.CentralAngle;
        //Center point gets the average of the endpoint Z values.
        MapPoint centerPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(ellipArc.CenterPoint.X, ellipArc.CenterPoint.Y, (s.StartPoint.Z + s.EndPoint.Z) / 2, layerSpatRef));

        //roughly one keyframe per second of this segment's duration
        int num_keys = (int)segmentDuration;
        MapPoint firstIntPoint = null;
        //first intermediate keyframe for arc - needed for setting heading for start vertex
        // >2 to account for start and end
        if (num_keys > 2)
        {
          firstIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, angle / (num_keys - 1), radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
        }

        //Create keyframe at start vertex of path in map space
        double timeSpanValue = accumulatedDuration;
        TimeSpan keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        if (firstIntPoint != null)
        {
          SetPitchAndHeadingForLine(startPt, firstIntPoint);
        }
        else
        {
          SetPitchAndHeadingForLine(startPt, endPt);
        }
        //Ignore rotation for all start vertices EXCEPT for the first vertex of path
        if (num_iterations == 0)
        {
          await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }
        else
        {
          await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading, true, false);
        }

        //Loop to create intermediate keyframes at each second
        for (int i = 0; i < num_keys - 2; i++)
        {
          MapPoint currentIntPoint = null;
          MapPoint nextIntPoint = null;
          currentIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, (angle / (num_keys - 1)) * (i + 1), radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
          if (i < num_keys - 3)
          {
            nextIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, (angle / (num_keys - 1)) * (i + 2), radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
          }
          else //for the last intermediate keyframe, heading/pitch has to be determined relative to the end point fo segment
          {
            nextIntPoint = endPt;
          }
          //timeSpanValue = accumulatedDuration + (i + 1) * 1; //at each second
          timeSpanValue = accumulatedDuration + (i + 1) * (segmentDuration / (num_keys - 1));
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          SetPitchAndHeadingForLine(currentIntPoint, nextIntPoint);
          await CreateCameraKeyframe(mapView, currentIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }

        //Create a keyframe at end point of segment only for the end point of last segment
        if (num_iterations == segmentCount - 1)
        {
          timeSpanValue = accumulatedDuration + segmentDuration;
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          if (SelectedCameraView == "Face target")
          {
            SetPitchAndHeadingForLine(endPt, TargetPoint);
          }
          await CreateCameraKeyframe(mapView, endPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }
      }
      //create a minimum of 5 keyframes along the arc
      else if (s.SegmentType == SegmentType.EllipticArc)
      {
        EllipticArcSegment ellipArc = s as EllipticArcSegment;
        MapPoint startPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.StartPoint.X, s.StartPoint.Y, s.StartPoint.Z, layerSpatRef));
        MapPoint endPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(s.EndPoint.X, s.EndPoint.Y, s.EndPoint.Z, layerSpatRef));
        //2D radius from the arc center to the segment start.
        double radius = Math.Sqrt((ellipArc.CenterPoint.X - startPt.X) * (ellipArc.CenterPoint.X - startPt.X) +
                                  (ellipArc.CenterPoint.Y - startPt.Y) * (ellipArc.CenterPoint.Y - startPt.Y));
        double angle = ellipArc.CentralAngle;
        //Center point gets the average of the endpoint Z values.
        MapPoint centerPt = await QueuedTask.Run(() => MapPointBuilder.CreateMapPoint(ellipArc.CenterPoint.X, ellipArc.CenterPoint.Y, (s.StartPoint.Z + s.EndPoint.Z) / 2, layerSpatRef));

        //we are creating five intermediate keyframes for arcs
        MapPoint firstIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, angle * ARC_CONSTRAINT_FACTOR, radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
        MapPoint secondIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, angle * ARC_CONSTRAINT_FACTOR * 2, radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
        MapPoint midIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, angle * 0.5, radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
        MapPoint secondLastIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, angle * (1 - ARC_CONSTRAINT_FACTOR * 2), radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);
        MapPoint lastIntPoint = await CreatePointAlongArc(startPt, endPt, centerPt, angle * (1 - ARC_CONSTRAINT_FACTOR), radius, layerSpatRef, ellipArc.IsMinor, ellipArc.IsCounterClockwise);

        //Create keyframe at start vertex of path in map space
        double timeSpanValue = accumulatedDuration;
        TimeSpan keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        SetPitchAndHeadingForLine(startPt, firstIntPoint);
        //Ignore rotation for all start vertices EXCEPT for the first vertex of path
        if (num_iterations == 0)
        {
          await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }
        else
        {
          await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading, true, false);
        }
        //await CreateCameraKeyframe(mapView, startPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

        //Create a keyframe at PATH_CONSTRAINT_FACTOR distance along the segment from start point
        timeSpanValue = accumulatedDuration + ARC_CONSTRAINT_FACTOR * segmentDuration;
        keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        SetPitchAndHeadingForLine(firstIntPoint, secondIntPoint);
        await CreateCameraKeyframe(mapView, firstIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

        //Create a keyframe at 2* PATH_CONSTRAINT_FACTOR distance along the segment from start point
        timeSpanValue = accumulatedDuration + ARC_CONSTRAINT_FACTOR * 2 * segmentDuration;
        keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        SetPitchAndHeadingForLine(secondIntPoint, midIntPoint);
        await CreateCameraKeyframe(mapView, secondIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

        //Create a keyframe at middle of segment
        timeSpanValue = accumulatedDuration + 0.5 * segmentDuration;
        keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        SetPitchAndHeadingForLine(midIntPoint, secondLastIntPoint);
        await CreateCameraKeyframe(mapView, midIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

        //Create a keyframe at (1 - PATH_CONSTRAINT_FACTOR * 2) distance along the segment from start point
        timeSpanValue = accumulatedDuration + (1 - ARC_CONSTRAINT_FACTOR * 2) * segmentDuration;
        keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        SetPitchAndHeadingForLine(secondLastIntPoint, lastIntPoint);
        await CreateCameraKeyframe(mapView, secondLastIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

        //Create a keyframe at (1 - PATH_CONSTRAINT_FACTOR) distance along the segment from start point
        timeSpanValue = accumulatedDuration + (1 - ARC_CONSTRAINT_FACTOR) * segmentDuration;
        keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
        SetPitchAndHeadingForLine(lastIntPoint, endPt);
        await CreateCameraKeyframe(mapView, lastIntPoint, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);

        //Create a keyframe at end point of segment only for the end point of last segment
        if (num_iterations == segmentCount - 1)
        {
          timeSpanValue = accumulatedDuration + segmentDuration;
          keyframeTimespan = TimeSpan.FromSeconds(timeSpanValue);
          if (SelectedCameraView == "Face target")
          {
            SetPitchAndHeadingForLine(endPt, TargetPoint);
          }
          await CreateCameraKeyframe(mapView, endPt, transformation, cameraTrack, keyframeTimespan, _keyframePitch, _keyframeHeading);
        }
      }
      accumulatedDuration += segmentDuration;
      num_iterations++;
    }
  }
}
/// <summary>
/// Entry point for keyframe creation: validates that exactly one polyline feature is selected,
/// fetches its geometry, computes the path's total 3D length and segment count, and dispatches
/// to the CreateKeyframes_* method chosen by <c>SelectedMethod</c>.
/// Shows a message box and returns early on any validation failure.
/// </summary>
public static async Task CreateKeyframes()
{
  FeatureLayer ftrLayer = null;
  MapView mapView = MapView.Active;
  if (mapView == null)
  {
    return;
  }

  //Validate the selection: exactly one feature layer with exactly one selected polyline.
  var mapSelection = await QueuedTask.Run(() => MapView.Active.Map.GetSelection());
  if (mapSelection.Count == 1)
  {
    var layer = mapSelection.First().Key;
    if (layer is FeatureLayer)
    {
      ftrLayer = (FeatureLayer)layer;
      if (ftrLayer.ShapeType != ArcGIS.Core.CIM.esriGeometryType.esriGeometryPolyline)
      {
        ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show("Select a polyline feature.");
        return;
      }
      int numFtrsSelected = await QueuedTask.Run(() => ftrLayer.GetSelection().GetCount());
      if (numFtrsSelected != 1)
      {
        ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show("Select only one polyline feature.");
        return;
      }
    }
    else
    {
      ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show("Select a polyline feature.");
      return;
    }
  }
  else
  {
    ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show("Select a polyline feature.");
    return;
  }

  //"Face target" mode needs a target point to aim the camera at.
  if (SelectedCameraView == "Face target" && TargetPoint == null)
  {
    ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show("Selected view type is - Face target - but a target point is not set.");
    return;
  }

  string oid_fieldName = await QueuedTask.Run(() => ftrLayer.GetTable().GetDefinition().GetObjectIDField());

  //get selected polyline
  Polyline lineGeom = await QueuedTask.Run<Polyline>(() =>
  {
    var selectedFtrOID = MapView.Active.Map.GetSelection()[ftrLayer][0];
    QueryFilter qf = new QueryFilter();
    qf.WhereClause = oid_fieldName + " = " + selectedFtrOID.ToString();
    RowCursor result = ftrLayer.GetFeatureClass().Search(qf);
    if (result != null)
    {
      result.MoveNext();
      Feature selectedFtr = result.Current as Feature;
      return (selectedFtr.GetShape() as Polyline);
    }
    return (null);
  });

  //couldn't get the selected feature
  if (lineGeom == null)
  {
    return;
  }

  ProjectionTransformation transformation = await QueuedTask.Run(() => ProjectionTransformation.Create(ftrLayer.GetSpatialReference(), mapView.Map.SpatialReference));
  SpatialReference layerSpatRef = await QueuedTask.Run(() => ftrLayer.GetSpatialReference());

  //For projected (non-degree) coordinate systems, scale Z values by the layer unit's factor.
  if (layerSpatRef.Unit.Name != "Degree")
  {
    Z_CONVERSION_FACTOR = layerSpatRef.Unit.ConversionFactor;
  }

  //Project target point if method is Face target
  if (SelectedCameraView == "Face target")
  {
    if (TargetPoint != null && TargetPoint.SpatialReference != layerSpatRef)
    {
      ProjectionTransformation transf_forTarget = await QueuedTask.Run(() => ProjectionTransformation.Create(TargetPoint.SpatialReference, layerSpatRef));
      MapPoint projected_targetPoint = (MapPoint)GeometryEngine.Instance.ProjectEx(TargetPoint, transf_forTarget);
      TargetPoint = null;
      TargetPoint = projected_targetPoint;
    }
  }

  //There will always be one camera track in the map animation.
  var animation = mapView.Map.Animation;
  var cameraTrack = animation.Tracks.OfType<CameraTrack>().First();
  //(removed an unused local that captured cameraTrack.Keyframes — it was never read)

  //Get segment list for line
  ReadOnlyPartCollection polylineParts = lineGeom.Parts;

  //get total segment count and determine path length
  double pathLength = 0;
  int segmentCount = 0;
  IEnumerator<ReadOnlySegmentCollection> segments = polylineParts.GetEnumerator();
  while (segments.MoveNext())
  {
    ReadOnlySegmentCollection seg = segments.Current;
    foreach (Segment s in seg)
    {
      //pathLength += s.Length;//s.Length returns 2D length
      double length3D = Math.Sqrt((s.EndPoint.X - s.StartPoint.X) * (s.EndPoint.X - s.StartPoint.X) +
                                  (s.EndPoint.Y - s.StartPoint.Y) * (s.EndPoint.Y - s.StartPoint.Y) +
                                  (s.EndPoint.Z - s.StartPoint.Z) * (s.EndPoint.Z - s.StartPoint.Z));
      pathLength += length3D;
      segmentCount += 1;
    }
  }

  //reset heading and pitch
  _keyframeHeading = 0;
  _keyframePitch = 0;

  // Create keyframes based on chosen method
  if (SelectedMethod == "Keyframes along path")
  {
    await CreateKeyframes_AlongPath(mapView, layerSpatRef, transformation, cameraTrack, segments, segmentCount, pathLength);
  }
  else if (SelectedMethod == "Keyframes every N seconds")
  {
    await CreateKeyframes_EveryNSeconds(mapView, layerSpatRef, transformation, cameraTrack, segments, segmentCount, pathLength, KeyEveryNSecond);
  }
  else if (SelectedMethod == "Keyframes only at vertices")
  {
    await CreateKeyframes_AtVertices(mapView, layerSpatRef, transformation, cameraTrack, lineGeom, segments, segmentCount, pathLength);
  }
}
/// <summary>
/// Builds a WFS/GML FeatureCollection string for the layer features that intersect the
/// footprint envelopes around the current cyclorama viewers, excluding features already in
/// <c>_selection</c>. Also reads the layer renderer's symbol color into <c>Color</c> and sets
/// <c>GmlChanged</c> when either the color or the GML text changed. Returns the new GML
/// (also stored in <c>Gml</c>).
/// </summary>
public async Task<string> GenerateGmlAsync()
{
  MapView mapView = MapView.Active;
  Map map = mapView?.Map;
  SpatialReference mapSpatRef = map?.SpatialReference;
  MySpatialReference myCyclSpatRef = _settings.CycloramaViewerCoordinateSystem;
  //Fall back to the map's spatial reference when no cyclorama coordinate system is configured.
  SpatialReference cyclSpatRef = (myCyclSpatRef == null)
    ? mapSpatRef
    : (myCyclSpatRef.ArcGisSpatialReference ?? (await myCyclSpatRef.CreateArcGisSpatialReferenceAsync()));
  Unit unit = cyclSpatRef?.Unit;
  double factor = unit?.ConversionFactor ?? 1;
  Color color = Color.White;
  string result =
    "<wfs:FeatureCollection xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:wfs=\"http://www.opengis.net/wfs\" xmlns:gml=\"http://www.opengis.net/gml\">";

  await QueuedTask.Run(async () =>
  {
    SpatialReference layerSpatRef = Layer.GetSpatialReference();
    IList<IList<Segment>> geometries = new List<IList<Segment>>();
    ICollection<Viewer> viewers = _viewerList.Viewers;

    //Build a square search envelope (side 2 * draw distance) around each viewer location,
    //projected into the layer's spatial reference.
    foreach (var viewer in viewers)
    {
      double distance = viewer.OverlayDrawDistance;
      RecordingLocation recordingLocation = viewer.Location;
      if (recordingLocation != null)
      {
        //Scale the draw distance by the unit conversion factor; direction of the scaling
        //depends on whether the cyclorama coordinate system is geographic.
        if (cyclSpatRef?.IsGeographic ?? true)
        {
          distance = distance * factor;
        }
        else
        {
          distance = distance / factor;
        }

        double x = recordingLocation.X;
        double y = recordingLocation.Y;
        double xMin = x - distance;
        double xMax = x + distance;
        double yMin = y - distance;
        double yMax = y + distance;
        Envelope envelope = EnvelopeBuilder.CreateEnvelope(xMin, yMin, xMax, yMax, cyclSpatRef);
        Envelope copyEnvelope = envelope;
        //Wkid == 0 is treated as "no known coordinate system" — skip projecting in that case.
        if (layerSpatRef.Wkid != 0)
        {
          ProjectionTransformation projection = ProjectionTransformation.Create(cyclSpatRef, layerSpatRef);
          copyEnvelope = GeometryEngine.Instance.ProjectEx(envelope, projection) as Envelope;
        }

        //Collect the envelope's boundary segments; all viewer envelopes are later merged
        //into one multi-part polygon used as the spatial filter.
        Polygon copyPolygon = PolygonBuilder.CreatePolygon(copyEnvelope, layerSpatRef);
        ReadOnlyPartCollection polygonParts = copyPolygon.Parts;
        IEnumerator<ReadOnlySegmentCollection> polygonSegments = polygonParts.GetEnumerator();
        IList<Segment> segments = new List<Segment>();
        while (polygonSegments.MoveNext())
        {
          ReadOnlySegmentCollection polygonSegment = polygonSegments.Current;
          foreach (Segment segment in polygonSegment)
          {
            segments.Add(segment);
          }
        }
        geometries.Add(segments);
      }
    }

    //NOTE(review): explicit GC.Collect() in production code is an anti-pattern and almost
    //certainly unnecessary here — candidate for removal (verify no downstream reliance).
    GC.Collect();

    Polygon polygon = PolygonBuilder.CreatePolygon(geometries, layerSpatRef);
    using (FeatureClass featureClass = Layer?.GetFeatureClass())
    {
      string uri = Layer?.URI;
      SpatialQueryFilter spatialFilter = new SpatialQueryFilter
      {
        FilterGeometry = polygon,
        SpatialRelationship = SpatialRelationship.Intersects,
        SubFields = "*"
      };

      using (RowCursor existsResult = featureClass?.Search(spatialFilter, false))
      {
        while (existsResult?.MoveNext() ?? false)
        {
          Row row = existsResult.Current;
          long objectId = row.GetObjectID();
          //Skip features already tracked in _selection.
          if ((_selection == null) || (!_selection.Contains(objectId)))
          {
            Feature feature = row as Feature;
            var fieldvalues = new Dictionary<string, string> { { FieldUri, uri }, { FieldObjectId, objectId.ToString() } };
            Geometry geometry = feature?.GetShape();
            GeometryType geometryType = geometry?.GeometryType ?? GeometryType.Unknown;
            Geometry copyGeometry = geometry;
            //Project the feature geometry back into the cyclorama spatial reference for output.
            if ((geometry != null) && (layerSpatRef.Wkid != 0))
            {
              ProjectionTransformation projection = ProjectionTransformation.Create(layerSpatRef, cyclSpatRef);
              copyGeometry = GeometryEngine.Instance.ProjectEx(geometry, projection);
            }

            if (copyGeometry != null)
            {
              //Serialize the geometry to a GML fragment; only Point, Polygon and Polyline
              //are emitted — other geometry types produce an empty fragment.
              string gml = string.Empty;
              switch (geometryType)
              {
                case GeometryType.Envelope:
                  break;
                case GeometryType.Multipatch:
                  break;
                case GeometryType.Multipoint:
                  break;
                case GeometryType.Point:
                  MapPoint point = copyGeometry as MapPoint;
                  if (point != null)
                  {
                    gml = $"<gml:Point {GmlDimension(copyGeometry)}><gml:coordinates>{await GmlPointAsync(point)}</gml:coordinates></gml:Point>";
                  }
                  break;
                case GeometryType.Polygon:
                  Polygon polygonGml = copyGeometry as Polygon;
                  if (polygonGml != null)
                  {
                    ReadOnlyPartCollection polygonParts = polygonGml.Parts;
                    IEnumerator<ReadOnlySegmentCollection> polygonSegments = polygonParts.GetEnumerator();
                    while (polygonSegments.MoveNext())
                    {
                      ReadOnlySegmentCollection segments = polygonSegments.Current;
                      gml = $"{gml}<gml:MultiPolygon><gml:PolygonMember><gml:Polygon {GmlDimension(copyGeometry)}><gml:outerBoundaryIs><gml:LinearRing><gml:coordinates>";
                      //Emit each line segment's start point; the last segment also emits its
                      //end point so the ring is closed.
                      for (int i = 0; i < segments.Count; i++)
                      {
                        if (segments[i].SegmentType == SegmentType.Line)
                        {
                          MapPoint polygonPoint = segments[i].StartPoint;
                          gml = $"{gml}{((i == 0) ? string.Empty : " ")}{await GmlPointAsync(polygonPoint)}";
                          if (i == (segments.Count - 1))
                          {
                            polygonPoint = segments[i].EndPoint;
                            gml = $"{gml} {await GmlPointAsync(polygonPoint)}";
                          }
                        }
                      }
                      gml = $"{gml}</gml:coordinates></gml:LinearRing></gml:outerBoundaryIs></gml:Polygon></gml:PolygonMember></gml:MultiPolygon>";
                    }
                  }
                  break;
                case GeometryType.Polyline:
                  Polyline polylineGml = copyGeometry as Polyline;
                  if (polylineGml != null)
                  {
                    ReadOnlyPartCollection polylineParts = polylineGml.Parts;
                    IEnumerator<ReadOnlySegmentCollection> polylineSegments = polylineParts.GetEnumerator();
                    while (polylineSegments.MoveNext())
                    {
                      ReadOnlySegmentCollection segments = polylineSegments.Current;
                      gml = $"{gml}<gml:MultiLineString><gml:LineStringMember><gml:LineString {GmlDimension(copyGeometry)}><gml:coordinates>";
                      //Same start-point/end-point emission pattern as the polygon case above.
                      for (int i = 0; i < segments.Count; i++)
                      {
                        if (segments[i].SegmentType == SegmentType.Line)
                        {
                          MapPoint linePoint = segments[i].StartPoint;
                          gml = $"{gml}{((i == 0) ? string.Empty : " ")}{await GmlPointAsync(linePoint)}";
                          if (i == (segments.Count - 1))
                          {
                            linePoint = segments[i].EndPoint;
                            gml = $"{gml} {await GmlPointAsync(linePoint)}";
                          }
                        }
                      }
                      gml = $"{gml}</gml:coordinates></gml:LineString></gml:LineStringMember></gml:MultiLineString>";
                    }
                  }
                  break;
                case GeometryType.Unknown:
                  break;
              }

              //Wrap the field values and geometry fragment in a featureMember element.
              string fieldValueStr = fieldvalues.Aggregate(string.Empty,
                (current, fieldvalue) => string.Format("{0}<{1}>{2}</{1}>", current, fieldvalue.Key, fieldvalue.Value));
              result = $"{result}<gml:featureMember><xs:Geometry>{fieldValueStr}{gml}</xs:Geometry></gml:featureMember>";
            }
          }
        }
      }
    }

    //Read the symbol color from the layer's renderer (simple renderer first, otherwise the
    //unique-value renderer's default symbol); default to white (255,255,255,255).
    CIMRenderer renderer = Layer.GetRenderer();
    CIMSimpleRenderer simpleRenderer = renderer as CIMSimpleRenderer;
    CIMUniqueValueRenderer uniqueValueRendererRenderer = renderer as CIMUniqueValueRenderer;
    CIMSymbolReference symbolRef = simpleRenderer?.Symbol ?? uniqueValueRendererRenderer?.DefaultSymbol;
    CIMSymbol symbol = symbolRef?.Symbol;
    CIMColor cimColor = symbol?.GetColor();
    double[] colorValues = cimColor?.Values;
    int red = ((colorValues != null) && (colorValues.Length >= 1)) ? ((int)colorValues[0]) : 255;
    int green = ((colorValues != null) && (colorValues.Length >= 2)) ? ((int)colorValues[1]) : 255;
    int blue = ((colorValues != null) && (colorValues.Length >= 3)) ? ((int)colorValues[2]) : 255;
    int alpha = ((colorValues != null) && (colorValues.Length >= 4)) ? ((int)colorValues[3]) : 255;
    color = Color.FromArgb(alpha, red, green, blue);
  });

  //GmlChanged is true when the color changed or the GML text changed.
  GmlChanged = (Color != color);
  Color = color;
  string newGml = $"{result}</wfs:FeatureCollection>";
  GmlChanged = ((newGml != Gml) || GmlChanged);
  return (Gml = newGml);
}
/*
 * protected async override void OnUpdate()
 * {
 *   Cursor nowCursor = Cursor;
 *   Cursor = _containsFeatures ? Cursors.Arrow : _thisCursor;
 *
 *   if (nowCursor != Cursor)
 *   {
 *     await FrameworkApplication.SetCurrentToolAsync("esri_mapping_exploreTool");
 *     await FrameworkApplication.SetCurrentToolAsync("globeSpotterArcGISPro_openImageTool");
 *   }
 *
 *   base.OnUpdate();
 * }
 *
 * protected override async void OnToolMouseMove(MapViewMouseEventArgs e)
 * {
 *   await QueuedTask.Run(() =>
 *   {
 *     var constants = ConstantsRecordingLayer.Instance;
 *     double size = constants.SizeLayer;
 *     double halfSize = size / 2;
 *     MapView activeView = MapView.Active;
 *
 *     WinPoint clientPoint = e.ClientPoint;
 *     WinPoint pointScreen = activeView.ClientToScreen(clientPoint);
 *     double x = pointScreen.X;
 *     double y = pointScreen.Y;
 *     WinPoint minPoint = new WinPoint(x - halfSize, y - halfSize);
 *     WinPoint maxPoint = new WinPoint(x + halfSize, y + halfSize);
 *     MapPoint minPoint1 = activeView.ScreenToMap(minPoint);
 *     MapPoint maxPoint1 = activeView.ScreenToMap(maxPoint);
 *     Envelope envelope = EnvelopeBuilder.CreateEnvelope(minPoint1, maxPoint1, minPoint1.SpatialReference);
 *     var features = MapView.Active?.GetFeatures(envelope);
 *     _containsFeatures = (features != null) && (features.Count >= 1);
 *   });
 *
 *   base.OnToolMouseMove(e);
 * }
 */
/// <summary>
/// Handles completion of a point sketch: builds a small screen-space search envelope around the
/// clicked point and either (Ctrl held) stores the clicked location — projected to the
/// cyclorama coordinate system — in <c>_location</c> for nearest-image lookup, zooming in if
/// outside the working scale, or (no Ctrl) stores the image id of a recording found among the
/// hit features. Always returns true.
/// </summary>
/// <param name="geometry">The completed sketch geometry; only MapPoint sketches are handled.</param>
protected override Task<bool> OnSketchCompleteAsync(Geometry geometry)
{
  return (QueuedTask.Run(() =>
  {
    MapPoint point = geometry as MapPoint;
    MapView activeView = MapView.Active;
    if (point != null && activeView != null)
    {
      var constants = ConstantsRecordingLayer.Instance;
      double size = constants.SizeLayer;
      double halfSize = size / 2;
      SpatialReference pointSpatialReference = point.SpatialReference;

      //Build a size x size screen-pixel box around the click, converted back to map space,
      //and use it to hit-test features.
      var pointScreen = activeView.MapToScreen(point);
      double x = pointScreen.X;
      double y = pointScreen.Y;
      WinPoint pointScreenMin = new WinPoint(x - halfSize, y - halfSize);
      WinPoint pointScreenMax = new WinPoint(x + halfSize, y + halfSize);
      var pointMapMin = activeView.ScreenToMap(pointScreenMin);
      var pointMapMax = activeView.ScreenToMap(pointScreenMax);
      Envelope envelope = EnvelopeBuilder.CreateEnvelope(pointMapMin, pointMapMax, pointSpatialReference);
      var features = activeView.GetFeatures(envelope);

      GlobeSpotter globeSpotter = GlobeSpotter.Current;
      CycloMediaGroupLayer groupLayer = globeSpotter?.CycloMediaGroupLayer;
      if (features != null && groupLayer != null)
      {
        //Ctrl-click switches to "nearest" mode: use the raw coordinates instead of a hit feature.
        _nearest = Keyboard.IsKeyDown(Key.LeftCtrl) || Keyboard.IsKeyDown(Key.RightCtrl);
        if (_nearest)
        {
          Settings settings = Settings.Instance;
          MySpatialReference cycloCoordSystem = settings.CycloramaViewerCoordinateSystem;
          if (cycloCoordSystem != null)
          {
            //NOTE(review): blocking on .Result inside QueuedTask.Run — a sync-over-async
            //call that risks deadlock/thread starvation; candidate for an async refactor.
            SpatialReference cycloSpatialReference = cycloCoordSystem.ArcGisSpatialReference ??
                                                     cycloCoordSystem.CreateArcGisSpatialReferenceAsync().Result;
            if (pointSpatialReference.Wkid != cycloSpatialReference.Wkid)
            {
              ProjectionTransformation projection = ProjectionTransformation.Create(pointSpatialReference, cycloSpatialReference);
              point = GeometryEngine.Instance.ProjectEx(point, projection) as MapPoint;
            }
            if (point != null)
            {
              //Invariant culture so the "x,y" string is locale-independent.
              CultureInfo ci = CultureInfo.InvariantCulture;
              _location = string.Format(ci, "{0},{1}", point.X, point.Y);
              //Zoom to half the minimum scale when the view is outside the working scale range.
              if (!globeSpotter.InsideScale())
              {
                double minimumScale = ConstantsRecordingLayer.Instance.MinimumScale;
                double scale = minimumScale / 2;
                Camera camera = new Camera(point.X, point.Y, scale, 0.0);
                MapView.Active?.ZoomTo(camera);
              }
            }
          }
        }
        else
        {
          //Non-nearest mode: scan hit features for an authorized recording and keep its image id.
          foreach (var feature in features)
          {
            Layer layer = feature.Key;
            CycloMediaLayer cycloMediaLayer = groupLayer.GetLayer(layer);
            if (cycloMediaLayer != null)
            {
              foreach (long uid in feature.Value)
              {
                //NOTE(review): another blocking .Result inside QueuedTask.Run — see above.
                Recording recording = cycloMediaLayer.GetRecordingAsync(uid).Result;
                //IsAuthorized == null is treated as authorized.
                if (recording.IsAuthorized == null || (bool)recording.IsAuthorized)
                {
                  _location = recording.ImageId;
                }
              }
            }
          }
        }
      }
    }
    return true;
  }));
}
/// <summary>
/// Creates keyframes along the path using the user defined settings.
/// </summary>
/// <param name="line">The geometry of the line to fly along.</param>
/// <param name="verticalUnit">The elevation unit of the 3D layer</param>
internal Task CreateKeyframesAlongPath(Polyline line, Unit verticalUnit)
{
  return QueuedTask.Run(() =>
  {
    var mapView = MapView.Active;
    if (mapView == null)
      return;

    //Get the camera track from the active map's animation.
    //There will always be only one camera track in the animation.
    var cameraTrack = mapView.Map.Animation.Tracks.OfType<CameraTrack>().First();

    //Get some of the user settings for constructing the keyframes along the path.
    var densifyDistance = Animation.Settings.KeyEvery;
    var verticalOffset = Animation.Settings.HeightAbove /
      ((mapView.Map.SpatialReference.IsGeographic) ? 1.0 : mapView.Map.SpatialReference.Unit.ConversionFactor); //1 meter
    double currentTimeSeconds = GetInsertTime(mapView.Map.Animation);

    //We need to project the line to a projected coordinate system to calculate the line's length in 3D
    //as well as more accurately calculate heading and pitch along the path.
    if (line.SpatialReference.IsGeographic)
    {
      if (mapView.Map.SpatialReference.IsGeographic)
      {
        var transformation = ProjectionTransformation.Create(line.SpatialReference, SpatialReferences.WebMercator, line.Extent);
        line = GeometryEngine.ProjectEx(line, transformation) as Polyline;
      }
      else
      {
        var transformation = ProjectionTransformation.Create(line.SpatialReference, mapView.Map.SpatialReference, line.Extent);
        line = GeometryEngine.ProjectEx(line, transformation) as Polyline;
      }
    }

    //If the user has specified to create keyframes at additional locations than just the vertices
    //we will densify the line by the distance the user specified.
    if (!Animation.Settings.VerticesOnly)
    {
      line = GeometryEngine.DensifyByLength(line, densifyDistance / line.SpatialReference.Unit.ConversionFactor) as Polyline;
    }

    //To maintain a constant speed we divide the total animation time by the 3D length of the line.
    var duration = Animation.Settings.Duration;
    var secondsPerUnit = duration / line.Length3D;
    Camera prevCamera = null;

    //Loop over each vertex in the line and create a new keyframe at each.
    for (int i = 0; i < line.PointCount; i++)
    {
      MapPoint cameraPoint = line.Points[i];

      //If the point is not in the same spatial reference as the map we need to project it.
      //BUG FIX: the transformation was created but never used — the original called
      //GeometryEngine.Project(...) instead of ProjectEx(point, transformation).
      if (cameraPoint.SpatialReference.Wkid != mapView.Map.SpatialReference.Wkid)
      {
        var transformation = ProjectionTransformation.Create(cameraPoint.SpatialReference, mapView.Map.SpatialReference);
        cameraPoint = GeometryEngine.ProjectEx(cameraPoint, transformation) as MapPoint;
      }

      //Construct a new camera from the point.
      var camera = new Camera(cameraPoint.X, cameraPoint.Y, cameraPoint.Z, Animation.Settings.Pitch, 0.0,
        cameraPoint.SpatialReference, CameraViewpoint.LookFrom);

      //Convert the Z unit to meters if the camera is not in a geographic coordinate system.
      if (!camera.SpatialReference.IsGeographic)
      {
        camera.Z /= camera.SpatialReference.Unit.ConversionFactor;
      }

      //Convert the Z to the unit of the layer's elevation unit and add the user defined offset.
      camera.Z *= verticalUnit.ConversionFactor;
      camera.Z += verticalOffset;

      if (i + 1 == line.PointCount)
      {
        //Last point: reuse the previous camera's heading and pitch.
        //BUG FIX: guard against a single-point line, where prevCamera is still null.
        if (prevCamera != null)
        {
          camera.Heading = prevCamera.Heading;
          camera.Pitch = prevCamera.Pitch;
        }
      }
      else
      {
        var currentPoint = line.Points[i];
        var nextPoint = line.Points[i + 1];

        //Calculate the heading from the current point to the next point in the path.
        var difX = nextPoint.X - currentPoint.X;
        var difY = nextPoint.Y - currentPoint.Y;
        var radian = Math.Atan2(difX, difY);
        camera.Heading = radian * -180 / Math.PI;

        //If the user doesn't want to hardcode the pitch, calculate it from the slope
        //between the current point and the next point.
        if (Animation.Settings.UseLinePitch)
        {
          var hypotenuse = Math.Sqrt(Math.Pow(difX, 2) + Math.Pow(difY, 2));
          var difZ = nextPoint.Z - currentPoint.Z;

          //Convert the Z so the line's unit and the layer's elevation unit agree.
          if (line.SpatialReference.Unit.ConversionFactor != verticalUnit.ConversionFactor)
          {
            difZ *= (verticalUnit.ConversionFactor / line.SpatialReference.Unit.ConversionFactor);
          }

          radian = Math.Atan2(difZ, hypotenuse);
          camera.Pitch = radian * 180 / Math.PI;
        }
        else
        {
          camera.Pitch = Animation.Settings.Pitch;
        }
      }

      //The first point has time 0; subsequent times are offset by 3D segment length * secondsPerUnit.
      if (i > 0)
      {
        var lineSegment = PolylineBuilder.CreatePolyline(
          new List<MapPoint>() { line.Points[i - 1], line.Points[i] }, line.SpatialReference);
        currentTimeSeconds += lineSegment.Length3D * secondsPerUnit;
      }

      //Create a new keyframe using the camera and the time.
      cameraTrack.CreateKeyframe(camera, TimeSpan.FromSeconds(currentTimeSeconds), AnimationTransition.Linear);
      prevCamera = camera;
    }
  });
}
/// <summary>
/// Mouse-enter handler that draws a hard-coded test polygon as a map overlay.
/// NOTE(review): experimental code — the coordinates look like Web Mercator
/// values while the polygon is built in EPSG:32604 (UTM 4N); confirm the
/// intended spatial reference before shipping.
/// </summary>
/// <param name="sender">The image control that raised the event.</param>
/// <param name="e">Mouse event arguments (unused).</param>
async private void Image_MouseEnter(object sender, MouseEventArgs e)
{
  // Dead code removed: unused line symbol, WGS84/WebMercator references,
  // an unused projection, GeoCoordinateString round-trips and commented-out
  // scratch code contributed nothing to the observable behavior.
  Geometry geometry = null;

  await ArcGIS.Desktop.Framework.Threading.Tasks.QueuedTask.Run(() =>
  {
    SpatialReference inSR = SpatialReferenceBuilder.CreateSpatialReference(32604);

    // Closed ring: the first and last vertices are identical.
    List<Coordinate2D> coordinates = new List<Coordinate2D>()
    {
      new Coordinate2D(-17773406.8675, 2478583.7239999995),
      new Coordinate2D(-17773406.8675, 2578583.7239999995),
      new Coordinate2D(-16773406.8675, 2578583.7239999995),
      new Coordinate2D(-17773406.8675, 2478583.7239999995)
    };

    // Hollow fill with a 2pt solid black outline.
    _polygonSymbol = SymbolFactory.Instance.ConstructPolygonSymbol(ColorFactory.Instance.BlackRGB,
      SimpleFillStyle.Null,
      SymbolFactory.Instance.ConstructStroke(ColorFactory.Instance.BlackRGB, 2.0, SimpleLineStyle.Solid));

    using (PolygonBuilder polygonBuilder = new PolygonBuilder(coordinates, inSR))
    {
      geometry = polygonBuilder.ToGeometry();

      // NOTE(review): AddOverlayAsync returns a task; it is stored in _graphic
      // without awaiting — presumably kept so the overlay handle can be disposed
      // later. Confirm _graphic's type and lifetime against its other usages.
      _graphic = MapView.Active.AddOverlayAsync(geometry, _polygonSymbol.MakeSymbolReference());
    }
  });

  Console.WriteLine(sender.ToString());
}