public void write_polygon_with_hole()
{
    // Shell: a triangle with fractional X coordinates.
    Coordinate s0 = new Coordinate(10.1, 10);
    Coordinate s1 = new Coordinate(20.2, 20);
    Coordinate s2 = new Coordinate(30.3, 30);
    ILinearRing shell = Factory.CreateLinearRing(new[] { s0, s1, s2, s0 });

    // Hole: a smaller triangle inside the shell.
    Coordinate i0 = new Coordinate(15, 15);
    Coordinate i1 = new Coordinate(17, 15);
    Coordinate i2 = new Coordinate(15, 17);
    ILinearRing hole = Factory.CreateLinearRing(new[] { i0, i1, i2, i0 });

    IGeometry geometry = Factory.CreatePolygon(shell, new[] { hole });

    AttributesTable attributes = new AttributesTable();
    attributes.Add("prop0", "value0");
    attributes.Add("prop1", "value1");
    IFeature feature = new Feature(geometry, attributes);

    TopoJsonWriter writer = new TopoJsonWriter();
    string actual = writer.Write(feature);

    Assert.That(actual, Is.Not.Null);
    Assert.That(actual, Is.Not.Empty);
    Console.WriteLine(actual);
    Assert.That(actual, Is.EqualTo(TopoWriterData.PolygonWithHole));
}
public void WriteToNTS()
{
    // Bail out when a vertex sits exactly at the origin (treated as invalid input).
    if (vectLocations.Contains(new Vector3(0, 0, 0)))
    {
        return;
    }

    if (vertexSelector.lineFeatures.Contains(feature))
    {
        // The feature already exists: only refresh its attributes from the UI fields.
        attribute["Title"] = title.GetComponent<TMP_InputField>().text;
        attribute["Description"] = description.GetComponent<TMP_InputField>().text;
        feature.Attributes = attribute;
    }
    else
    {
        attribute.Add("Title", title.GetComponent<TMP_InputField>().text);
        attribute.Add("Description", description.GetComponent<TMP_InputField>().text);
        feature.Attributes = attribute;

        // Project each Unity world position into geographic coordinates,
        // carrying the Unity Y value through as the third ordinate.
        var coords = new GeoAPI.Geometries.Coordinate[vectLocations.Count];
        int index = 0;
        foreach (Vector3 position in vectLocations)
        {
            var cartesian = new Cognitics.CoordinateSystems.CartesianCoordinates(position.x, position.z);
            var geographic = cartesian.TransformedWith(cdbDatabase.Projection);
            coords[index] = new GeoAPI.Geometries.Coordinate(geographic.Longitude, geographic.Latitude, position.y);
            ++index;
        }

        feature.Geometry = new LineString(coords);
        vertexSelector.lineFeatures.Add(feature);
    }

    ClearFields();
}
/// <summary>
/// Builds an attribute table containing an "id" and a "label" entry.
/// </summary>
private static AttributesTable CreateAttributes(int id, string label)
{
    var attributes = new AttributesTable();
    attributes.Add("id", id);
    attributes.Add("label", label);
    return attributes;
}
public void TestBasicReadAndWriteOperationsWithCustomComparer()
{
    var expected = new Dictionary <string, object>();
    var actual = new AttributesTable(StringComparer.OrdinalIgnoreCase);

    // test everything after a serialize + deserialize round-trip
    ExpectedAndActual.RoundTrip(ref expected, ref actual);

    // Lookups must be case-insensitive: "HELLO" and "hello" are the same key.
    expected.Add("hello", "hi");
    actual.Add("HELLO", "hi");
    AssertEqual(expected, actual, ignoreCase: true);

    // With indexer-adds disabled, assigning to a brand-new key must throw
    // and leave the table unchanged.
    AttributesTable.AddAttributeWithIndexer = false;
    Assert.That(() => actual["Oh"] = 321, Throws.ArgumentException);
    AssertEqual(expected, actual, ignoreCase: true);

    // With indexer-adds enabled, the same assignment succeeds.
    AttributesTable.AddAttributeWithIndexer = true;
    expected["oh"] = 321;
    Assert.That(() => actual["oH"] = 321, Throws.Nothing);
    AssertEqual(expected, actual, ignoreCase: true);
    AssertEqual(expected, actual, ignoreCase: true);

    // Deletion uses the same case-insensitive key matching...
    expected.Remove("hello");
    actual.DeleteAttribute("hELlo");
    AssertEqual(expected, actual, ignoreCase: true);

    // ...and deleting a key that is already gone throws.
    Assert.That(() => actual.DeleteAttribute("heLLo"), Throws.ArgumentException);
    AssertEqual(expected, actual, ignoreCase: true);

    // Re-adding after deletion works, with any casing.
    expected.Add("hello", new object());
    actual.Add("hEllO", expected["hello"]);
    AssertEqual(expected, actual, ignoreCase: true);

    // Overwriting an existing key through the indexer works, with any casing.
    expected["hello"] = Guid.NewGuid();
    actual["HelLo"] = expected["hello"];
    AssertEqual(expected, actual, ignoreCase: true);

    // Add() on an existing (case-insensitively matched) key throws.
    Assert.That(() => actual.Add("hEllo", "oh, hi there"), Throws.ArgumentException);
    AssertEqual(expected, actual, ignoreCase: true);

    // Reading a missing key throws ArgumentException; GetType on a missing
    // key throws ArgumentOutOfRangeException.
    Assert.That(() => actual["a key that shouldn't exist"], Throws.ArgumentException);
    Assert.That(() => actual.GetType("a key that shouldn't exist"), Throws.InstanceOf <ArgumentOutOfRangeException>());
    AssertEqual(expected, actual, ignoreCase: true);
}
/// <summary>
/// Convert the given OSM way to a GeoJSON line feature.
/// A way whose first and last nodes share the exact same coordinates is
/// emitted as a polygon, otherwise as a line string.
/// </summary>
/// <param name="Way">An OSM way.</param>
/// <returns>The converted feature, or null when the way has no nodes or conversion fails.</returns>
public static Feature ToGeoJSON(this Way Way)
{
    // Example output:
    // {
    //   "type": "Feature",
    //   "id": "way/305352912",
    //   "properties": { "@id": "way/305352912" },
    //   "geometry": {
    //     "type": "LineString",
    //     "coordinates": [ [ 11.6023278, 50.8926376 ], [ 11.5054540, 50.7980146 ], [ 11.6023278, 50.8926376 ] ]
    //   }
    // }
    // https://wiki.openstreetmap.org/wiki/Overpass_turbo/Polygon_Features
    try
    {
        if (Way.Nodes.Count == 0)
        {
            return null;
        }

        var FirstNode = Way.Nodes.First();
        var LastNode = Way.Nodes.Last();
        // Exact equality is intended: a closed way repeats its first node verbatim.
        var isClosed = FirstNode.Latitude == LastNode.Latitude &&
                       FirstNode.Longitude == LastNode.Longitude;

        var id = string.Concat("way/", Way.Id);
        AttributesTable props = new AttributesTable();
        props.Add("osmid", id);
        foreach (var tag in Way.Tags)
        {
            props.Add(tag.Key, tag.Value);
        }

        // GeoJSON coordinate order is longitude first.
        Geometry geometry;
        var ring = new LineString(Way.Nodes.Select(n => new Coordinate(n.Longitude, n.Latitude)).ToArray());
        if (isClosed)
        {
            geometry = new Polygon(new LinearRing(ring.Coordinates));
        }
        else
        {
            geometry = ring;
        }

        return new Feature(geometry, props);
    }
    catch (Exception ex)
    {
        // Fix: include the way id and failure reason in the warning —
        // the original message dropped both and left 'ex' unused.
        Trace.TraceWarning("OSM to GeoJSON error for way " + Way.Id + ": " + ex.Message);
        return null;
    }
}
/// <summary>
/// Loads all shapefile records from <paramref name="v"/>, snapping geometries to a
/// precision grid and simplifying them; records whose geometry collapses to empty
/// are skipped. Records lacking a "NAME" attribute get a sequential fallback name.
/// </summary>
/// <param name="v">Path to the shapefile.</param>
/// <returns>The loaded features.</returns>
internal static IEnumerable <Feature> Load(string v)
{
    List<Feature> features = new List<Feature>();

    // 'using' guarantees the reader is released even when an exception
    // interrupts the read loop (the original leaked it on failure).
    using (var shapeFileDataReader = Shapefile.CreateDataReader(v, new GeometryFactory()))
    {
        DbaseFileHeader header = shapeFileDataReader.DbaseHeader;
        shapeFileDataReader.Reset();

        // The reducer is loop-invariant, so build it once instead of per record.
        var precisionModel = new PrecisionModel(10.0);
        var reducer = new NetTopologySuite.Precision.GeometryPrecisionReducer(precisionModel);

        // Read through all records of the shapefile (geometry and attributes).
        int fallbackName = 1;
        while (shapeFileDataReader.Read())
        {
            Geometry geometry = NetTopologySuite.Simplify.DouglasPeuckerSimplifier.Simplify(
                reducer.Reduce((Geometry)shapeFileDataReader.Geometry), 0.5);
            if (geometry.IsEmpty)
            {
                continue;
            }

            AttributesTable attributesTable = new AttributesTable();
            for (int i = 0; i < header.NumFields; i++)
            {
                DbaseFieldDescriptor fldDescriptor = header.Fields[i];
                // Values are read at i + 1 — presumably record column 0 is the
                // geometry (offset preserved from the original code).
                attributesTable.Add(fldDescriptor.Name, shapeFileDataReader.GetValue(i + 1));
            }
            if (!attributesTable.GetNames().Contains("NAME", StringComparer.InvariantCulture))
            {
                attributesTable.Add("NAME", fallbackName);
            }

            features.Add(new Feature { Geometry = geometry, Attributes = attributesTable });
            fallbackName++;
        }
    }
    return features;
}
/// <summary>
/// This function will search the feature attributes for all relevant names and place them in an object
/// to allow database search and a single place to look for a feature names.
/// </summary>
/// <param name="feature"></param>
public static void SetTitles(this IFeature feature)
{
    var titlesTable = new AttributesTable();

    // Candidate attributes: anything containing the NAME marker, shortest first.
    var remainingNames = feature.Attributes.GetNames()
        .OrderBy(n => n.Length)
        .Where(n => n.Contains(FeatureAttributes.NAME))
        .ToArray();

    foreach (var language in Languages.Array)
    {
        // Collect the values tagged with this language, then drop those
        // attribute names from the remaining pool.
        var localizedValues = remainingNames
            .Where(n => n.Contains(":" + language))
            .Select(n => feature.Attributes[n].ToString())
            .ToArray();
        remainingNames = remainingNames.Except(remainingNames.Where(n => n.Contains(":" + language))).ToArray();
        titlesTable.Add(language, localizedValues);
    }

    // names with no specific language
    titlesTable.Add(Languages.ALL, remainingNames.Select(n => feature.Attributes[n].ToString()).ToArray());
    feature.Attributes.AddOrUpdate(FeatureAttributes.POI_NAMES, titlesTable);
}
/// <summary>
/// Verifies that AttributesTableConverter serializes a two-entry table
/// to the expected flat JSON object.
/// </summary>
public void WriteJsonTest()
{
    AttributesTableConverter target = new AttributesTableConverter();
    AttributesTable value = new AttributesTable();
    value.Add("test1", "value1");
    value.Add("test2", "value2");
    StringBuilder sb = new StringBuilder();

    // Fix: dispose the writers (the original leaked both). The StringBuilder
    // retains the output after the writers are closed.
    using (StringWriter sw = new StringWriter(sb))
    using (JsonTextWriter writer = new JsonTextWriter(sw))
    {
        JsonSerializer serializer = new JsonSerializer();
        target.WriteJson(writer, value, serializer);
        writer.Flush();
    }

    Assert.AreEqual("{\"test1\":\"value1\",\"test2\":\"value2\"}", sb.ToString());
}
/// <summary>
/// Writes a single-feature multi-line-string shapefile named "line_string".
/// </summary>
private void TestShapeCreation()
{
    var coordinates = new[]
    {
        new Coordinate(0, 0),
        new Coordinate(1, 0),
        new Coordinate(1, 1),
    };
    var lineString = new LineString(coordinates);

    var attributes = new AttributesTable();
    attributes.Add("FOO", "FOO");

    var feature = new Feature(Factory.CreateMultiLineString(new ILineString[] { lineString }), attributes);
    var features = new[] { feature };

    var shapefileWriter = new ShapefileDataWriter("line_string")
    {
        Header = ShapefileDataWriter.GetHeader(features[0], features.Length)
    };
    shapefileWriter.Write(features);
}
/// <summary>
/// Reads a shapefile into an ArrayList of features that need converting from x,y coordinates to Long and Lat coordinates.
/// </summary>
/// <param name="filename">name of the shapefile (the file that has all the polygons for the footpaths)</param>
/// <param name="fact">the class that generates the structure of the points</param>
/// <returns>an ArrayList of Feature objects, one per shapefile record</returns>
public ArrayList ReadSHP(string filename, GeometryFactory fact)
{
    ArrayList features = new ArrayList(); //Array list for all the coordinates from the shapefile

    // Fix: wrap the reader in 'using' so it is closed even when an exception
    // interrupts the loop (the original leaked it on failure).
    using (ShapefileDataReader sfDataReader = new ShapefileDataReader(filename, fact))
    {
        DbaseFileHeader dbaseHeader = sfDataReader.DbaseHeader;
        while (sfDataReader.Read()) //reading through all the data in the shapefile
        {
            Feature feature = new Feature();          //one feature per record
            AttributesTable atTable = new AttributesTable();
            Geometry geometry = sfDataReader.Geometry;
            for (int i = 0; i < dbaseHeader.NumFields; i++)
            {
                DbaseFieldDescriptor fldDescriptor = dbaseHeader.Fields[i];
                atTable.Add(fldDescriptor.Name, sfDataReader.GetValue(i));
            }
            feature.Geometry = geometry;
            feature.Attributes = atTable;
            features.Add(feature);
        }
    }
    return features;
}
public async Task <IActionResult> PostFindUnmappedPartsFromGpsTrace([FromQuery] int traceId)
{
    var file = await CreateOsmGateway().GetTraceData(traceId);
    if (file == null)
    {
        return BadRequest("Invalid trace id: " + traceId);
    }
    using (var memoryStream = new MemoryStream())
    {
        await file.Stream.CopyToAsync(memoryStream);

        // Normalize the uploaded trace to GPX before analysis.
        var gpxBytes = await _dataContainerConverterService.Convert(memoryStream.ToArray(), file.FileName, DataContainerConverterService.GPX);
        var gpx = gpxBytes.ToGpx().UpdateBounds();
        var highwayType = GetHighwayType(gpx);
        var gpxItmLines = GpxToItmLineStrings(gpx);
        if (gpxItmLines.Any() == false)
        {
            return BadRequest("File does not contain any traces...");
        }

        var manipulatedItmLines = await _addibleGpxLinesFinderService.GetLines(gpxItmLines);

        // All resulting lines share the same attribute table.
        var attributesTable = new AttributesTable
        {
            { "highway", highwayType },
            { "source", "trace id: " + traceId }
        };
        var featureCollection = new FeatureCollection();
        foreach (var line in manipulatedItmLines)
        {
            featureCollection.Add(new Feature(ToWgs84LineString(line.Coordinates), attributesTable));
        }
        return Ok(featureCollection);
    }
}
/// <summary>
/// Reads a shapefile into an ArrayList of features that need converting from x,y coordinates to Long and Lat coordinates.
/// </summary>
/// <param name="filename">name of the shapefile (the file that has all the polygons for the footpaths)</param>
/// <param name="fact">the class that generates the structure of the points</param>
/// <returns>an ArrayList of Feature objects, one per shapefile record</returns>
public ArrayList ReadSHP(string filename, GeometryFactory fact)
{
    ArrayList features = new ArrayList();

    // Fix: wrap the reader in 'using' so it is closed even when an exception
    // interrupts the loop (the original leaked it on failure).
    using (ShapefileDataReader sfDataReader = new ShapefileDataReader(filename, fact))
    {
        DbaseFileHeader dbaseHeader = sfDataReader.DbaseHeader;
        while (sfDataReader.Read())
        {
            Feature feature = new Feature();
            AttributesTable atTable = new AttributesTable();
            Geometry geometry = sfDataReader.Geometry;
            for (int i = 0; i < dbaseHeader.NumFields; i++)
            {
                DbaseFieldDescriptor fldDescriptor = dbaseHeader.Fields[i];
                atTable.Add(fldDescriptor.Name, sfDataReader.GetValue(i));
            }
            feature.Geometry = geometry;
            feature.Attributes = atTable;
            features.Add(feature);
        }
    }
    return features;
}
/// <summary>
/// Reads every record of the given shapefile into a list of features,
/// copying the attribute columns into each feature's attribute table.
/// </summary>
/// <param name="pathToShpFile">Path to the .shp file.</param>
/// <returns>All records as features.</returns>
public static List <Feature> ReadShpFile(string pathToShpFile)
{
    GeometryFactory factory = new GeometryFactory();
    var features = new List <Feature> ();

    // Fix: dispose the reader (the original never closed it, leaking the file handle).
    using (ShapefileDataReader shapeFileDataReader = new ShapefileDataReader(pathToShpFile, factory))
    {
        string[] fieldNames = new string[shapeFileDataReader.FieldCount];
        for (int i = 0; i < fieldNames.Length; i++)
        {
            fieldNames[i] = shapeFileDataReader.GetName(i);
        }
        while (shapeFileDataReader.Read())
        {
            AttributesTable attributesTable = new AttributesTable();
            // Attribute values start at index 1 — presumably index 0 is the
            // geometry column (offset preserved from the original code).
            for (int i = 1; i < fieldNames.Length; i++)
            {
                attributesTable.Add(fieldNames[i], shapeFileDataReader.GetValue(i));
            }
            features.Add(new Feature(shapeFileDataReader.Geometry, attributesTable));
        }
    }
    return features;
}
/// <summary>
/// Stops the timer, prints the elapsed time for this step, and — when an
/// output folder is configured — dumps the shapes as a GeoJSON file named
/// after the step.
/// </summary>
private static void Log(Dictionary <string, IGeometry> shapes, string step, Stopwatch watch)
{
    watch.Stop();
    Console.WriteLine(step + $": {watch.ElapsedMilliseconds / 1000m} seconds");
    if (OutputFolder == null)
    {
        return;
    }

    var featureCollection = new FeatureCollection();
    foreach (var pair in shapes)
    {
        var attributes = new AttributesTable();
        if (pair.Key != null)
        {
            attributes.Add("Id", pair.Key);
        }
        var feature = new Feature
        {
            Geometry = pair.Value,
            Attributes = attributes
        };
        featureCollection.Add(feature);
    }

    var json = new GeoJsonWriter().Write(featureCollection);
    File.WriteAllText(OutputFolder + $@"{step}.json", json);
}
/// <summary>
/// Reads every record of the shapefile into a FeatureCollection,
/// copying all DBF columns into each feature's attribute table.
/// </summary>
public static FeatureCollection ReadShapeFile(string localShapeFile)
{
    var featureCollection = new FeatureCollection();
    var geometryFactory = new GeometryFactory();
    using (var dataReader = new ShapefileDataReader(localShapeFile, geometryFactory))
    {
        var dbaseHeader = dataReader.DbaseHeader;
        while (dataReader.Read())
        {
            var feature = new Feature { Geometry = dataReader.Geometry };
            var attributes = new AttributesTable();
            for (var fieldIndex = 0; fieldIndex < dbaseHeader.NumFields; fieldIndex++)
            {
                attributes.Add(dbaseHeader.Fields[fieldIndex].Name, dataReader.GetValue(fieldIndex));
            }
            feature.Attributes = attributes;
            featureCollection.Add(feature);
        }
    }
    return featureCollection;
}
/// <summary>
/// Lazily reads the time-zone shapefile, yielding one TimeZoneFeature per
/// record. Records whose "tzid" is "uninhabited" are skipped.
/// </summary>
public IEnumerable <TimeZoneFeature> ReadShapeFile()
{
    var geometryFactory = new GeometryFactory();
    using (var reader = new ShapefileDataReader(_shapeFile, geometryFactory))
    {
        var dbaseHeader = reader.DbaseHeader;
        while (reader.Read())
        {
            var attributes = new AttributesTable();
            for (int field = 0; field < dbaseHeader.NumFields; field++)
            {
                attributes.Add(dbaseHeader.Fields[field].Name, reader.GetValue(field));
            }

            // skip uninhabited areas
            var zoneName = (string)attributes["tzid"];
            if (zoneName.Equals("uninhabited", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            yield return new TimeZoneFeature
            {
                TzName = zoneName,
                Geometry = reader.Geometry,
            };
        }
    }
}
public async Task <IActionResult> PostGpsTrace([FromQuery] string url = "", [FromForm] IFormFile file = null)
{
    var fileFetcherGatewayResponse = await GetFile(url, file);
    if (fileFetcherGatewayResponse == null)
    {
        return BadRequest("Url is not provided or the file is empty... " + url);
    }

    // Normalize the uploaded content to GPX before analysis.
    var gpxBytes = await _dataContainerConverterService.Convert(fileFetcherGatewayResponse.Content, fileFetcherGatewayResponse.FileName, DataContainerConverterService.GPX);
    var gpx = gpxBytes.ToGpx().UpdateBounds();
    var highwayType = GetHighwayType(gpx);
    var gpxItmLines = GpxToItmLineStrings(gpx);
    if (gpxItmLines.Any() == false)
    {
        return BadRequest("File does not contain any traces...");
    }

    var manipulatedItmLines = await _addibleGpxLinesFinderService.GetLines(gpxItmLines);

    // All resulting lines share one attribute table; the source URL is
    // recorded only when one was supplied.
    var attributesTable = new AttributesTable { { "highway", highwayType } };
    if (!string.IsNullOrEmpty(url))
    {
        attributesTable.Add("source", url);
    }
    var features = manipulatedItmLines
        .Select(line => new Feature(ToWgs84LineString(line.Coordinates), attributesTable) as IFeature)
        .ToList();
    return Ok(new FeatureCollection(new Collection <IFeature>(features)));
}
// see https://code.google.com/p/nettopologysuite/issues/detail?id=146
public void Issue146_ShapeCreationWithInvalidAttributeName()
{
    var coordinates = new[]
    {
        new Coordinate(0, 0),
        new Coordinate(1, 0),
        new Coordinate(1, 1),
    };
    var multiLine = GeometryFactory.Default.CreateMultiLineString(new ILineString[] { new LineString(coordinates) });

    // "Simulation name" is expected to be rejected as a DBF attribute name.
    var attrs = new AttributesTable();
    attrs.Add("Simulation name", "FOO");
    var features = new[] { new Feature(multiLine, attrs) };

    ShapefileDataWriter shp_writer = null;
    Assert.Throws <ArgumentException>(() => shp_writer =
        new ShapefileDataWriter("invalid_line_string")
        {
            Header = ShapefileDataWriter.GetHeader(features[0], features.Length)
        });
    //Assert.Throws<ArgumentException>(() => shp_writer.Write(features));
}
/// <summary>
/// Drains the given reader into a FeatureCollection, copying all DBF columns
/// into each feature's attribute table. Takes ownership of the reader and
/// disposes it before returning.
/// </summary>
public static FeatureCollection GetFeatureCollection(ShapefileDataReader reader)
{
    FeatureCollection featureCollection = new FeatureCollection();
    using (reader)
    {
        var dbaseHeader = reader.DbaseHeader;
        while (reader.Read())
        {
            var attributes = new AttributesTable();
            for (var fieldIndex = 0; fieldIndex < dbaseHeader.NumFields; fieldIndex++)
            {
                attributes.Add(dbaseHeader.Fields[fieldIndex].Name, reader.GetValue(fieldIndex));
            }
            var feature = new Feature();
            feature.Geometry = reader.Geometry;
            feature.Attributes = attributes;
            featureCollection.Add(feature);
        }
    }
    return featureCollection;
}
/// <summary>
/// Reads the DBF record at the given zero-based index and returns its
/// values keyed by field name.
/// </summary>
public IAttributesTable ReadEntry(int index)
{
    if (m_IsDisposed)
    {
        throw new InvalidOperationException("Reader was disposed, cannot read from a disposed reader");
    }
    if (index < 0)
    {
        throw new ArgumentException("Index must be positive", "index");
    }

    // Records are fixed-length, so the byte offset can be computed directly.
    int offset = m_Header.HeaderLength + (index * m_Header.RecordLength);
    if (offset >= m_FileReader.BaseStream.Length)
    {
        throw new ArgumentOutOfRangeException("index", "No DBF entry with index " + index);
    }

    m_FileReader.BaseStream.Seek(offset, SeekOrigin.Begin);
    var attributes = new AttributesTable();
    var values = ReadCurrentEntry();
    for (int i = 0; i < values.Count; i++)
    {
        attributes.Add(m_Header.Fields[i].Name, values[i]);
    }
    return attributes;
}
/// <summary>
/// Drains the given reader into a list of features, copying all DBF
/// columns into each feature's attribute table.
/// </summary>
public static IReadOnlyCollection <Feature> ReadFeatures(this ShapefileDataReader shapefileDataReader)
{
    List<Feature> features = new List<Feature>();

    // The DBF header does not change between records — fetch it once
    // instead of once per iteration (original re-read it every loop).
    DbaseFileHeader header = shapefileDataReader.DbaseHeader;
    while (shapefileDataReader.Read())
    {
        Feature feature = new Feature();
        AttributesTable attributesTable = new AttributesTable();
        var geometry = shapefileDataReader.Geometry;
        for (int i = 0; i < header.NumFields; i++)
        {
            DbaseFieldDescriptor fldDescriptor = header.Fields[i];
            // First Field Geometry — attribute values are offset by one.
            var value = shapefileDataReader.GetValue(i + 1);
            attributesTable.Add(fldDescriptor.Name, value);
        }
        feature.Geometry = geometry;
        feature.Attributes = attributesTable;
        features.Add(feature);
    }
    return features;
}
public void GeoJsonWriterWriteAnyObjectTest()
{
    AttributesTable attributes = new AttributesTable();
    DateTime Date = new DateTime(2012, 8, 8).Date;

    // Serialize the date by itself first so the expected string embeds
    // whatever representation the serializer actually produces for it.
    JsonSerializer g = GeoJsonSerializer.CreateDefault();
    StringBuilder sb = new StringBuilder();
    using (StringWriter sw = new StringWriter(sb))
        g.Serialize(sw, Date);
    string expectedDateString = sb.ToString();

    // Expected output: an anonymous object carrying both the feature
    // collection (with CRS) and the date.
    string expectedResult = "{\"featureCollection\":{\"type\":\"FeatureCollection\",\"features\":[{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[23.0,56.0]},\"properties\":{\"test1\":\"value1\"}}],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"name1\"}}},\"Date\":" + expectedDateString + "}";

    attributes.Add("test1", "value1");
    IFeature feature = new Feature(new Point(23, 56), attributes);
    FeatureCollection featureCollection = new FeatureCollection(new Collection <IFeature> { feature })
    {
        CRS = new NamedCRS("name1")
    };

    var gjw = new GeoJsonWriter();
    // Ignore nulls so optional members do not appear in the output.
    gjw.SerializerSettings.NullValueHandling = NullValueHandling.Ignore;
    string actual = gjw.Write(new { featureCollection, Date = Date });
    Assert.AreEqual(expectedResult, actual);
}
public void write_simple_point()
{
    // A single point carrying two properties.
    IPoint geometry = Factory.CreatePoint(new Coordinate(23.4, 56.7));
    AttributesTable attributes = new AttributesTable();
    attributes.Add("prop0", "value0");
    attributes.Add("prop1", "value1");
    IFeature feature = new Feature(geometry, attributes);

    TopoJsonWriter writer = new TopoJsonWriter();
    string actual = writer.Write(feature);

    Assert.That(actual, Is.Not.Null);
    Assert.That(actual, Is.Not.Empty);
    Console.WriteLine(actual);
    Assert.That(actual, Is.EqualTo(TopoWriterData.SimplePoint));
}
/// <summary>
/// Converts a single node that represents relevant network information to one or more features.
/// </summary>
/// <param name="node">A node.</param>
/// <param name="extraTags">Merge in extra tags.</param>
/// <returns>A feature collection representing the node info; empty when the node has no location.</returns>
public static FeatureCollection ToFeatureCollection(this Node node, IEnumerable <Tag>?extraTags = null)
{
    var features = new FeatureCollection();
    if (!node.Latitude.HasValue || !node.Longitude.HasValue)
    {
        return features;
    }

    var attributes = new AttributesTable();

    // Adds the tag, overwriting an existing value for the same key.
    // (Replaces the duplicated add-or-replace loop bodies in the original.)
    void AddOrReplace(Tag tag)
    {
        if (attributes.Exists(tag.Key))
        {
            attributes[tag.Key] = tag.Value;
        }
        else
        {
            attributes.Add(tag.Key, tag.Value);
        }
    }

    if (extraTags != null)
    {
        foreach (var t in extraTags)
        {
            AddOrReplace(t);
        }
    }
    // The node's own tags are applied last, so they win over extra tags.
    if (node.Tags != null)
    {
        foreach (var t in node.Tags)
        {
            AddOrReplace(t);
        }
    }

    features.Add(new Feature(new Point(new Coordinate(node.Longitude.Value, node.Latitude.Value)), attributes));
    return features;
}
/// <summary>
/// Gets a features representing the vertex with the given id.
/// </summary>
public static Feature GetFeatureForVertex(this RouterDb routerDb, uint vertex)
{
    var location = routerDb.Network.GetVertex(vertex).ToCoordinate();
    var attributes = new AttributesTable();
    attributes.Add("id", vertex);
    return new Feature(new Point(location), attributes);
}
/// <summary>
/// Builds an attribute table from the OSM object's tags plus its identifier,
/// last-modified timestamp, and authoring user (when available).
/// </summary>
private IAttributesTable ConvertTags(ICompleteOsmGeo osmObject)
{
    // Start from the raw OSM tags, then add the identifier.
    var attributes = new AttributesTable(osmObject.Tags.ToDictionary(tag => tag.Key, tag => tag.Value as object))
    {
        { FeatureAttributes.ID, osmObject.GetId() }
    };
    if (osmObject.TimeStamp.HasValue)
    {
        // Round-trip ("o") format keeps the timestamp lossless and sortable.
        attributes.Add(FeatureAttributes.POI_LAST_MODIFIED, osmObject.TimeStamp.Value.ToString("o"));
    }
    if (string.IsNullOrWhiteSpace(osmObject.UserName) == false)
    {
        attributes.Add(FeatureAttributes.POI_USER_NAME, osmObject.UserName);
        attributes.Add(FeatureAttributes.POI_USER_ADDRESS, $"https://www.openstreetmap.org/user/{Uri.EscapeUriString(osmObject.UserName)}");
    }
    return attributes;
}
public void GeoJsonWriterWriteAttributesTest()
{
    // A one-entry table should serialize to a one-property JSON object.
    var attributes = new AttributesTable();
    attributes.Add("test1", "value1");

    string actual = new GeoJsonWriter().Write(attributes);

    Assert.AreEqual("{\"test1\":\"value1\"}", actual);
}
/// <summary>
/// Copies the current attribute values into a new table, then applies
/// each modifier on top of the copy.
/// </summary>
private static AttributesTable GetModifiedAttributes(IReadOnlyDictionary <BattalionAttribute, int> attributes, IEnumerable <BattalionStateModifier> modifiers)
{
    var result = new AttributesTable(attributes.ToDictionary(pair => pair.Key, pair => pair.Value));
    foreach (BattalionStateModifier modifier in modifiers)
    {
        result.Add(modifier.Attribute, modifier.Modifier);
    }
    return result;
}
public void GeoJsonWriterWriteFeatureTest()
{
    // A point feature with one property should serialize to a full
    // GeoJSON Feature object.
    var attributes = new AttributesTable();
    attributes.Add("test1", "value1");
    IFeature feature = new Feature(new Point(23, 56), attributes);

    string actual = new GeoJsonWriter().Write(feature);

    Assert.AreEqual("{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[23.0,56.0]},\"properties\":{\"test1\":\"value1\"}}", actual);
}
/// <summary>
/// Converts to instructions to features.
/// </summary>
public static FeatureCollection ToFeatureCollection(this IList <Instruction> instructions, Route route)
{
    var features = new FeatureCollection();
    foreach (var instruction in instructions)
    {
        // Each instruction is anchored at the route shape point it refers to.
        var location = route.Shape[instruction.Shape].ToCoordinate();
        var attributes = new AttributesTable();
        attributes.Add("text", instruction.Text);
        attributes.Add("type", instruction.Type.ToInvariantString().ToLowerInvariant());
        features.Add(new Feature(new Point(location), attributes));
    }
    return features;
}