/// <summary>
/// Deserializes the stored GEOJSON text into a feature collection.
/// </summary>
/// <returns>The parsed <see cref="FeatureCollection"/>.</returns>
public FeatureCollection GetVO()
{
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    return geoJsonReader.Read<FeatureCollection>(GEOJSON);
}
/// <summary>
/// Registers a route segment, computes its total length from its GeoJSON
/// geometry and wires it to its from/to route nodes for fast traversal.
/// </summary>
/// <param name="routeSegmentInfo">Segment to add; its Length, FromRouteNode and ToRouteNode are populated here.</param>
public void AddRouteSegmentInfo(RouteSegmentInfo routeSegmentInfo)
{
    _routeSegmentInfos.Add(routeSegmentInfo.Id, routeSegmentInfo);

    // Parse the segment geometry from its raw GeoJSON coordinate array.
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    var lineString = geoJsonReader.Read<LineString>(
        "{ \"type\": \"LineString\", \"coordinates\": " + routeSegmentInfo.Geometry.GeoJsonCoordinates + "}");

    // Sum the distances between consecutive points to get the segment length.
    double totalLength = 0;
    for (int pointIndex = 1; pointIndex < lineString.NumPoints; pointIndex++)
    {
        var previousPoint = lineString.GetPointN(pointIndex - 1);
        var currentPoint = lineString.GetPointN(pointIndex);
        var segmentStart = new GeoCoordinate(previousPoint.Y, previousPoint.X);
        var segmentEnd = new GeoCoordinate(currentPoint.Y, currentPoint.X);
        totalLength += segmentStart.GetDistanceTo(segmentEnd);
    }
    routeSegmentInfo.Length = totalLength;

    // Add object relations to facilitate easier and faster lookup and traversal
    routeSegmentInfo.FromRouteNode = GetRouteNodeInfo(routeSegmentInfo.FromRouteNodeId);
    routeSegmentInfo.ToRouteNode = GetRouteNodeInfo(routeSegmentInfo.ToRouteNodeId);
    ((RouteNodeInfo)routeSegmentInfo.FromRouteNode).AddOutgoingSegment(routeSegmentInfo);
    ((RouteNodeInfo)routeSegmentInfo.ToRouteNode).AddIngoingSegment(routeSegmentInfo);
}
/// <summary>
/// Reads the raw file contents, parses them as a GeoJSON feature collection
/// and serializes the features back to GeoJSON text.
/// </summary>
string GetJson()
{
    var rawData = ReadFile();
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    var featureCollection = geoJsonReader.Read<FeatureCollection>(rawData);
    return GetGeoJsonText(featureCollection);
}
/// <summary>
/// Creates a new route node after validating that the supplied geometry is a
/// non-null, parsable GeoJSON point, then raises a <see cref="RouteNodePlanned"/>
/// domain event describing the planned node.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the geometry is missing, not a point, or cannot be parsed.</exception>
internal RouteNode(Guid routeNodeId, string name, RouteNodeKindEnum nodeKind, RouteNodeFunctionKindEnum functionKind, RouteNetwork.Events.Model.Geometry geometry, LocationInfo locationInfo) : this()
{
    // Guard clauses: the geometry must be a non-null GeoJSON point with coordinates.
    if (geometry == null)
    {
        throw new ArgumentException($"Cannot create route node with id: {routeNodeId} because Geometry is null, which is not allowed.");
    }
    if (geometry.GeoJsonType == null)
    {
        throw new ArgumentException($"Cannot create route node with id: {routeNodeId} because Geometry.GeoJsonType is null, which is not allowed.");
    }
    if (geometry.GeoJsonType.ToLower() != "point")
    {
        throw new ArgumentException($"Cannot create route node with id: {routeNodeId} because Geometry.GeoJsonType is: {geometry.GeoJsonType}, which is not allowed in route nodes. Expected Point.");
    }
    if (geometry.GeoJsonCoordinates == null)
    {
        throw new ArgumentException($"Cannot create route node with id: {routeNodeId} because Geometry.GeoJsonCoordinates is null, which is not allowed.");
    }

    // Verify that the coordinate array actually parses as a GeoJSON point.
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    try
    {
        var parsedPoint = geoJsonReader.Read<Point>("{ \"type\": \"Point\", \"coordinates\": " + geometry.GeoJsonCoordinates + "}");
        if (parsedPoint == null)
        {
            throw new ArgumentException("Error parsing geometry: " + geometry);
        }
    }
    catch (Exception ex)
    {
        throw new ArgumentException("Error parsing geometry: " + geometry + " Got exception from NetTopologySuite: " + ex.Message, ex);
    }

    // Publish the node creation as a domain event.
    var routeNodePlannedEvent = new RouteNodePlanned()
    {
        Id = routeNodeId,
        Name = name,
        NodeKind = nodeKind,
        NodeFunctionKind = functionKind,
        InitialGeometry = geometry,
        LocationInfo = locationInfo
    };
    RaiseEvent(routeNodePlannedEvent, false);
}
/// <summary>
/// Wraps a GeoJSON coordinate array in a Point envelope and parses it.
/// </summary>
/// <param name="geojson">GeoJSON coordinate array text, e.g. "[1.0, 2.0]".</param>
/// <exception cref="ArgumentException">Thrown when NetTopologySuite cannot parse the coordinates.</exception>
private Point ConvertFromPointGeoJson(string geojson)
{
    try
    {
        var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
        return geoJsonReader.Read<Point>("{ \"type\": \"Point\", \"coordinates\": " + geojson + "}");
    }
    catch (Exception ex)
    {
        throw new ArgumentException("Error parsing geometry: " + geojson + " Got exception from NetTopologySuite: " + ex.Message, ex);
    }
}
/// <summary>
/// Wraps a GeoJSON coordinate array in a LineString envelope and parses it.
/// </summary>
/// <param name="geojson">GeoJSON coordinate array text, e.g. "[[1.0, 2.0], [3.0, 4.0]]".</param>
/// <exception cref="ArgumentException">Thrown when NetTopologySuite cannot parse the coordinates.</exception>
private LineString ConvertFromLineGeoJson(string geojson)
{
    try
    {
        var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
        return geoJsonReader.Read<LineString>("{ \"type\": \"LineString\", \"coordinates\": " + geojson + "}");
    }
    catch (Exception ex)
    {
        throw new ArgumentException("Error parsing geometry: " + geojson + " Got exception from NetTopologySuite: " + ex.Message, ex);
    }
}
/// <summary>
/// Regression test for issue 78: renders each feature of a GeoJSON
/// FeatureCollection as its own vector layer on top of a Bing tile layer,
/// then saves the rendered map image to the test image directory.
/// </summary>
public void TestIssue78()
{
    string jsonFile = TestUtility.GetPathToTestFile("FeatureCollection.json");
    if (!System.IO.File.Exists(jsonFile))
    {
        Assert.Ignore("Test file {0} not present.", jsonFile);
    }
    string json = System.IO.File.ReadAllText(jsonFile);
    // accumulates the combined extent of all feature layers
    var env = new Envelope();
    using (var map = new SharpMap.Map(new System.Drawing.Size(800, 400)))
    {
        map.Layers.Add(new SharpMap.Layers.TileLayer(
            BruTile.Predefined.KnownTileSources.Create(BruTile.Predefined.KnownTileSource.BingRoadsStaging, string.Empty),
            "BingRoad"));
        var rss = Newtonsoft.Json.Linq.JObject.Parse(json);
        var jsonReader = new NetTopologySuite.IO.GeoJsonReader();
        foreach (var shape in rss["features"])
        {
            // each feature is re-serialized to compact JSON and parsed individually
            var feature = jsonReader.Read<IFeature>(shape.ToString(Newtonsoft.Json.Formatting.None));
            var geom = feature.Geometry;
            var fp = new SharpMap.Data.Providers.GeometryFeatureProvider(geom);
            var layer = new SharpMap.Layers.VectorLayer("geojson", fp);
            // reproject WGS84 -> web mercator so features line up with the tile layer
            layer.CoordinateTransformation = new ProjNet.CoordinateSystems.Transformations.CoordinateTransformationFactory().CreateFromCoordinateSystems(
                ProjNet.CoordinateSystems.GeographicCoordinateSystem.WGS84,
                ProjNet.CoordinateSystems.ProjectedCoordinateSystem.WebMercator);
            layer.Style = new SharpMap.Styles.VectorStyle
            {
                Fill = new System.Drawing.SolidBrush(System.Drawing.Color.FromArgb(100, 255, 0, 0)),
                Outline = new System.Drawing.Pen(System.Drawing.Color.Red, 1.5f),
                EnableOutline = true
            };
            env.ExpandToInclude(layer.Envelope);
            map.Layers.Add(layer);
        }
        map.ZoomToBox(env);
        // small margin around the combined extent
        map.Zoom *= 1.1;
        using (var img = map.GetMap())
            img.Save(System.IO.Path.Combine(UnitTestsFixture.GetImageDirectory(this), $"TestIssue78.png"));
    }
}
/// <summary>
/// Updates/creates building information from the supplied GeoJSON text.
/// The payload is validated by parsing it as a feature collection before it
/// is persisted on the backing state object.
/// </summary>
/// <param name="geoJsonText">The GeoJSON feature collection to validate and store.</param>
/// <returns>A serialized Ok/Fail response describing the outcome.</returns>
async Task<string> IBMSHY.GeoDataLYXXUpdate(string geoJsonText)
{
    // GeoJsonReader.Read throws on malformed JSON rather than reliably
    // returning null, so wrap the parse: any parse failure maps to the same
    // "validation failed" response instead of an unhandled exception.
    FeatureCollection result;
    try
    {
        var reader = new NetTopologySuite.IO.GeoJsonReader();
        result = reader.Read<FeatureCollection>(geoJsonText);
    }
    catch (Exception)
    {
        result = null;
    }
    if (result == null)
    {
        return new FailResponse("数据校验失败").ToString();
    }

    // Persist the validated GeoJSON and write the state back.
    _lyxx.State.GEOJSON = geoJsonText;
    await _lyxx.WriteStateAsync();
    return new OkResponse("修改土地信息数据集成功").ToString();
}
/// <summary>
/// Converts a Google My Maps KMZ export into per-shop Hugo content pages:
/// cleans the output directory, loads district boundaries from GeoJSON to
/// classify each placemark into a Hong Kong district, sanitizes names into
/// file names, and writes one HTML file with front matter per placemark.
/// </summary>
static void Main(string[] args)
{
    Console.OutputEncoding = Encoding.UTF8;
    var inputFile = "google_export.kmz";
    var outputPath = "../hugo/content/shops/";
    var outputDirectory = Directory.CreateDirectory(outputPath);
    Console.WriteLine("Cleaning up old files...");
    foreach (var file in outputDirectory.EnumerateFiles("*.html"))
    {
        file.Delete();
    }
    Console.WriteLine("Done.");
    var generatedPostDateString = "2019-06-09";
    var colourMapping = getShopColourMapping();
    var shopTypeMapping = getShopTypeMapping();
    // district boundaries, used to build one point-in-area locator per district
    var districtBoundaryJson = File.ReadAllText("hksar_18_district_boundary.json", Encoding.UTF8);
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    var featureCollection = geoJsonReader.Read<NetTopologySuite.Features.FeatureCollection>(districtBoundaryJson);
    var districtLocators = new Dictionary<string, IPointOnGeometryLocator>(featureCollection.Count + 1);
    // custom polygon for shops on map that have no physical presence
    var virtualShopCoordinates = new Coordinate[]
    {
        new Coordinate(114.0566591, 22.3235484),
        new Coordinate(114.1542176, 22.3105200),
        new Coordinate(114.1533560, 22.2981202),
        new Coordinate(114.1337037, 22.2922693),
        new Coordinate(114.1169433, 22.2812754),
        new Coordinate(114.1167716, 22.2423337),
        new Coordinate(114.0575151, 22.2418568),
        new Coordinate(114.0566591, 22.3235484),
    };
    var virtualShopPolygon = new NetTopologySuite.Geometries.Polygon(new NetTopologySuite.Geometries.LinearRing(virtualShopCoordinates));
    districtLocators.Add("網上商店", new IndexedPointInAreaLocator(virtualShopPolygon));
    foreach (NetTopologySuite.Features.Feature feature in featureCollection)
    {
        districtLocators.Add(feature.Attributes["地區"].ToString(), new IndexedPointInAreaLocator(feature.Geometry));
    }
    Stopwatch sw = Stopwatch.StartNew();
    var counter = 0;
    {
        KmlFile kmlFile;
        using (var zipStream = File.OpenRead(inputFile))
        using (var archive = new ZipArchive(zipStream, ZipArchiveMode.Read))
        {
            var entry = archive.GetEntry("doc.kml");
            using (var kmlStream = entry.Open())
            using (var kmlStreamReader = new StreamReader(kmlStream))
            using (var memoryStream = new MemoryStream())
            using (var memoryStreamWriter = new StreamWriter(memoryStream))
            {
                char[] buffer = new char[1024];
                var bufferSpan = new Span<char>(buffer);
                int charactersRead;
                while ((charactersRead = kmlStreamReader.Read(bufferSpan)) > 0)
                {
                    foreach (var character in bufferSpan.Slice(0, charactersRead))
                    {
                        // remove invalid control characters from XML
                        if (!Char.IsControl(character))
                        {
                            memoryStreamWriter.Write(character);
                        }
                    }
                }
                memoryStreamWriter.Flush();
                memoryStream.Seek(0, SeekOrigin.Begin);
                kmlFile = KmlFile.Load(memoryStream);
            }
        }
        if (kmlFile.Root is Kml kml)
        {
            // sample styleUrl: #icon-1507-0288D1
            var styleUrlRegex = new Regex(@"^#icon-(\d+)-(\w+)");
            // handle shop name collision cases
            var nameCountDict = new Dictionary<string, int>();
            var invalidFileNameChars = new HashSet<char> { '<', '>', ':', '"', '/', '\\', '|', '?', '*', '#', '.', '\r', '\n' };
            var reasonToEndRegex = new Regex(@"\b(原因|官方資訊)[\s\S]+", RegexOptions.Compiled);
            var hyperlinkRegex = new Regex(@"\b(?<!"")(https?://[^ <>()""]+)", RegexOptions.Compiled);
            foreach (var placemark in kml.Flatten().OfType<Placemark>()) // for testing: .Take(200))
            {
                var lat = 0.0;
                var lng = 0.0;
                var district = "不詳";
                if (placemark.Geometry is SharpKml.Dom.Point point)
                {
                    lat = point.Coordinate.Latitude;
                    lng = point.Coordinate.Longitude;
                    // first locator whose area contains the point decides the district
                    foreach (var entry in districtLocators)
                    {
                        if (PointOnGeometryLocatorExtensions.Intersects(entry.Value, new NetTopologySuite.Geometries.Coordinate(lng, lat)))
                        {
                            district = entry.Key;
                            break;
                        }
                    }
                }
                var nameBuilder = new StringBuilder(100);
                var fileNameBuilder = new StringBuilder(100);
                foreach (char c in placemark.Name)
                {
                    // remove newlines for shop name
                    if (c != '\r' && c != '\n')
                    {
                        nameBuilder.Append(c);
                    }
                    // remove invalid chars for file name
                    if (!invalidFileNameChars.Contains(c))
                    {
                        if (c == ' ')
                        {
                            // replace space in file name
                            fileNameBuilder.Append('-');
                        }
                        else
                        {
                            fileNameBuilder.Append(c);
                        }
                    }
                }
                // trim both names and prepend lat-lng to file name
                var name = nameBuilder.ToString().Trim();
                var fileName = $"{lat.ToString("0.000")}-{lng.ToString("0.000")}-{fileNameBuilder.ToString().Trim('-')}";
                // truncate file name to avoid crashing netlify build
                var fileNameStringInfo = new StringInfo(fileName);
                if (fileNameStringInfo.LengthInTextElements > 70)
                {
                    fileName = fileNameStringInfo.SubstringByTextElements(0, 70) + "…";
                }
                // disambiguate colliding file names with a numeric suffix
                if (nameCountDict.TryGetValue(fileName, out int existingCount))
                {
                    int count = existingCount + 1;
                    nameCountDict[fileName] = count;
                    fileName = $"{fileName}-{count}";
                }
                else
                {
                    nameCountDict[fileName] = 1;
                }
                var colour = "";
                var type = "";
                string styleUrlString = placemark.StyleUrl.ToString();
                var match = styleUrlRegex.Match(styleUrlString);
                if (match.Success)
                {
                    // 1: shop type
                    if (shopTypeMapping.TryGetValue(match.Groups[1].Value, out string shopType))
                    {
                        type = shopType;
                    }
                    else
                    {
                        Console.WriteLine($"Unknown shop type: {match.Groups[1].Value} for {name}");
                        type = "種類不明";
                    }
                    // 2: shop colour
                    colour = colourMapping[match.Groups[2].Value];
                }
                var description = placemark.Description?.Text;
                if (description == null)
                {
                    Console.WriteLine($"Warning: Null description encountered: {name}");
                    description = "";
                }
                using (var fileWriter = new StreamWriter(Path.Combine(outputPath, $"{fileName}.html"), false))
                {
                    // front matter
                    fileWriter.WriteLine("---");
                    fileWriter.WriteLine($"title: '{name.Replace("'", "''")}'");
                    fileWriter.WriteLine($"date: {generatedPostDateString}");
                    fileWriter.WriteLine($"districts: {district}");
                    fileWriter.WriteLine($"colours: {colour}");
                    fileWriter.WriteLine($"categories: {type}");
                    fileWriter.WriteLine($"lat: {lat}");
                    fileWriter.WriteLine($"lng: {lng}");
                    fileWriter.WriteLine("---");
                    // turn bare URLs into anchors
                    description = hyperlinkRegex.Replace(description, "<a href=\"$1\">$1</a>");
                    // description HTML
                    if (colour == "黃店")
                    {
                        fileWriter.WriteLine(reasonToEndRegex.Replace(description, ""));
                    }
                    else
                    {
                        fileWriter.WriteLine(description);
                    }
                }
                if (++counter % 100 == 0)
                {
                    Console.WriteLine($"Processed {counter} records");
                }
            }
        }
    }
    sw.Stop();
    Console.WriteLine($"Total time taken to process {counter} records: {sw.Elapsed.TotalSeconds} seconds");
}
/// <summary>
/// Loads a test network from geojson.
/// Point features with a parsable uint "id" attribute become vertices;
/// LineString features become edges ("meta:*" attributes go to edge meta,
/// "stroke*" is ignored, the rest form the edge profile); LineStrings tagged
/// restriction=yes are loaded as vertex-sequence restrictions instead.
/// </summary>
public static void LoadTestNetwork(this RouterDb db, string geoJson, float tolerance = 20)
{
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    var features = geoJsonReader.Read<FeatureCollection>(geoJson);

    // Pass 1: add all point features carrying a parsable uint "id" as vertices.
    foreach (var feature in features.Features)
    {
        if (feature.Geometry is Point)
        {
            var point = feature.Geometry as Point;
            uint id;
            if (feature.Attributes.Exists("id") &&
                uint.TryParse(feature.Attributes["id"].ToInvariantString(), out id))
            {
                // has and id, add as vertex.
                db.Network.AddVertex(id,
                    (float)point.Coordinate.Y,
                    (float)point.Coordinate.X);
            }
        }
    }

    // Pass 2: add all non-restriction linestrings as edges.
    foreach (var feature in features.Features)
    {
        if (feature.Geometry is LineString)
        {
            if (feature.Attributes.Contains("restriction", "yes"))
            {
                continue;
            }
            var line = feature.Geometry as LineString;
            var profile = new Itinero.Attributes.AttributeCollection();
            var names = feature.Attributes.GetNames();
            foreach (var name in names)
            {
                // everything that is not meta:* or stroke* belongs to the routing profile
                if (!name.StartsWith("meta:") && !name.StartsWith("stroke"))
                {
                    profile.AddOrReplace(name, feature.Attributes[name].ToInvariantString());
                }
            }
            var meta = new Itinero.Attributes.AttributeCollection();
            foreach (var name in names)
            {
                if (name.StartsWith("meta:"))
                {
                    meta.AddOrReplace(name.Remove(0, "meta:".Length),
                        feature.Attributes[name].ToInvariantString());
                }
            }
            var profileId = db.EdgeProfiles.Add(profile);
            var metaId = db.EdgeMeta.Add(meta);

            // snap the first coordinate to an existing vertex within tolerance
            var vertex1 = db.SearchVertexFor(
                (float)line.Coordinates[0].Y,
                (float)line.Coordinates[0].X, tolerance);
            var distance = 0.0;
            var shape = new List<Coordinate>();
            for (var i = 1; i < line.Coordinates.Length; i++)
            {
                var vertex2 = db.SearchVertexFor(
                    (float)line.Coordinates[i].Y,
                    (float)line.Coordinates[i].X, tolerance);
                distance += Coordinate.DistanceEstimateInMeter(
                    (float)line.Coordinates[i - 1].Y, (float)line.Coordinates[i - 1].X,
                    (float)line.Coordinates[i].Y, (float)line.Coordinates[i].X);
                if (vertex2 == Itinero.Constants.NO_VERTEX)
                {
                    // add this point as shapepoint.
                    shape.Add(line.Coordinates[i].FromCoordinate());
                    continue;
                }
                // reached the next vertex: emit an edge with the accumulated shape/distance
                db.Network.AddEdge(vertex1, vertex2, new Itinero.Data.Network.Edges.EdgeData()
                {
                    Distance = (float)distance,
                    MetaId = metaId,
                    Profile = (ushort)profileId
                }, shape);
                shape.Clear();
                vertex1 = vertex2;
                distance = 0;
            }
        }
    }

    // Pass 3: load restriction=yes linestrings as vertex-sequence restrictions.
    foreach (var feature in features.Features)
    {
        if (feature.Geometry is LineString &&
            feature.Attributes.Contains("restriction", "yes"))
        {
            var line = feature.Geometry as LineString;
            var sequence = new List<uint>();
            sequence.Add(db.SearchVertexFor(
                (float)line.Coordinates[0].Y,
                (float)line.Coordinates[0].X, tolerance));
            for (var i = 1; i < line.Coordinates.Length; i++)
            {
                sequence.Add(db.SearchVertexFor(
                    (float)line.Coordinates[i].Y,
                    (float)line.Coordinates[i].X, tolerance));
            }
            var vehicleType = string.Empty;
            if (!feature.Attributes.TryGetValueAsString("vehicle_type", out vehicleType))
            {
                vehicleType = string.Empty;
            }
            RestrictionsDb restrictions;
            if (!db.TryGetRestrictions(vehicleType, out restrictions))
            {
                restrictions = new RestrictionsDb();
                db.AddRestrictions(vehicleType, restrictions);
            }
            restrictions.Add(sequence.ToArray());
        }
    }
}
/// <summary>
/// Reads "file.geojson" from the app package and converts each Polygon /
/// MultiPolygon feature into a <see cref="PolyInfo"/> carrying an NTS geometry.
/// Other geometry types are silently skipped.
/// </summary>
/// <returns>The list of converted regions.</returns>
public static async Task<List<PolyInfo>> InitGeoJSON()
{
    var regions = new List<PolyInfo>();
    // reading json from resources
    string jsonData;
    using (var stream = await FileSystem.OpenAppPackageFileAsync("file.geojson"))
    {
        using (var fileReader = new StreamReader(stream))
        {
            jsonData = await fileReader.ReadToEndAsync();
        }
    }
    // creating an NTS geojson reader
    var reader = new NetTopologySuite.IO.GeoJsonReader();
    // read FeatureCollection from geojson
    var featureCollection = reader.Read<GeoJSON.Net.Feature.FeatureCollection>(jsonData);
    // loop through and convert all features
    for (int featureIndex = 0; featureIndex < featureCollection.Features.Count; featureIndex++)
    {
        // get feature
        var jsonFeature = featureCollection.Features[featureIndex];
        PolyInfo curr;
        // "universal" switch over all GeoJSON types, although only
        // MultiPolygon and Polygon are actually handled here
        switch (jsonFeature.Geometry.Type)
        {
            case GeoJSONObjectType.Point:
                break;
            case GeoJSONObjectType.MultiPoint:
                break;
            case GeoJSONObjectType.LineString:
                break;
            case GeoJSONObjectType.MultiLineString:
                break;
            case GeoJSONObjectType.MultiPolygon:
            {
                var multiPolygon = jsonFeature.Geometry as GeoJSON.Net.Geometry.MultiPolygon;
                var curProperties = new Dictionary<string, object>(jsonFeature.Properties);
                curr = new PolyInfo(Convert.ToInt32(curProperties["HR_UID"]),
                    curProperties["FRENAME"].ToString(),
                    curProperties["ENGNAME"].ToString());
                // list of polygons that will be filled
                var polys = new List<Polygon>();
                // coordinate array for outer shell only
                var lRing = new List<Coordinate>();
                // creating an array of linear rings
                var rings = new List<LinearRing>();
                LinearRing shell;
                foreach (var poly in multiPolygon.Coordinates)
                {
                    foreach (var ring in poly.Coordinates)
                    {
                        if (ring.IsLinearRing())
                        {
                            // building an array of coordinates per ring
                            foreach (var coordinate in ring.Coordinates)
                            {
                                var location = coordinate as GeoJSON.Net.Geometry.Position;
                                if (location == null)
                                {
                                    continue;
                                }
                                // NOTE(review): coordinates are built as (Latitude, Longitude),
                                // i.e. (y, x) in NTS terms — confirm downstream consumers expect this order.
                                lRing.Add(new Coordinate(location.Latitude,
                                    location.Longitude));
                            }
                            // adding that array into the list of LinearRings
                            rings.Add(new LinearRing(lRing.ToArray()));
                            lRing.Clear();
                        }
                    }
                    // first ring is the shell, remaining rings are holes
                    shell = rings[0];
                    rings.RemoveAt(0);
                    polys.Add(rings.Count == 0 ? new Polygon(shell) : new Polygon(shell, rings.ToArray()));
                    shell = null;
                    rings.Clear();
                    lRing.Clear();
                }
                curr.geom = new NetTopologySuite.Geometries.MultiPolygon(polys.ToArray());
                regions.Add(curr);
            }
            break;
            case GeoJSONObjectType.Polygon:
            {
                // creating a dictionary for all the properties of the current Feature
                var curProperties = new Dictionary<string, object>(jsonFeature.Properties);
                curr = new PolyInfo(Convert.ToInt32(curProperties["HR_UID"]),
                    curProperties["FRENAME"].ToString(),
                    curProperties["ENGNAME"].ToString());
                // convert the polygon to GeoJSON Polygon type
                var polygon = jsonFeature.Geometry as GeoJSON.Net.Geometry.Polygon;
                // output number of linear rings in the polygon
                // Console.WriteLine($"Number of linear rings in polygon: {polygon.Coordinates.Count}");
                // coordinate array for outer shell only
                var lRing = new List<Coordinate>();
                var rings = new List<LinearRing>();
                LinearRing shell;
                foreach (var ring in polygon.Coordinates)
                {
                    if (ring.IsLinearRing())
                    {
                        // building an array of coordinates per ring
                        foreach (var coordinate in ring.Coordinates)
                        {
                            var location = coordinate as GeoJSON.Net.Geometry.Position;
                            if (location == null)
                            {
                                continue;
                            }
                            // NOTE(review): same (Latitude, Longitude) ordering as the MultiPolygon case above.
                            lRing.Add(new Coordinate(location.Latitude, location.Longitude));
                        }
                        // adding that array into the list of LinearRings
                        rings.Add(new LinearRing(lRing.ToArray()));
                        lRing.Clear();
                    }
                }
                // separate shell from others
                shell = rings[0];
                rings.RemoveAt(0);
                // create final geometry piece (polygon)
                curr.geom = rings.Count == 0 ? new Polygon(shell) : new Polygon(shell, rings.ToArray());
                regions.Add(curr);
            }
            break;
            case GeoJSONObjectType.GeometryCollection:
                break;
            case GeoJSONObjectType.Feature:
                break;
            case GeoJSONObjectType.FeatureCollection:
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
    }
    return (regions);
}
/// <summary>
/// Streams features out of a GeoJSON file, keeps those whose envelope
/// intersects the given bounding box, converts them via createGeoObject and
/// publishes them through _producer in batches of 5000. The topic name is
/// taken from the second dot-separated part of the file name
/// (e.g. "prefix.topicname" -> "topicname").
/// </summary>
/// <param name="fileName">Path of the GeoJSON file to read.</param>
/// <param name="minX">Bounding box minimum X.</param>
/// <param name="maxX">Bounding box maximum X.</param>
/// <param name="minY">Bounding box minimum Y.</param>
/// <param name="maxY">Bounding box maximum Y.</param>
private void filterGeoPosition(String fileName, double minX, double maxX, double minY, double maxY)
{
    var jsonDoc = "";
    var batch = new List<string>();
    var boundingBox = new NetTopologySuite.Geometries.Envelope(minX, maxX, minY, maxY);
    var feature = new NetTopologySuite.Features.Feature();
    using (FileStream s = File.Open(fileName, FileMode.Open))
    using (var streamReader = new StreamReader(s))
    {
        var file = Path.GetFileNameWithoutExtension(fileName).Split(".");
        var topicname = file[1];
        using (var jsonreader = new Newtonsoft.Json.JsonTextReader(streamReader))
        {
            while (jsonreader.Read())
            {
                // NOTE(review): a new GeoJsonReader is allocated on every token of the
                // outer loop; it looks like it could be hoisted above the loops — confirm.
                var reader = new NetTopologySuite.IO.GeoJsonReader();
                if (jsonreader.TokenType == Newtonsoft.Json.JsonToken.StartObject)
                {
                    while (jsonreader.Read())
                    {
                        if (jsonreader.TokenType == Newtonsoft.Json.JsonToken.StartArray)
                        {
                            while (jsonreader.Read())
                            {
                                try
                                {
                                    if (jsonreader != null)
                                    {
                                        feature = reader.Read<NetTopologySuite.Features.Feature>(jsonreader);
                                    }
                                    var geo = feature.Geometry;
                                    var atr = feature.Attributes;
                                    // only forward features whose envelope intersects the bounding box
                                    if (boundingBox.Intersects(geo.EnvelopeInternal))
                                    {
                                        jsonDoc = createGeoObject(atr, geo);
                                        batch.Add(jsonDoc);
                                        if (batch.Count >= 5000)
                                        {
                                            _producer.Produce(topicname, batch);
                                            _logger.LogInformation("Wrote " + batch.Count + " objects into " + topicname);
                                            batch.Clear();
                                        }
                                    }
                                }
                                //Loop gives reader exception when it reaches the last element from the file
                                // NOTE(review): the loop is terminated via JsonReaderException, and the
                                // handler re-emits the last successfully parsed feature before flushing —
                                // confirm this duplicate emit is intended and not a latent bug.
                                catch (Newtonsoft.Json.JsonReaderException e)
                                {
                                    _logger.LogError("Error writing data: {0}.", e.GetType().Name);
                                    var geo = feature.Geometry;
                                    var atr = feature.Attributes;
                                    jsonDoc = createGeoObject(atr, geo);
                                    batch.Add(jsonDoc);
                                    _producer.Produce(topicname, batch);
                                    _logger.LogInformation("Wrote " + batch.Count + " objects into " + topicname);
                                    batch.Clear();
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            // flush whatever remains in the batch
            if (batch != null)
            {
                _producer.Produce(topicname, batch);
                _logger.LogInformation("Wrote " + batch.Count + " objects into " + topicname);
                batch.Clear();
            }
        }
    }
}
/// <summary>
/// Processes a GeoJSON file and uploads it to a db: extracts any uploaded zip,
/// parses the feature collection, derives a column model from the union of all
/// feature properties, creates the destination table and bulk-inserts the data
/// (geometries transformed from EPSG:4326 to 3857), then indexes the geometry
/// column and applies the bounding box.
/// </summary>
/// <param name="dbCtx">Db context used to persist the DataStore record</param>
/// <param name="path">Path to a directory a GeoJSON has been uploaded to</param>
/// <param name="dsc">Credentials of the target data source database</param>
/// <returns>The created <see cref="DataStore"/> describing the imported layer</returns>
public static async Task<DataStore> ProcessGeoJson(DbContext dbCtx, string path, DataSourceCredentials dsc)
{
    //assuming a single zip can only be present in a directory, as uploading data for a single layer
    //if there is a zip archive, need to extract it
    ExtractZip(path);

    //test for required shp format files presence...
    var geoJson = Directory.GetFiles(path, "*.geojson").FirstOrDefault();
    if (string.IsNullOrEmpty(geoJson))
    {
        throw MapHive.Core.DataModel.Validation.Utils.GenerateValidationFailedException("GeoJSON", "no_geojson_file", "GeoJSON file has not been found");
    }

    var fName = Path.GetFileNameWithoutExtension(geoJson);
    var output = GetDataStore(fName, "geojson", dsc);

    var jsonData = File.ReadAllText(geoJson);
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    // pass geoJson's FeatureCollection to read all the features
    var featureCollection = geoJsonReader.Read<GeoJSON.Net.Feature.FeatureCollection>(jsonData);
    // if feature collection is null then return
    if (featureCollection == null)
    {
        throw MapHive.Core.DataModel.Validation.Utils.GenerateValidationFailedException("GeoJSON", "no_data", "GeoJSON file could not be deserialized");
    }

    //grab bbox - assume it to be in 4326
    //(output.MinX, output.MinY, output.MaxX, output.MaxY) = ExtractGeoJsonBbox(featureCollection);
    //Note: calculating bbox after import succeeded

    //first work out a data model - this is json, so can have different props per object
    foreach (var f in featureCollection.Features)
    {
        foreach (var fProperty in f.Properties)
        {
            // column already registered from a previous feature
            if (output.DataSource.Columns.Any(c => c.Name == GetSafeDbObjectName(fProperty.Key)))
            {
                continue;
            }
            if (!CheckIfObjectSafe(fProperty.Key))
            {
                throw MapHive.Core.DataModel.Validation.Utils.GenerateValidationFailedException("ColName", "bad_col_name", "Column name contains forbidden words");
            }
            var colType = SystemTypeToColumnDataType(fProperty.Value.GetType());
            if (colType != ColumnDataType.Unknown)
            {
                output.DataSource.Columns.Add(new Column { Type = colType, Name = GetSafeDbObjectName(fProperty.Key), FriendlyName = fProperty.Key });
            }
        }
    }

    //Since reading GeoJSON, there is also a geometry column
    output.DataSource.Columns.Add(new Column { Type = ColumnDataType.Geom, Name = "geom", FriendlyName = "Geom" });

    //create object straight away, so when something goes wrong with import, etc. there is a chance for a cleanup bot
    //to pick it up and cleanup the orphaned data when necessary
    await output.CreateAsync(dbCtx);

    //once have the shp meta, can create a db table for the data
    var ensureSchemaSql = GetEnsureSchemaSql(output);
    var createTableSql = GetCreateTableSql(output);
    //geometry col index
    var geomIdxSql = GetCreateGeomIdxSql(output);

    using (var conn = new NpgsqlConnection(output.DataSource.DataSourceCredentials.GetConnectionString()))
    using (var cmd = new NpgsqlCommand())
    {
        await conn.OpenAsync();
        cmd.Connection = conn;

        cmd.CommandText = ensureSchemaSql;
        await cmd.ExecuteNonQueryAsync();

        cmd.CommandText = createTableSql;
        await cmd.ExecuteNonQueryAsync();

        //table ready, so pump in the data
        //assume the geoms to be in EPSG:4326 (GeoJSON :)
        var batchSize = 25;
        var processed = 0;
        var insertData = new List<string>(batchSize);
        foreach (var f in featureCollection.Features)
        {
            // flush a full batch before starting the next one
            if (processed > 0 && processed % batchSize == 0)
            {
                await ExecuteInsertBatch(cmd, output, insertData);
            }
            processed += 1;

            // one value slot per property plus one trailing slot for the geometry
            var data = new string[f.Properties.Count + 1];
            var pIdx = 0;
            foreach (var c in output.DataSource.Columns)
            {
                if (c.Type == ColumnDataType.Geom)
                {
                    continue;
                }
                // named parameter unique per row/column
                data[pIdx] = $"@r{processed}_p{pIdx}";
                if (f.Properties.ContainsKey(c.FriendlyName))
                {
                    if (f.Properties[c.FriendlyName] != null)
                    {
                        cmd.Parameters.AddWithValue(data[pIdx], f.Properties[c.FriendlyName]);
                    }
                    else
                    {
                        cmd.Parameters.Add(GetDbNullValueParamForColumn(data[pIdx], c));
                    }
                }
                else
                {
                    cmd.Parameters.Add(GetDbNullValueParamForColumn(data[pIdx], c));
                }
                pIdx += 1;
            }
            //geom
            // NOTE(review): the serialized geometry is concatenated straight into the SQL
            // expression rather than passed as a parameter — a geometry containing a quote
            // would break the statement; confirm the serializer output is always safe here.
            data[data.Length - 1] = $"ST_Transform(ST_SetSRID(ST_GeomFromGeoJSON('{JsonConvert.SerializeObject(f.Geometry)}'),4326),3857)"; //assume geojson to be always in 4326 and transform it to spherical mercator!
            insertData.Add($"SELECT {string.Join(",", data)}");
        }
        // flush the final, possibly partial batch
        if (insertData.Count > 0)
        {
            await ExecuteInsertBatch(cmd, output, insertData);
        }

        //when ready, index the geom col, so geom reads are quicker
        cmd.CommandText = geomIdxSql;
        await cmd.ExecuteNonQueryAsync();

        await CalculateAndApplyBBox(cmd, output);

        //location col indexing as required
        await CreateLocationIndex(cmd, output);
    }
    return (output);
}
/// <summary>
/// Loads a test network from geojson.
/// Point features carrying a parsable uint "id" attribute are added as
/// vertices; LineString features are added as edges between located vertices,
/// with "meta:*" attributes stored as edge meta data and all remaining
/// attributes (except "stroke*") stored as the edge profile. Coordinates that
/// do not resolve to a vertex are kept as the edge's shape points.
/// </summary>
/// <param name="db">The router db to load the network into.</param>
/// <param name="geoJson">The geojson feature collection text.</param>
public static void LoadTestNetwork(this RouterDb db, string geoJson)
{
    var geoJsonReader = new NetTopologySuite.IO.GeoJsonReader();
    var features = geoJsonReader.Read<FeatureCollection>(geoJson);

    // Pass 1: add all point features with a parsable uint "id" as vertices.
    foreach (var feature in features.Features)
    {
        if (feature.Geometry is Point point)
        {
            uint id;
            if (feature.Attributes.Exists("id") &&
                uint.TryParse(feature.Attributes["id"].ToInvariantString(), out id))
            {
                // has an id, add as vertex.
                db.Network.AddVertex(id,
                    (float)point.Coordinate.Y,
                    (float)point.Coordinate.X);
            }
        }
    }

    // Pass 2: add all linestring features as edges.
    foreach (var feature in features.Features)
    {
        if (feature.Geometry is LineString line)
        {
            var profile = new Itinero.Attributes.AttributeCollection();
            var names = feature.Attributes.GetNames();
            foreach (var name in names)
            {
                // attributes that are not meta:* or stroke* define the edge profile.
                if (!name.StartsWith("meta:") && !name.StartsWith("stroke"))
                {
                    profile.AddOrReplace(name, feature.Attributes[name].ToInvariantString());
                }
            }
            var meta = new Itinero.Attributes.AttributeCollection();
            foreach (var name in names)
            {
                if (name.StartsWith("meta:"))
                {
                    meta.AddOrReplace(name.Remove(0, "meta:".Length),
                        feature.Attributes[name].ToInvariantString());
                }
            }
            var profileId = db.EdgeProfiles.Add(profile);
            var metaId = db.EdgeMeta.Add(meta);

            var vertex1 = db.SearchVertexFor(
                (float)line.Coordinates[0].Y,
                (float)line.Coordinates[0].X);
            var distance = 0.0;
            var shape = new List<Coordinate>();
            for (var i = 1; i < line.Coordinates.Length; i++)
            {
                var vertex2 = db.SearchVertexFor(
                    (float)line.Coordinates[i].Y,
                    (float)line.Coordinates[i].X);
                distance += Coordinate.DistanceEstimateInMeter(
                    (float)line.Coordinates[i - 1].Y, (float)line.Coordinates[i - 1].X,
                    (float)line.Coordinates[i].Y, (float)line.Coordinates[i].X);
                if (vertex2 == Itinero.Constants.NO_VERTEX)
                {
                    // no vertex at this coordinate: keep it as a shape point.
                    shape.Add(line.Coordinates[i].FromCoordinate());
                    continue;
                }
                // reached the next vertex: emit an edge with the accumulated shape/distance.
                db.Network.AddEdge(vertex1, vertex2, new Itinero.Data.Network.Edges.EdgeData()
                {
                    Distance = (float)distance,
                    MetaId = metaId,
                    Profile = (ushort)profileId
                }, shape);
                shape.Clear();
                vertex1 = vertex2;
                distance = 0;
            }
        }
    }
    // NOTE: a large commented-out legacy implementation was removed here; see VCS history.
}