/// <summary>
/// Stitches the nodes of the given ways into a single node ring, optionally
/// consolidating (near-)straight edges by dropping collinear vertices.
/// </summary>
/// <param name="ways">Ordered ways forming the ring.</param>
/// <param name="reversals">Parallel flags: reverse the i-th way's nodes before stitching.</param>
/// <returns>The stitched (and possibly flattened) node list; flattened-away nodes go to EliminatedNodes.</returns>
private List <Node> BuildNodes(LinkedList <Way> ways, List <bool> reversals)
{
    var originalNodes = new List <Node>();
    var idx = 0;
    foreach (var way in ways)
    {
        var nodes = way.Nodes.ToArray();
        if (reversals[idx])
        {
            nodes = this.Reverse(nodes);
        }

        // Each way contributes all its nodes except the last, which is shared
        // with the next way's first node.
        var lastNode = nodes.Last();
        originalNodes.Add(nodes[0]);
        foreach (var node in nodes.Skip(1))
        {
            // NOTE(review): this also skips any interior occurrence of the final
            // node, not just the terminal one — presumed intentional for
            // non-self-intersecting ways; confirm.
            if (node != lastNode)
            {
                originalNodes.Add(node);
            }
        }

        idx++;
    }

    if (!RouteCleanerSettings.GetInstance().PolygonsShouldConsolidateStraightEdges)
    {
        return originalNodes;
    }

    // Dot product at each vertex; magnitudes at/below FlattenThreshold mean the
    // vertex lies on a (nearly) straight edge and can be dropped.
    var dotProducts = new List <double>();
    for (var i = 0; i < originalNodes.Count; i++)
    {
        dotProducts.Add(PolygonUtils.ComputeDotProduct(originalNodes, i, true));
    }

    var newNodes = new List <Node>();
    EliminatedNodes = new List <Node>();

    // Keep nodes where the angle is not close enough to 180 degrees (based on threshold)
    for (var i = 0; i < dotProducts.Count; i++)
    {
        if (Math.Abs(dotProducts[i]) > FlattenThreshold)
        {
            newNodes.Add(originalNodes[i]);
        }
        else
        {
            EliminatedNodes.Add(originalNodes[i]);
        }
    }

    // Guard the Count before indexing: previously this threw
    // ArgumentOutOfRangeException when every node was flattened away.
    if (newNodes.Count > 0 && newNodes[0] == newNodes[newNodes.Count - 1])
    {
        // Sanity check: the ring must not repeat its first node at the end.
        // InvalidOperationException replaces the bare System.Exception (callers
        // catching Exception still catch this).
        throw new InvalidOperationException($"Node {newNodes[0]} duplicate at start and end.");
    }

    return newNodes;
}
/// <summary>
/// Any TargetableWay that has too many sub ways should be divided.
/// </summary>
/// <param name="originalTargetableWays">Candidate ways; oversized ones are split one-per-original-way.</param>
/// <returns>The original ways, with oversized ones replaced by per-sub-way fragments.</returns>
private IEnumerable <TargetableWay> UnconsolidateLargeWays(List <TargetableWay> originalTargetableWays)
{
    var limit = RouteCleanerSettings.GetInstance().MaxNumberOfWaysToConsolidate;
    foreach (var candidate in originalTargetableWays)
    {
        // Small enough: pass through untouched.
        if (candidate.OriginalWays.Count <= limit)
        {
            yield return candidate;
            continue;
        }

        // Oversized: emit one fragment per sub way, suffixing the name with a counter.
        // todo - be smarter about this. Build connected components then return those subject to max.
        var suffix = 1;
        foreach (var subWay in candidate.OriginalWays)
        {
            yield return new TargetableWay
            {
                Id = Guid.NewGuid().ToString(),
                Name = $"{candidate.Name}_{suffix++}",
                OriginalWays = new List <TargetableWay.OriginalWay> { subWay },
                RegionId = candidate.RegionId,
                RegionName = candidate.RegionName,
            };
        }
    }
}
/// <summary>
/// Reads TargetableWays (one JSON object per line) from the temporary ways file
/// and indexes LookupTargetableWay protobufs by 6-character Open Location Code cell.
/// </summary>
/// <returns>Map of plus-code cell => ways touching that cell.</returns>
public static Dictionary <string, List <LookupTargetableWay> > CreateWayProtobufs()
{
    var outputs = new Dictionary <string, List <LookupTargetableWay> >();
    using var fs = File.Open(RouteCleanerSettings.GetInstance().TemporaryTargetableWaysLocation, FileMode.Open, FileAccess.Read, FileShare.Read);
    using var sr = new StreamReader(fs);
    long lineNum = 0;
    while (sr.Peek() >= 0)
    {
        var content = sr.ReadLine();
        TargetableWay way;
        try
        {
            way = JsonConvert.DeserializeObject <TargetableWay>(content);
            lineNum++;
        }
        catch (Exception ex)
        {
            // Best-effort: log the bad line and keep going.
            Console.WriteLine($"Exception {ex} on line {lineNum++}");
            Console.WriteLine($"Line: {content}");
            continue;
        }

        // Every distinct 6-char plus-code cell any point of the way falls in.
        var keys = way.OriginalWays
            .SelectMany(x => x.Points)
            .Select(point => OpenLocationCode.Encode(point.Latitude, point.Longitude, codeLength: 6))
            .Distinct();

        var lookupTargetableWay = new LookupTargetableWay
        {
            Id = way.Id,
            Relation = way.RegionId,
            RelationName = way.RegionName,
            WayName = way.Name,
        };
        lookupTargetableWay.OriginalWays.AddRange(way.OriginalWays.Select(x =>
        {
            var l = new LookupOriginalWay { Id = x.Id };
            l.NodeIds.AddRange(x.Points.Select(xx => xx.Id));
            l.NodeLatitudes.AddRange(x.Points.Select(xx => xx.Latitude));
            l.NodeLongitudes.AddRange(x.Points.Select(xx => xx.Longitude));
            return l;
        }));

        foreach (var key in keys)
        {
            // Single lookup (was ContainsKey + Add + indexer: three hash lookups).
            if (!outputs.TryGetValue(key, out var bucket))
            {
                bucket = new List <LookupTargetableWay>();
                outputs.Add(key, bucket);
            }
            bucket.Add(lookupTargetableWay);
        }
    }
    return outputs;
}
/// <summary>
/// Opens a UTF-8 StreamWriter on "&lt;output dir&gt;/&lt;key&gt;.json", creating the
/// directory and file as needed. Caller owns disposal of the returned writer.
/// </summary>
/// <param name="key">File name stem (e.g. a plus-code bucket).</param>
private StreamWriter GetStreamWriter(string key)
{
    var outDirectory = RouteCleanerSettings.GetInstance().TemporaryNodeWithContainingWayOutLocation;
    System.IO.Directory.CreateDirectory(outDirectory);
    var fullPath = Path.Combine(outDirectory, key + ".json");
    Console.WriteLine($"Opening path {fullPath}");
    // NOTE(review): OpenOrCreate does not truncate — stale bytes can remain if an
    // older, longer file exists at this path. Confirm that is intended.
    var stream = File.Open(fullPath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read);
    // 64 KiB buffer; disposing the writer closes the underlying stream.
    return new StreamWriter(stream, Encoding.UTF8, 65536);
}
/// <summary>
/// Reads nodes (one JSON object per line) from every file in the temporary node
/// output folder, buckets them by 6-character Open Location Code, sorts each
/// bucket, and yields (code, nodes) pairs one file at a time.
/// </summary>
/// <returns>Sequence of (plus-code, sorted LookupNode list) tuples.</returns>
public static IEnumerable <(string, List <LookupNode>)> CreateLookupNodeProtobufs()
{
    var folder = RouteCleanerSettings.GetInstance().TemporaryNodeWithContainingWayOutLocation;
    var allFiles = Directory.GetFiles(folder);
    foreach (var file in allFiles)
    {
        Console.WriteLine($"Working on {file}");
        var outputs = new Dictionary <string, List <LookupNode> >();
        string line;
        // Fix: the reader was never disposed, leaking one file handle per input file.
        using (var sr = new StreamReader(file))
        {
            long lineNum = 0;
            while ((line = sr.ReadLine()) != null)
            {
                Node node;
                try
                {
                    node = JsonConvert.DeserializeObject <Node>(line);
                    lineNum++;
                }
                catch (Exception ex)
                {
                    // Best-effort: log the bad line and keep going.
                    Console.WriteLine($"Exception {ex} on line {lineNum++}");
                    Console.WriteLine($"Line: {line}");
                    continue;
                }

                var location = new OpenLocationCode(node.Latitude, node.Longitude, codeLength: 6);
                // Single lookup (was ContainsKey + Add + indexer).
                if (!outputs.TryGetValue(location.Code, out var bucket))
                {
                    bucket = new List <LookupNode>();
                    outputs.Add(location.Code, bucket);
                }

                var lNode = new LookupNode { Id = node.Id, Latitude = node.Latitude, Longitude = node.Longitude };
                lNode.Relations.AddRange(node.Relations);
                lNode.TargetableWays.AddRange(node.ContainingWays);
                bucket.Add(lNode);
            }
        }

        foreach (var kvp in outputs)
        {
            kvp.Value.Sort(SortNodesByLatLong);
            yield return (kvp.Key, kvp.Value);
        }
    }
}
/// <summary>
/// Streams nodes from the temporary node file, attaches their containing-way ids,
/// shards them into per-plus-code (2-char cell) writers, and finally writes a
/// relation-count summary file.
/// </summary>
/// <param name="wayMap">node.Id => set of targetable-way ids containing that node.</param>
/// <param name="streamWriters">Shared writer cache, keyed by 2-char plus-code; new writers are added as needed.</param>
private async Task StreamNodesAsync(Dictionary <string, HashSet <string> > wayMap, DisposableDictionary <string, StreamWriter> streamWriters)
{
    var lineCntr = 0;
    var relationRegion = GeometryFactory.GetRegionGeometry(RouteCleanerSettings.GetInstance().TemporaryBoundariesLocation, false, false);
    var relationTracker = new TrackRelationNodes(relationRegion.Relations);
    using var fs = File.Open(RouteCleanerSettings.GetInstance().TemporaryNodeOutLocation, FileMode.OpenOrCreate, FileAccess.Read, FileShare.Read);
    using (var sr = new StreamReader(fs))
    {
        while (sr.Peek() >= 0)
        {
            var content = sr.ReadLine();
            try
            {
                var node = JsonConvert.DeserializeObject <Node>(content);
                if (wayMap.TryGetValue(node.Id, out var ways))
                {
                    node.ContainingWays = ways.ToList();
                }

                // 2-character plus-code => very coarse geographic bucket; one writer per bucket.
                var code = new OpenLocationCode(node.Latitude, node.Longitude, codeLength: 2);
                if (!streamWriters.ContainsKey(code.Code))
                {
                    streamWriters.Add(code.Code, GetStreamWriter(code.Code));
                }

                // update tracker with ways and relations from this node.
                relationTracker.AddNode(node);
                var line = JsonConvert.SerializeObject(node);
                // Keep the output strictly one-object-per-line.
                line = Regex.Replace(line, @"\t|\n|\r", "");
                streamWriters[code.Code].WriteLine(line);
            }
            catch (JsonException e)
            {
                // Collapses the two byte-identical catch blocks (JsonReaderException
                // and JsonSerializationException) into their common Newtonsoft base type.
                Console.WriteLine($"Could not deserialize line {lineCntr} {content}: {e.Message}");
            }

            lineCntr++;
        }
    }

    var relationLines = relationTracker.GetRelationCounts().Select(x => JsonConvert.SerializeObject(x));
    await File.WriteAllLinesAsync(RouteCleanerSettings.GetInstance().TemporaryRelationSummaryLocation, relationLines);
}
/// <summary>
/// Orchestrates the pipeline: builds relation polygons and way geometry in
/// parallel, computes targetable ways within regions, writes node containment
/// output, then consolidates/splits ways and persists them.
/// </summary>
/// <param name="boundariesFilePath">Path to the boundaries (relations) input.</param>
/// <param name="runnableWaysPath">Path to the runnable ways input.</param>
public void RunChain(string boundariesFilePath, string runnableWaysPath)
{
    var relationRegion = GeometryFactory.GetRegionGeometry(boundariesFilePath, false, false);
    var thread1 = Task <Dictionary <Relation, Polygon[]> > .Factory.StartNew(() => this.CreateRelationPolygons(relationRegion.Relations));
    var thread2 = Task <Geometry> .Factory.StartNew(() => GeometryFactory.GetRegionGeometry(runnableWaysPath, true, false));
    Task.WaitAll(thread1, thread2);
    var waysRegion = thread2.Result;
    var relationsDict = thread1.Result;
    var nodeStreamer = this.GetNodeStreamer(runnableWaysPath);
    var watch = Stopwatch.StartNew();
    var createTargetableWays = new CreateTargetableWaysWithinRegions(waysRegion.Ways, relationRegion.Relations);
    var time = watch.Elapsed;
    Console.WriteLine($"Done prepping ways in {time}");
    watch.Restart();
    WriteNodesToDoc(createTargetableWays, relationsDict, nodeStreamer, RouteCleanerSettings.GetInstance().TemporaryNodeOutLocation);
    time = watch.Elapsed;
    Console.WriteLine($"Done with NodeContainment in {time} seconds.");
    Console.WriteLine($"Found {createTargetableWays.OutputWays.Count} targetableWays");
    watch.Restart();
    var ways = createTargetableWays.OutputWays;
    // Removed dead code: `var w = ways.Where(w => w.Id == "");` — the deferred
    // query was never enumerated or used, and its lambda shadowed `w`.
    ways = ConsolidateWays(ways);
    ways = UnconsolidateLargeWays(ways).ToList(); // todo is the problem?
    time = watch.Elapsed;
    Console.WriteLine($"Done with ConsolidatedWays in {time} seconds. Have {ways.Count} ways.");
    this.WriteWays(ways, RouteCleanerSettings.GetInstance().TemporaryTargetableWaysLocation);
}
/// <summary>
/// Reads TargetableWays (one JSON object per line) from the temporary ways file
/// and builds a reverse index from node id to the set of targetable-way ids
/// containing that node.
/// </summary>
/// <returns>node.Id => {targetableWay.Id}.</returns>
private Dictionary <string, HashSet <string> > BuildWayMap()
{
    var lineCnt = 0;
    var nodeMap = new Dictionary <string, HashSet <string> >(); // node.id => {way.id}
    using var fs = File.Open(RouteCleanerSettings.GetInstance().TemporaryTargetableWaysLocation, FileMode.Open, FileAccess.Read, FileShare.Read);
    using var sr = new StreamReader(fs);
    while (sr.Peek() >= 0)
    {
        var content = sr.ReadLine();
        try
        {
            var targetableWay = JsonConvert.DeserializeObject <TargetableWay>(content);
            foreach (var originalWay in targetableWay.OriginalWays)
            {
                foreach (var node in originalWay.Points)
                {
                    // Single lookup (was ContainsKey + Add + indexer).
                    if (!nodeMap.TryGetValue(node.Id, out var wayIds))
                    {
                        wayIds = new HashSet <string>();
                        nodeMap.Add(node.Id, wayIds);
                    }
                    // HashSet.Add is a no-op on duplicates, so the previous
                    // Contains pre-check was a redundant extra lookup.
                    wayIds.Add(targetableWay.Id);
                }
            }
            lineCnt++;
        }
        catch (JsonException e)
        {
            // Merges the two byte-identical catch blocks (JsonSerializationException
            // and JsonReaderException) via their common Newtonsoft base type.
            Console.WriteLine($"Could not deserialize way {lineCnt} {content}: {e.Message}");
        }
    }
    return nodeMap;
}
/// <summary>
/// use n + 2 threads.
/// Thread 0 will iterate through nodeStreamer and add to n queues in round robin.
/// Threads 1-n will read a queue and process, writing to a queue.
/// Thread n+1 will read all output queues and yield one at a time.
/// </summary>
/// <param name="relationsDict">Relation => polygons; scanned to decide which relations contain each node.</param>
/// <param name="nodeStreamer">Lazily streamed nodes to classify.</param>
/// <returns>Each input node with its Relations list extended by the ids of containing relations, in completion order.</returns>
public IEnumerable <Node> ThreadedNodeContainment(Dictionary <Relation, Polygon[]> relationsDict, IEnumerable <Node> nodeStreamer)
{
    var allDone = false; // shared - read but only written by the main thread.
    int numThreads = RouteCleanerSettings.GetInstance().NumThreads;
    // One request + one response queue per worker; a null enqueued on a queue is
    // the end-of-stream sentinel for that queue.
    var requestQueues = Enumerable.Range(0, numThreads).Select(_ => new ConcurrentQueue <Node>()).ToArray();
    var responseQueues = Enumerable.Range(0, numThreads).Select(_ => new ConcurrentQueue <Node>()).ToArray();
    // Producer: round-robins nodes onto the request queues, throttling (every
    // 10000 nodes) when the average queue depth exceeds 20000 so workers catch up.
    var readThread = Task <int> .Factory.StartNew(() =>
    {
        var numNodes = 0;
        foreach (var node in nodeStreamer)
        {
            requestQueues[numNodes % numThreads].Enqueue(node);
            numNodes++;
            if (numNodes % 10000 == 0)
            {
                var depths = requestQueues.Select(q => q.Count);
                var averageDepth = depths.Average();
                while (averageDepth > 20000)
                {
                    // Console.WriteLine($"Reader thread sleeping to let other threads catch up");
                    Thread.Sleep(RouteCleanerSettings.GetInstance().ReaderThreadSleepInterval); // give it a little time to cool off.
                    depths = requestQueues.Select(q => q.Count);
                    averageDepth = depths.Average();
                }
            }
        }
        // when done, push a null to each queue
        Console.WriteLine($"Reader thread done");
        foreach (var q in requestQueues)
        {
            q.Enqueue(null);
        }
        return (numNodes);
    });
    var processedCount = new int[numThreads];
    // Workers: each drains its own request queue, computes the containing
    // relations for every node, and pushes results (then the terminating null)
    // onto its response queue.
    var processThreads = Enumerable.Range(0, numThreads).Select(processThreadIdx => Task <int> .Factory.StartNew(() =>
    {
        Console.WriteLine($"Thread {processThreadIdx} reporting for duty");
        while (true)
        {
            Node nodeToProcess = null;
            while (requestQueues[processThreadIdx].TryDequeue(out nodeToProcess)) // when queue is empty, we want to keep processing. When it has a null in it, we halt. Thus two whiles.
            {
                if (nodeToProcess == null)
                {
                    // pusher will push a null when the queue is done.
                    responseQueues[processThreadIdx].Enqueue(null);
                    Console.WriteLine($"Thread {processThreadIdx} done");
                    return (processThreadIdx);
                }
                // A relation contains the node iff the first of its polygons that
                // contains the node is an outer polygon.
                var containingRelations = relationsDict.Where(kvp =>
                {
                    var target = kvp.Key;
                    var polygons = kvp.Value;
                    foreach (var polygon in polygons)
                    {
                        if (PolygonContainment.Contains(polygon, nodeToProcess))
                        {
                            return (polygon.IsOuter);
                        }
                    }
                    return (false);
                }).Select(x => x.Key.Id);
                nodeToProcess.Relations.AddRange(containingRelations);
                responseQueues[processThreadIdx].Enqueue(nodeToProcess);
                processedCount[processThreadIdx]++;
            }
            // Queue momentarily empty but not yet terminated: back off and retry.
            Console.WriteLine($"Thread {processThreadIdx} failed to dequeue");
            Thread.Sleep(1000);
        }
    })).ToArray(); // <-- that's important - otherwise these never actually happen!
    // print status thread
    var statusThread = Task.Factory.StartNew(() =>
    {
        while (!allDone)
        {
            Thread.Sleep(10 * 1000);
            var queueDepths = requestQueues.Select(q => q.Count).Average();
            var averageFinished = processedCount.Sum();
            Console.WriteLine($"Checkin: {queueDepths} average depth with {averageFinished} processed.");
        }
        Console.WriteLine("Checking thread done.");
    });
    // main thread writes
    // Consumer loop: poll every response queue, yielding nodes as they arrive;
    // a null from a queue marks that worker as finished.
    var deadThreads = Enumerable.Range(0, numThreads).Select(_ => false).ToArray();
    while (true)
    {
        var didWork = false;
        for (var i = 0; i < numThreads; i++)
        {
            if (!deadThreads[i])
            {
                Node processedNode = null;
                if (responseQueues[i].TryDequeue(out processedNode))
                {
                    if (processedNode == null)
                    {
                        deadThreads[i] = true;
                    }
                    else
                    {
                        didWork = true;
                        yield return (processedNode);
                    }
                }
            }
        }
        if (deadThreads.All(x => x))
        {
            break;
        }
        if (!didWork)
        {
            Thread.Sleep(1000); // if the queues are all empty, then wait for a little while. No sense having this thread spin.
            // maybe we should do message passing here?
        }
    }
    // Signals the status thread to exit its report loop.
    allDone = true;
}