// Reads the semicolon-separated CSV at c:\temp\temp.csv and serializes
// the parsed points to temp.ser.
static void ConvertDataset()
{
    var rows = FileUtil.ReadFile(@"c:\temp\temp.csv");
    var dataset = new Points();
    foreach (var row in rows)
    {
        var fields = row.Split(';');
        if (fields.Length != 4)
        {
            continue; // skip malformed rows
        }

        dataset.Add(new P
        {
            X = fields[0].ToDouble(),
            Y = fields[1].ToDouble(),
            I = fields[2].ToInt(),
            T = fields[3].ToInt(),
        });
    }

    FileUtil.SaveDataSetToFile(dataset, "temp.ser");
}
/// <summary>
/// Loads a semicolon-separated CSV of points (x;y;id;type per row),
/// normalizes the dataset and returns it.
/// </summary>
/// <param name="websitepath">Full path to the CSV file.</param>
/// <returns>The parsed, normalized dataset.</returns>
/// <exception cref="ApplicationException">Thrown when the file does not exist.</exception>
private static IPoints LoadDatasetFromCsv(string websitepath)
{
    var fi = new FileInfo(websitepath);
    if (!fi.Exists)
    {
        // Fixed grammar in the error message ("does not exists" -> "does not exist").
        // Exception type kept as ApplicationException for caller compatibility.
        throw new ApplicationException("File does not exist: " + fi.FullName);
    }

    var list = FileUtil.ReadFile(websitepath);
    IPoints dataset = new Points();
    foreach (var s in list)
    {
        var arr = s.Split(new[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
        if (arr.Length != 4)
        {
            continue; // skip malformed rows
        }

        var x = arr[0].ToDouble();
        var y = arr[1].ToDouble();
        var i = arr[2].ToInt();
        var t = arr[3].ToInt();
        dataset.Add(new P { X = x, Y = y, I = i, T = t });
    }

    dataset.Normalize();
    return dataset;
}
// Creates an active, empty bucket identified by the given id.
public Bucket(string id)
{
    Id = id;
    IsUsed = true;
    Centroid = null;
    Points = new Points();
}
// Creates an active, empty bucket at grid position (idx, idy)
// identified by the given id.
public Bucket(int idx, int idy, string id)
{
    Idx = idx;
    Idy = idy;
    Id = id;
    IsUsed = true;
    Centroid = null;
    Points = new Points();
}
// lat lon points data
// Parses a geonames dump file (http://download.geonames.org/export/dump/)
// into a point dataset and writes it out as csv.
// Each accepted point gets a random type in 1..numOfType.
static void LatLonParse()
{
    var rand = new Random();
    const string name = "cities1000";
    const int numOfType = 3;

    // Hoisted out of the loop: the delimiter set never changes per line.
    var delimiters = new[] { ' ', '\t' };

    var lines = FileUtil.ReadFile(string.Format("c:\\temp\\{0}.txt", name));
    var dataset = new Points();

    foreach (var line in lines)
    {
        var arr = line.Split(delimiters, StringSplitOptions.RemoveEmptyEntries);
        if (arr.Length < 10)
        {
            continue; // not a usable data row
        }

        double? lon = null;
        double? lat = null;
        var id = arr[0].ToInt();

        // Scan for the first run of parseable doubles:
        // three in a row -> take the 2nd and 3rd as lat/lon,
        // two in a row   -> take them as lat/lon directly.
        for (var i = 1; i < arr.Length - 2; i++)
        {
            double d1, d2, d3;
            var dp1 = Double.TryParse(arr[i], NumberStyles.Float, NumberFormatInfo.InvariantInfo, out d1);
            var dp2 = Double.TryParse(arr[i + 1], NumberStyles.Float, NumberFormatInfo.InvariantInfo, out d2);
            var dp3 = Double.TryParse(arr[i + 2], NumberStyles.Float, NumberFormatInfo.InvariantInfo, out d3);
            if (dp1 && dp2 && dp3) { lat = d2; lon = d3; break; }
            if (dp1 && dp2) { lat = d1; lon = d2; break; }
        }

        // Only keep rows with a valid coordinate pair.
        if (lon.HasValue && lat.HasValue
            && MathTool.IsLonValid(lon.Value) && MathTool.IsLatValid(lat.Value))
        {
            dataset.Add(new P { X = lon.Value, Y = lat.Value, I = id, T = (rand.Next(numOfType) + 1) });
        }
    }

    SaveCsvData(dataset, new FileInfo(string.Format("c:\\temp\\{0}.csv", name)));
}
// Converts the csv point data at c:\temp\temp.csv into a serialized
// dataset file (temp.ser).
static void ConvertDataset()
{
    var dataset = new Points();
    foreach (var line in FileUtil.ReadFile(@"c:\temp\temp.csv"))
    {
        var parts = line.Split(';');
        if (parts.Length == 4)
        {
            dataset.Add(new P
            {
                X = parts[0].ToDouble(),
                Y = parts[1].ToDouble(),
                I = parts[2].ToInt(),
                T = parts[3].ToInt(),
            });
        }
    }
    FileUtil.SaveDataSetToFile(dataset, "temp.ser");
}
// Creates the serialization container holding an empty point set.
public DatasetToSerialize() { Dataset = new Points(); }
// Builds the final cluster result from the bucket lookup table.
// Buckets below the minimum cluster size contribute their raw points;
// larger buckets contribute a single centroid carrying the point count.
// NOTE(review): the grid parameter is currently unused — the viewport
// post-filter that used it is disabled below (it was not working
// properly when zoomed far out).
public IPoints GetClusterResult(Boundary grid)
{
    var clusterPoints = new Points();

    //O(m*n)
    foreach (var pair in BucketsLookup)
    {
        var bucket = pair.Value;
        if (!bucket.IsUsed)
        {
            continue;
        }

        if (bucket.Points.Count < AlgoConfig.MinClusterSize)
        {
            // Too few points to form a cluster: pass them through as-is.
            clusterPoints.Data.AddRange(bucket.Points.Data);
        }
        else
        {
            // Represent the bucket by its centroid, tagged with the cluster size.
            bucket.Centroid.C = bucket.Points.Count;
            clusterPoints.Add(bucket.Centroid);
        }
    }

    // Disabled viewport post-filtering (kept for reference):
    //var filtered = FilterDataset(clusterPoints, grid); // post filter data for client viewport
    //return filtered; //not working properly when zoomed far out.
    return clusterPoints; // return not post filtered
}
// lat lon points data
// Reads a geonames dump (http://download.geonames.org/export/dump/) and
// writes the recognized lat/lon rows to a csv point file, assigning each
// point a random type in 1..3.
static void LatLonParse()
{
    var rng = new Random();
    const string name = "cities1000";
    const int typeCount = 3;
    var dataset = new Points();

    foreach (var row in FileUtil.ReadFile(string.Format("c:\\temp\\{0}.txt", name)))
    {
        var separators = new[] { ' ', '\t' };
        var cols = row.Split(separators, StringSplitOptions.RemoveEmptyEntries);
        if (cols.Length < 10)
        {
            continue;
        }

        double? longitude = null;
        double? latitude = null;
        var pointId = cols[0].ToInt();

        // First run of 3 parseable doubles wins (lat=2nd, lon=3rd);
        // otherwise a run of 2 is taken as lat, lon directly.
        for (var k = 1; k < cols.Length - 2; k++)
        {
            double a, b, c;
            var okA = Double.TryParse(cols[k], NumberStyles.Float, NumberFormatInfo.InvariantInfo, out a);
            var okB = Double.TryParse(cols[k + 1], NumberStyles.Float, NumberFormatInfo.InvariantInfo, out b);
            var okC = Double.TryParse(cols[k + 2], NumberStyles.Float, NumberFormatInfo.InvariantInfo, out c);
            if (okA && okB && okC)
            {
                latitude = b;
                longitude = c;
                break;
            }
            if (okA && okB)
            {
                latitude = a;
                longitude = b;
                break;
            }
        }

        if (longitude.HasValue && latitude.HasValue
            && MathTool.IsLonValid(longitude.Value) && MathTool.IsLatValid(latitude.Value))
        {
            dataset.Add(new P { X = longitude.Value, Y = latitude.Value, I = pointId, T = (rng.Next(typeCount) + 1) });
        }
    }

    SaveCsvData(dataset, new FileInfo(string.Format("c:\\temp\\{0}.csv", name)));
}
// Static initializer: starts both in-memory point stores off empty.
static MemoryDatabase()
{
    PointsBackup = new Points();
    Points = new Points();
}
// Post
/// <summary>
/// Ajax endpoint: returns the markers for the requested viewport and zoom
/// level — clustered when clustering applies, otherwise the raw (capped)
/// point list — along with polylines and the elapsed processing time.
/// </summary>
/// <param name="nelat">North-east latitude of the viewport.</param>
/// <param name="nelon">North-east longitude of the viewport.</param>
/// <param name="swlat">South-west latitude of the viewport.</param>
/// <param name="swlon">South-west longitude of the viewport.</param>
/// <param name="zoomlevel">Current map zoom level.</param>
/// <param name="filter">Marker-type filter string.</param>
/// <param name="sendid">Request id echoed back to the client (Rid).</param>
/// <returns>The reply object serialized to the client as json.</returns>
public JsonMarkersReply Markers( double nelat, double nelon, double swlat, double swlon, int zoomlevel, string filter, int sendid )
{
    // Idiomatic replacement for new Stopwatch() + Start().
    var sw = Stopwatch.StartNew();

    var jsonReceive = new JsonGetMarkersReceive(nelat, nelon, swlat, swlon, zoomlevel, filter, sendid);

    // Clustering is on when the client requests it, or when the zoom level
    // is low enough that clustering is forced.
    var clusteringEnabled = jsonReceive.IsClusteringEnabled
        || AlgoConfig.AlwaysClusteringEnabledWhenZoomLevelLess > jsonReceive.Zoomlevel;

    JsonMarkersReply reply;

    jsonReceive.Viewport.ValidateLatLon(); // Validate google map viewport input (is always valid)
    jsonReceive.Viewport.Normalize();

    // Get all points from memory
    IPoints points = MemoryDatabase.GetPoints();

    if (jsonReceive.TypeFilterExclude.Count == AlgoConfig.MarkerTypes.Count)
    {
        // Every marker type is excluded: nothing to show.
        points = new Points(); // empty
    }
    else if (jsonReceive.TypeFilterExclude.Count > 0)
    {
        // Filter data by typeFilter value.
        // Make a new object so the shared dataset is not overwritten.
        points = new Points
        {
            Data = points.Data
                .Where(p => jsonReceive.TypeFilterExclude.Contains(p.T) == false)
                .ToList()
        };
    }

    // Create new instance for every ajax request with input all points and json data
    var clusterAlgo = new GridCluster(points, jsonReceive); // create polylines

    // Clustering
    if (clusteringEnabled && jsonReceive.Zoomlevel < AlgoConfig.ZoomlevelClusterStop)
    {
        // Calculate data to be displayed
        var clusterPoints = clusterAlgo.GetCluster(new ClusterInfo
        {
            ZoomLevel = jsonReceive.Zoomlevel,
        });
        var converted = DataConvert(clusterPoints);

        // Prepare data to the client
        reply = new JsonMarkersReply
        {
            Markers = converted,
            Rid = sendid,
            Polylines = clusterAlgo.Lines,
            Msec = Sw(sw),
        };

        // Return client data
        return reply;
    }

    // If we are here then there is no clustering.
    // The number of items returned is restricted to avoid json data overflow.
    IPoints filteredDataset = ClusterAlgorithmBase.FilterDataset(points, jsonReceive.Viewport);
    IPoints filteredDatasetMaxPoints = new Points
    {
        Data = filteredDataset.Data
            .Take(AlgoConfig.MaxMarkersReturned)
            .ToList()
    };

    reply = new JsonMarkersReply
    {
        Markers = DataConvert(filteredDatasetMaxPoints),
        Rid = sendid,
        Polylines = clusterAlgo.Lines,
        // Mia = how many markers in the viewport were dropped by the cap.
        Mia = filteredDataset.Count - filteredDatasetMaxPoints.Count,
        Msec = Sw(sw),
    };
    return reply;
}
// Post
// Ajax endpoint: returns markers for the requested viewport/zoom/filter —
// clustered when clustering applies, otherwise a raw list capped at
// AlgoConfig.MaxMarkersReturned — plus polylines and elapsed time.
public JsonMarkersReply Markers( double nelat, double nelon, double swlat, double swlon, int zoomlevel, string filter, int sendid )
{
    var sw = new Stopwatch();
    sw.Start();

    var jsonReceive = new JsonGetMarkersReceive(nelat, nelon, swlat, swlon, zoomlevel, filter, sendid);

    // Clustering is active when the client requests it or the zoom level forces it.
    var clusteringEnabled = jsonReceive.IsClusteringEnabled || AlgoConfig.AlwaysClusteringEnabledWhenZoomLevelLess > jsonReceive.Zoomlevel;

    JsonMarkersReply reply;

    jsonReceive.Viewport.ValidateLatLon(); // Validate google map viewport input (is always valid)
    jsonReceive.Viewport.Normalize();

    // Get all points from memory
    IPoints points = MemoryDatabase.GetPoints();

    if (jsonReceive.TypeFilterExclude.Count == AlgoConfig.MarkerTypes.Count)
    {
        // Filter all
        points = new Points(); // empty
    }
    else if (jsonReceive.TypeFilterExclude.Count > 0)
    {
        // Filter data by typeFilter value
        // Make new obj, don't overwrite obj data
        points = new Points
        {
            Data = points.Data
                .Where(p => jsonReceive.TypeFilterExclude.Contains(p.T) == false)
                .ToList()
        };
    }

    // Create new instance for every ajax request with input all points and json data
    var clusterAlgo = new GridCluster(points, jsonReceive); // create polylines

    // Clustering
    if (clusteringEnabled && jsonReceive.Zoomlevel < AlgoConfig.ZoomlevelClusterStop)
    {
        // Calculate data to be displayed
        var clusterPoints = clusterAlgo.GetCluster(new ClusterInfo { ZoomLevel = jsonReceive.Zoomlevel, });
        var converted = DataConvert(clusterPoints);

        // Prepare data to the client
        reply = new JsonMarkersReply { Markers = converted, Rid = sendid, Polylines = clusterAlgo.Lines, Msec = Sw(sw), };

        // Return client data
        return(reply);
    }

    // If we are here then there are no clustering
    // The number of items returned is restricted to avoid json data overflow
    IPoints filteredDataset = ClusterAlgorithmBase.FilterDataset(points, jsonReceive.Viewport);
    IPoints filteredDatasetMaxPoints = new Points { Data = filteredDataset.Data.Take(AlgoConfig.MaxMarkersReturned).ToList() };

    // Mia = count of markers in the filtered set that were dropped by the cap.
    reply = new JsonMarkersReply { Markers = DataConvert(filteredDatasetMaxPoints), Rid = sendid, Polylines = clusterAlgo.Lines, Mia = filteredDataset.Count - filteredDatasetMaxPoints.Count, Msec = Sw(sw), };
    return(reply);
}