/// <summary>
/// Creates an ID-generator that is slightly faster and a bit more transparent.
/// </summary>
/// <param name="resolution">maximum duration in seconds for an increment in the id</param>
/// <param name="alphabet">the (sorted) characters to be used in the ID generation</param>
/// <param name="start">beginning of timeline; defaults to DEFAULT_START when null</param>
/// <param name="overflow_years">number of years after which the key length will increase by 1</param>
public HagelSource(double resolution = Hagelkorn.Resolution.Seconds, string alphabet = DEFAULT_ALPHABET, DateTime? start = null, double overflow_years = 10)
{
    // Fall back to the library's default epoch when no explicit start is supplied.
    DateTime effectiveStart = start ?? DEFAULT_START;

    Alphabet = alphabet;
    Start = effectiveStart.ToUniversalTime();

    // 31,536,000 = seconds in a 365-day year; End marks the point at which IDs overflow
    // into one additional digit.
    TotalSeconds = overflow_years * 31536000;
    End = Start.AddSeconds(TotalSeconds);

    (Digits, Combinations, Resolution) = KeyLength(overflow_years, resolution, B);
}
/// <summary>
/// Limits the number of items to be processed. It allows processing 'maxAllowed' items every 'windowSeconds' seconds and if that
/// limit is exceeded then the processing instruction that should be considered is 'deniedProcessInstruction'
/// </summary>
/// <param name="maxAllowed">max items allowed per window; must be positive</param>
/// <param name="windowSeconds">length of the limiting window in seconds; must be positive</param>
/// <param name="deniedProcessInstruction">instruction applied to items that exceed the limit</param>
/// <exception cref="ArgumentException">when maxAllowed or windowSeconds is zero or negative</exception>
public WindowLimiter(int maxAllowed, int windowSeconds, Constants.MessageProcessInstruction deniedProcessInstruction)
{
    if (maxAllowed <= 0 || windowSeconds <= 0)
    {
        // Fixed: the old message claimed "cannot be 0" but the guard also rejects negative values.
        throw new ArgumentException("Invalid parameters; maxAllowed and windowSeconds must be positive");
    }

    MaxAllowed = maxAllowed;
    WindowSeconds = windowSeconds;
    DeniedProcessInstruction = deniedProcessInstruction;

    // NOTE(review): uses local wall-clock time; if the window must survive DST/timezone
    // shifts, DateTime.UtcNow would be safer — confirm callers don't display Start/End
    // as local time before changing.
    Start = DateTime.Now;
    End = Start.AddSeconds(windowSeconds);
}
/// <summary>
/// Computes the set of ticks for the given tick frequency and tick type.
/// </summary>
/// <param name="freq">tick frequency (spacing between consecutive ticks)</param>
/// <param name="tickType">tick type assigned to every generated tick</param>
private void CalculateTicks(TimelineTickFrequency freq, TimelineTickType tickType)
{
    DateTime spanStart;           // start of the span to generate ticks for
    double spanExtentInSeconds;   // length of that span, in seconds

    if (double.IsInfinity(ViewportExtent))
    {
        // No viewport defined: generate ticks across the whole timeline.
        spanStart = Start;
        spanExtentInSeconds = (End - Start).TotalSeconds;
    }
    else
    {
        // Viewport defined: convert the pixel offset/extent into time via PixelsPerSecond.
        double secondsPerPixel = 1 / PixelsPerSecond;
        spanStart = Start.AddSeconds(ViewportOffset * secondsPerPixel);
        spanExtentInSeconds = ViewportExtent * secondsPerPixel;
    }

    double tickSpacingSeconds = freq.Extent.TotalSeconds;
    double tickCount = spanExtentInSeconds / tickSpacingSeconds;

    // First tick: the earliest multiple of the tick spacing (anchored at DateTime.MinValue)
    // that does not fall before spanStart.
    double secondsFromOrigin = (spanStart - DateTime.MinValue).TotalSeconds;
    DateTime tick = DateTime.MinValue.AddSeconds(
        Math.Ceiling(secondsFromOrigin / tickSpacingSeconds) * tickSpacingSeconds);

    for (int i = 0; i < tickCount; i++)
    {
        AddTick(new TimelineTick() { Time = tick, TickType = tickType });
        tick = tick.AddSeconds(tickSpacingSeconds);
    }
}
/// <summary>
/// Interpolates time series from known source sites to target sites using
/// inverse-distance weighting combined with a multiple linear regression on
/// (x, y, elevation) to correct for spatial gradients.
/// </summary>
/// <param name="cancelProgressHandler">progress/cancellation reporting sink</param>
/// <returns>true on success; false when the data file does not match the source feature file</returns>
public override bool Execute(DotSpatial.Data.ICancelProgressHandler cancelProgressHandler)
{
    var fs_source = FeatureSet.Open(SourceFeatureFile);
    var fs_target = FeatureSet.Open(TargetFeatureFile);
    CSVFileStream csv = new CSVFileStream(DataFileName);
    var ts_data = csv.Load<double>();
    cancelProgressHandler.Progress("Package_Tool", 1, "Time series at known sites loaded");
    int nsource_sites = fs_source.DataTable.Rows.Count;
    int ntar_sites = fs_target.DataTable.Rows.Count;
    int nstep = ts_data.Size[0];
    int nsite_data = ts_data.Size[1];
    int progress = 0;
    double sumOfDis = 0;
    double sumOfVa = 0;
    if (nsite_data != nsource_sites)
    {
        // Fixed typo in the message ("dose" -> "does").
        cancelProgressHandler.Progress("Package_Tool", 100, "the number of sites in the data file does not match to that in the source feature file");
        return (false);
    }
    else
    {
        if (Neighbors > nsource_sites)
        {
            Neighbors = nsource_sites;
        }
        var known_sites = new Site[nsource_sites];
        DataCube<float> mat = new DataCube<float>(1, nstep, ntar_sites);
        mat.DateTimes = new DateTime[nstep];
        mat.Name = OutputMatrix;
        mat.TimeBrowsable = true;
        mat.AllowTableEdit = false;

        // Regression inputs: one row per source site holding (x, y, elevation).
        double[][] xvalues = new double[nsource_sites][];
        double[] yvalues = new double[nsource_sites];
        for (int i = 0; i < nsource_sites; i++)
        {
            var cor = fs_source.Features[i].Geometry.Coordinate;
            var ele = double.Parse(fs_source.Features[i].DataRow[_ValueField].ToString());
            xvalues[i] = new double[3];
            known_sites[i] = new Site()
            {
                LocalX = cor.X,
                LocalY = cor.Y,
                ID = i,
                Elevation = ele
            };
            xvalues[i][0] = cor.X;
            xvalues[i][1] = cor.Y;
            xvalues[i][2] = ele;
        }

        // Regression target: mean of the valid (below MaximumValue) observations per site.
        for (int i = 0; i < nsource_sites; i++)
        {
            var count = 0;
            double sum = 0;
            for (int t = 0; t < nstep; t++)
            {
                if (ts_data.GetValue(t, i) < MaximumValue)
                {
                    sum += ts_data.GetValue(t, i);
                    count++;
                }
            }
            // Fixed: a site with no valid observations previously produced NaN (0/0),
            // which would poison the regression. Fall back to 0 for such sites.
            yvalues[i] = count > 0 ? sum / count : 0;
        }

        MultipleLinearRegression mlineRegrsn = new MultipleLinearRegression(xvalues, yvalues, true);
        Matrix result = new Matrix();
        mlineRegrsn.ComputeFactorCoref(result);
        double[] coeffs = result[0, Matrix.mCol];
        var cx = coeffs[0];
        var cy = coeffs[1];
        var ce = coeffs[2];
        cancelProgressHandler.Progress("Package_Tool", 2, "Regression coefficients calculated");

        for (int i = 0; i < ntar_sites; i++)
        {
            var cor = fs_target.Features[i].Geometry.Coordinate;
            var site_intep = new Site()
            {
                LocalX = cor.X,
                LocalY = cor.Y,
                ID = i,
                Elevation = double.Parse(fs_target.Features[i].DataRow[_ValueField].ToString())
            };
            var neighborSites = FindNeareastSites(Neighbors, known_sites, site_intep);
            for (int j = 0; j < nstep; j++)
            {
                sumOfDis = 0;
                sumOfVa = 0;
                foreach (var nsite in neighborSites)
                {
                    var vv = ts_data.GetValue(j, nsite.ID);
                    if (vv < MaximumValue)
                    {
                        double temp = 1 / System.Math.Pow(nsite.Distance, Power);
                        // Regression-based gradient correction from neighbor to target site.
                        double gd = (site_intep.LocalX - nsite.LocalX) * cx
                                    + (site_intep.LocalY - nsite.LocalY) * cy
                                    + (site_intep.Elevation - nsite.Elevation) * ce;
                        // Fixed: 'gd' was computed but never used, making the entire
                        // regression step dead code. Apply the correction to the neighbor
                        // value before weighting (detrended-IDW form) — TODO confirm this
                        // matches the intended algorithm with the original author.
                        sumOfVa += (vv + gd) * temp;
                        sumOfDis += temp;
                    }
                }
                if (sumOfDis != 0)
                {
                    mat[0, j, i] = (float)(sumOfVa / sumOfDis);
                    if (!AllowNegative && mat[0, j, i] < 0)
                    {
                        mat[0, j, i] = MinmumValue;
                    }
                }
                else
                {
                    // No valid neighbor this step: fall back to the configured minimum.
                    mat[0, j, i] = MinmumValue;
                }
            }
            progress = (i + 1) * 100 / ntar_sites;
            // Fixed typo in the message ("Caculating" -> "Calculating").
            cancelProgressHandler.Progress("Package_Tool", progress, "Calculating Cell: " + (i + 1));
        }

        for (int j = 0; j < nstep; j++)
        {
            mat.DateTimes[j] = Start.AddSeconds(j * TimeStep);
        }
        cancelProgressHandler.Progress("Package_Tool", 100, "");
        Workspace.Add(mat);
        fs_source.Close();
        fs_target.Close();
        return (true);
    }
}
/// <summary>
/// Interpolates time series from known source sites to target sites using
/// inverse-distance weighting. Source/target geometry can come from an open
/// layer or be loaded from a feature file.
/// </summary>
/// <param name="cancelProgressHandler">progress/cancellation reporting sink</param>
/// <returns>true on success; false when the inputs are invalid or mismatched</returns>
public override bool Execute(DotSpatial.Data.ICancelProgressHandler cancelProgressHandler)
{
    IFeatureSet fs_source = null;
    IFeatureSet fs_target = null;
    // Fixed: track ownership. Only feature sets opened from file here may be closed
    // here; data sets borrowed from a layer belong to that layer, and closing them
    // would invalidate the layer still displayed in the map.
    bool fs_source_owned = false;
    bool fs_target_owned = false;
    if (SourceFeatureLayer != null)
    {
        fs_source = SourceFeatureLayer.DataSet as IFeatureSet;
    }
    else if (TypeConverterEx.IsNotNull(SourceFeatureFile))
    {
        fs_source = FeatureSet.Open(SourceFeatureFile);
        fs_source_owned = true;
    }
    if (TargetFeatureLayer != null)
    {
        fs_target = TargetFeatureLayer.DataSet as IFeatureSet;
    }
    else if (TypeConverterEx.IsNotNull(TargetFeatureFile))
    {
        fs_target = FeatureSet.Open(TargetFeatureFile);
        fs_target_owned = true;
    }
    if (fs_source == null || fs_target == null)
    {
        cancelProgressHandler.Progress("Package_Tool", 100, "Failed. The inputs are invalid.");
        // Fixed: release any feature set we opened before bailing out (was leaked).
        if (fs_source_owned) { fs_source.Close(); }
        if (fs_target_owned) { fs_target.Close(); }
        return (false);
    }
    CSVFileStream csv = new CSVFileStream(DataFileName);
    var ts_data = csv.Load<double>();
    cancelProgressHandler.Progress("Package_Tool", 1, "Time series at known sites loaded");
    int nsource_sites = fs_source.DataTable.Rows.Count;
    int ntar_sites = fs_target.DataTable.Rows.Count;
    int nstep = ts_data.Size[0];
    int nsite_data = ts_data.Size[1];
    int progress = 0;
    int count = 1;
    double sumOfDis = 0;
    double sumOfVa = 0;
    if (nsite_data != nsource_sites)
    {
        // Fixed typo in the message ("dose" -> "does").
        cancelProgressHandler.Progress("Package_Tool", 100, "the number of sites in the data file does not match to that in the source feature file");
        // Fixed: release owned feature sets on this early return (was leaked).
        if (fs_source_owned) { fs_source.Close(); }
        if (fs_target_owned) { fs_target.Close(); }
        return (false);
    }
    else
    {
        if (Neighbors > nsource_sites)
        {
            Neighbors = nsource_sites;
        }
        var known_sites = new Site[nsource_sites];
        DataCube<float> mat = new DataCube<float>(1, nstep, ntar_sites);
        mat.DateTimes = new DateTime[nstep];
        mat.Name = OutputDataCube;
        mat.TimeBrowsable = true;
        mat.AllowTableEdit = false;
        for (int i = 0; i < nsource_sites; i++)
        {
            var cor = fs_source.Features[i].Geometry.Coordinate;
            known_sites[i] = new Site()
            {
                LocalX = cor.X,
                LocalY = cor.Y,
                ID = i
            };
        }
        for (int i = 0; i < ntar_sites; i++)
        {
            var cor = fs_target.Features[i].Geometry.Coordinate;
            var site_intep = new Site()
            {
                LocalX = cor.X,
                LocalY = cor.Y,
                ID = i
            };
            var neighborSites = FindNeareastSites(Neighbors, known_sites, site_intep);
            for (int j = 0; j < nstep; j++)
            {
                sumOfDis = 0;
                sumOfVa = 0;
                foreach (var nsite in neighborSites)
                {
                    var vv = ts_data.GetValue(j, nsite.ID);
                    if (vv < MaximumValue)
                    {
                        // Inverse-distance weight.
                        double temp = 1 / System.Math.Pow(nsite.Distance, Power);
                        sumOfVa += vv * temp;
                        sumOfDis += temp;
                    }
                }
                if (sumOfDis != 0)
                {
                    mat[0, j, i] = (float)(sumOfVa / sumOfDis);
                    if (!AllowNegative && mat[0, j, i] < 0)
                    {
                        mat[0, j, i] = MinmumValue;
                    }
                }
                else
                {
                    // No valid neighbor this step: fall back to the configured minimum.
                    mat[0, j, i] = MinmumValue;
                }
            }
            progress = (i + 1) * 100 / ntar_sites;
            // Throttle reporting: only emit when the integer percentage advances.
            if (progress > count)
            {
                // Fixed typo in the message ("Caculating" -> "Calculating").
                cancelProgressHandler.Progress("Package_Tool", progress, "Calculating Cell: " + (i + 1));
                count++;
            }
        }
        for (int j = 0; j < nstep; j++)
        {
            mat.DateTimes[j] = Start.AddSeconds(j * TimeStep);
        }
        cancelProgressHandler.Progress("Package_Tool", 100, "");
        Workspace.Add(mat);
        // Fixed: only close feature sets this method opened itself.
        if (fs_source_owned) { fs_source.Close(); }
        if (fs_target_owned) { fs_target.Close(); }
        return (true);
    }
}
/// <summary>
/// Loads a delimited time-series data file into a DataCube. Reads the file twice:
/// a first pass to count steps/cells, a second pass to load the values. Timestamps
/// come from the first column when ContainsDateTime is set, otherwise from Start + Interval * t.
/// </summary>
/// <param name="cancelProgressHandler">progress/cancellation reporting sink</param>
/// <returns>true on success</returns>
public override bool Execute(DotSpatial.Data.ICancelProgressHandler cancelProgressHandler)
{
    int progress = 0;
    string line = "";
    float temp = 0;
    int count = 1;
    int nstep = 0;
    int ncell = 0;
    var date = DateTime.Now;

    // First pass: determine the number of cells per row and the number of steps.
    StreamReader sr = new StreamReader(DataFileName);
    if (ContainsHeader)
    {
        line = sr.ReadLine();
    }
    line = sr.ReadLine();
    var buf = TypeConverterEx.Split<string>(line.Trim());
    if (ContainsDateTime)
    {
        // Skip the leading timestamp column when counting cells.
        buf = TypeConverterEx.SkipSplit<string>(line.Trim(), 1);
    }
    ncell = buf.Length;
    nstep++;
    while (!sr.EndOfStream)
    {
        line = sr.ReadLine();
        if (!TypeConverterEx.IsNull(line))
        {
            nstep++;
        }
    }
    sr.Close();

    // Second pass: load the values.
    sr = new StreamReader(DataFileName);
    string var_name = Path.GetFileNameWithoutExtension(DataFileName);
    var mat_out = new DataCube<float>(1, nstep, ncell);
    mat_out.Name = OutputMatrix;
    mat_out.AllowTableEdit = false;
    mat_out.TimeBrowsable = true;
    mat_out.Variables = new string[] { var_name };
    mat_out.DateTimes = new DateTime[nstep];
    if (ContainsHeader)
    {
        line = sr.ReadLine();
    }
    if (ContainsDateTime)
    {
        for (int t = 0; t < nstep; t++)
        {
            line = sr.ReadLine();
            var strs = TypeConverterEx.Split<string>(line);
            // NOTE(review): parse failures are silently ignored, leaving the previous
            // date / 0f in place — confirm this best-effort behavior is intended.
            DateTime.TryParse(strs[0], out date);
            mat_out.DateTimes[t] = date;
            for (int i = 0; i < ncell; i++)
            {
                float.TryParse(strs[i + 1], out temp);
                mat_out[0, t, i] = temp;
            }
            // Fixed: 'progress' was only assigned inside the if-block, so it stayed 0
            // and never exceeded 'count' — no progress was ever reported. Compute it
            // first, and increment 'count' so each percentage is reported once
            // (matches the throttling pattern used by the sibling Execute methods).
            progress = t * 100 / nstep;
            if (progress > count)
            {
                cancelProgressHandler.Progress("Package_Tool", progress, "Processing step:" + (t + 1));
                count++;
            }
        }
    }
    else
    {
        for (int t = 0; t < nstep; t++)
        {
            line = sr.ReadLine();
            var vec = TypeConverterEx.Split<float>(line);
            mat_out[0, t.ToString(), ":"] = vec;
            mat_out.DateTimes[t] = Start.AddSeconds(Interval * t);
            // Fixed: same progress-reporting bug as the branch above.
            progress = t * 100 / nstep;
            if (progress > count)
            {
                cancelProgressHandler.Progress("Package_Tool", progress, "Processing step:" + (t + 1));
                count++;
            }
        }
    }
    // Fixed: the second StreamReader was never closed (resource leak).
    sr.Close();
    Workspace.Add(mat_out);
    return (true);
}