public DatePlus(MultipleTimeseries data, bool from = true)
{
    // Copy either the start or the end date of the series, keep its time unit, and fix the zone to UTC
    if (from)
    {
        mDate = new DateTime(data.getStartDate().Ticks);
    }
    else
    {
        mDate = new DateTime(data.getEndDate().Ticks);
    }
    mUnit = data.mTimeUnit;
    mZone = TimeZoneInfo.Utc;
}
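// Illustrative usage sketch (assumption: "series" is an existing MultipleTimeseries instance,
// e.g. the one built by Extract() below; it is not defined in this file):
//
//     DatePlus start = new DatePlus(series);              // from == true  -> copies getStartDate()
//     DatePlus end   = new DatePlus(series, from: false); // from == false -> copies getEndDate()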
public Graph(MultipleTimeseries dataSet, Size canvasSize, String title, String titleX, String titleY, bool monthlyGraph = false, bool plotAccumulative = false)
{
    this.mDataset = dataSet;
    this.mPlotAccumulative = plotAccumulative;
    this.mMonthlyGraph = monthlyGraph;
    this.mMinX = DateTime.Now;
    this.mMaxX = DateTime.Now;
    this.mMinY = 0;
    this.mMaxY = 0;
    this.mGraphItems = new List<GraphItem>();
    this.mCanvasSize = canvasSize;
    this.mTitle = title;
    this.mTitleX = titleX;
    this.mTitleY = titleY;
    // Plot margin is 10% of the canvas size, clamped between 50 and 200 pixels on each axis
    this.mGraphOffset = new Point((int)Math.Max(50, Math.Min(200, mCanvasSize.Width * 0.1f)),
                                  (int)Math.Max(50, Math.Min(200, mCanvasSize.Height * 0.1f)));
    Initialise();
}
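// Illustrative usage sketch (assumption: "series" is a populated MultipleTimeseries and the
// titles are placeholder labels). With an 800x600 canvas, the clamped margin works out to an
// 80x60 pixel offset:
//
//     Graph graph = new Graph(series, new Size(800, 600),
//                             "Rainfall", "Date", "mm",
//                             monthlyGraph: false, plotAccumulative: true);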
public List<float[]> getTimeseries(DateTime from, DateTime to, ExtractionPoint[] points, TimeUnit unit)
{
    // Build the query period in UTC and truncate it to the requested time unit
    TimePeriod period = new TimePeriod();
    period.from = from;
    period.to = to;
    period.zone = TimeZoneInfo.Utc;
    period = stripPrecision(period, unit);

    int steps = MultipleTimeseries.CalcSteps(period, unit);
    List<float[]> dataList = new List<float[]>();

    // Extract one timeseries per point, stepping through the period one time unit at a time
    for (int j = 0; j < points.Length; ++j)
    {
        float[] data = new float[steps];
        DateTime iter = from;
        for (int i = 0; i < steps; ++i)
        {
            if (mFilenameLookup.ContainsKey(iter))
            {
                data[i] = mFilenameLookup[iter].getData(points[j].longitude, points[j].latitude);
            }
            else
            {
                // No gridded file for this timestamp: mark the value as missing
                data[i] = -9999.9f;
            }
            iter = MultipleTimeseries.incrementDateTime(iter, unit, 1);
        }
        dataList.Add(data);
    }
    return dataList;
}
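// Illustrative call sketch (assumptions: "dataset" is the gridded dataset object exposing this
// method, "points" is an ExtractionPoint[], and "unit" is a TimeUnit value; all three names are
// placeholders). Any timestamp without a matching entry in mFilenameLookup comes back as the
// missing-value sentinel -9999.9f.
//
//     List<float[]> series = dataset.getTimeseries(new DateTime(2000, 1, 1),
//                                                  new DateTime(2000, 12, 31),
//                                                  points, unit);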
public void Extract(String outDir)
{
    // Get data for each extraction point's longitude and latitude
    List<float[]> queryData = mGriddedDataset.getTimeseries(mExtractionPoints, mTimeUnit);

    // Check that a timeseries was returned for every extraction point
    if (queryData.Count == mExtractionPoints.Length)
    {
        // Create the output data set
        mMultipleTimeseries = new MultipleTimeseries(mGriddedDataset.getFromDate(), mGriddedDataset.getToDate(), mTimeUnit);

        // Scale each value by its extraction point's weight
        for (int i = 0; i < queryData.Count; ++i)
        {
            for (int j = 0; j < queryData[i].Length; ++j)
            {
                queryData[i][j] *= (float)mExtractionPoints[i].weight;
            }
        }

        // Find unique stations
        string[] uniqueStations = mExtractionPoints.Select(v => v.siteName).Distinct().ToArray();
        string[] siteNames = mExtractionPoints.Select(v => v.siteName).ToArray();
        int nextIndex;

        foreach (String s in uniqueStations)
        {
            // Find the first occurrence of this station
            int index = Array.IndexOf(siteNames, s);
            nextIndex = index;

            // Per-point normalise factor, since individual values may be missing
            float[] normalise = new float[queryData[index].Length];
            for (int i = 0; i < queryData[index].Length; ++i)
            {
                if (queryData[index][i] < 0)
                {
                    // First station value is invalid: treat it as zero and exclude its weight
                    queryData[index][i] = 0;
                    normalise[i] = 0;
                }
                else
                {
                    // Otherwise start the normalise factor with this point's weight
                    normalise[i] = (float)mExtractionPoints[index].weight;
                }
            }

            // Accumulate all later occurrences of the same station
            nextIndex = Array.IndexOf(siteNames, s, nextIndex + 1);
            while (nextIndex != -1)
            {
                for (int i = 0; i < queryData[index].Length; ++i)
                {
                    // Only valid data points contribute to the sum and the normalise factor
                    if (queryData[nextIndex][i] >= 0)
                    {
                        queryData[index][i] += queryData[nextIndex][i];
                        normalise[i] += (float)mExtractionPoints[nextIndex].weight;
                    }
                }
                // Find the next occurrence
                nextIndex = Array.IndexOf(siteNames, s, nextIndex + 1);
            }

            // Normalise the accumulated data; points with no valid contributions stay missing
            for (int i = 0; i < queryData[index].Length; ++i)
            {
                if (normalise[i] > 0)
                {
                    queryData[index][i] /= normalise[i];
                }
                else
                {
                    queryData[index][i] = -9999.9f;
                }
            }

            // Add the combined station series to the data set
            mMultipleTimeseries.AddColumn(new SingleTimeseries(s, queryData[index]));
        }

        // Save data
        mMultipleTimeseries.writeToFile(outDir + Path.DirectorySeparatorChar + "Data.csv");
    }
}
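// Minimal sketch of the per-station weighted combination that Extract() performs above (the
// method name and parameters here are hypothetical, not part of the codebase). Each input value
// has already been multiplied by its point's weight; missing values (negative) are skipped, and
// the sum is divided by the total weight that actually contributed. For example, with weights
// 0.4 and 0.6 and raw values 10 and missing at one timestep, the result is (10 * 0.4) / 0.4 = 10.
public static float[] CombineWeighted(List<float[]> weightedValues, List<float> weights, float missing = -9999.9f)
{
    int length = weightedValues[0].Length;
    float[] result = new float[length];
    for (int i = 0; i < length; ++i)
    {
        float sum = 0f;
        float totalWeight = 0f;
        for (int j = 0; j < weightedValues.Count; ++j)
        {
            // Only valid (non-negative) values contribute to the sum and the normalise factor
            if (weightedValues[j][i] >= 0)
            {
                sum += weightedValues[j][i];
                totalWeight += weights[j];
            }
        }
        // If nothing contributed, keep the missing-value sentinel
        result[i] = totalWeight > 0 ? sum / totalWeight : missing;
    }
    return result;
}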