public bool ReadNextHistorianBlock(out DataPoint[] points)
{
    try
    {
        CurrentTimestamp = new DateTime((long)m_dataPoint.Timestamp);
        ulong currentTimestamp = m_dataPoint.Timestamp;
        int timeComparison;
        bool readSuccess = true;

        // Create a new data block for current timestamp and load first/prior point
        m_dataBlock.Clear();
        m_dataBlock[m_dataPoint.PointID] = m_dataPoint.Clone();

        // Load remaining data for current timestamp
        do
        {
            // Scan to next record
            if (!m_historianClient.ReadNext(m_dataPoint))
            {
                readSuccess = false;
                break;
            }

            timeComparison = DataPoint.CompareTimestamps(m_dataPoint.Timestamp, currentTimestamp, m_settings.FrameRate);

            if (timeComparison == 0)
                m_dataBlock[m_dataPoint.PointID] = m_dataPoint.Clone();
        }
        while (timeComparison == 0);

        // Finished with data read
        if (!readSuccess)
        {
            ShowMessage(">>> End of data in range encountered...");
            return false;
        }

        return true;
    }
    catch (Exception ex)
    {
        ShowMessage($"!!! Failure during historian read: {ex.Message}");
        Log.Publish(MessageLevel.Error, "HistorianDataRead", "Failed while reading data from the historian", exception: ex);
        return false;
    }
    finally
    {
        PercentComplete = (int)((1.0D - new Ticks(m_endTime.Ticks - (long)m_dataPoint.Timestamp).ToSeconds() / m_timeRange) * 100.0D);
        points = m_dataBlock.Values.ToArray();
    }
}
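// A minimal usage sketch for the block reader above, assuming the surrounding class
// exposes ReadNextHistorianBlock and the CurrentTimestamp property as shown; the
// ProcessDataBlock callback is hypothetical. Each successful call yields all points
// that share one frame-rate-aligned timestamp.
public void ReadAllBlocks()
{
    while (ReadNextHistorianBlock(out DataPoint[] points))
        ProcessDataBlock(CurrentTimestamp, points);
}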
public DataPoint ApplyModifiers(DataPoint point)
{
    DataPoint newPoint;
    double[] timeSeriesOffsets = null, timeSeriesScales = null;

    if (point == null)
        return null;

    newPoint = point.Clone();

    // Apply time offset
    newPoint.Timestamp += TimeOffset;

    if (ScaleWithTimeSeries != null)
        timeSeriesScales = ScaleWithTimeSeries.GetCurrentValues();

    if (OffsetWithTimeSeries != null)
        timeSeriesOffsets = OffsetWithTimeSeries.GetCurrentValues();

    for (int i = 0; i < newPoint.Values.Length; i++)
    {
        // Apply local offset
        if (Offsets != null && Offsets.Length > i)
            newPoint.Values[i] += Offsets[i];

        // Apply local rescale
        if (Rescales != null && Rescales.Length > i)
            newPoint.Values[i] *= Rescales[i];

        // Apply time-series rescale
        if (timeSeriesScales != null && timeSeriesScales.Length > i)
            newPoint.Values[i] *= timeSeriesScales[i];

        // Apply time-series offset
        if (timeSeriesOffsets != null && timeSeriesOffsets.Length > i)
            newPoint.Values[i] += timeSeriesOffsets[i];
    }

    return newPoint;
}
public virtual void CreateCounterRateInterpolation(double ts, double timefactor)
{
    DataPoint dp = LastData.Clone();
    double deltaT = ts - dp.Timestamp;

    // Extrapolate the counter (Values[1]) forward from the last known rate (Values[0])
    dp.Values[1] = dp.Values[1] + dp.Values[0] * deltaT / timefactor;
    dp.Timestamp = ts;

    UpdateAllTargets(dp);
}
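// A hypothetical worked example for the extrapolation above: if the last point was
// sampled at t = 100 s with rate Values[0] = 5 counts/s and counter Values[1] = 40,
// then at ts = 104 s with timefactor = 1 the interpolated counter is
// 40 + 5 * (104 - 100) / 1 = 60.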
public void CloneMethod()
{
    DataPoint dp = new DataPoint();
    Assert.AreEqual(false, dp.IsEmpty, "A1");
    Assert.AreEqual("DataPoint", dp.Name, "A2");
    Assert.AreEqual(0, dp.XValue, "A3");
    Assert.AreEqual(new double[] { 0.0d }, dp.YValues, "A4");

    DataPoint dp2 = (DataPoint)dp.Clone();
    Assert.AreEqual(false, dp2.IsEmpty, "A5");
    Assert.AreEqual("DataPoint", dp2.Name, "A6");
    Assert.AreEqual(0, dp2.XValue, "A7");
    Assert.AreEqual(new double[] { 0.0d }, dp2.YValues, "A8");
}
public void CloneShouldCreateNewIdenticalDataPoint()
{
    var dataPoint = new DataPoint(new double[] { 1, 2, 3, 4 });
    var clonedDataPoint = dataPoint.Clone();

    // The clone must be a distinct instance with a distinct coordinate array...
    Assert.AreNotSame(dataPoint, clonedDataPoint);
    Assert.AreNotSame(dataPoint.Coordinates, clonedDataPoint.Coordinates);

    // ...but with identical dimensions and values
    Assert.AreEqual(4, clonedDataPoint.Dimensions);
    Assert.AreEqual(1D, clonedDataPoint[0]);
    Assert.AreEqual(2D, clonedDataPoint[1]);
    Assert.AreEqual(3D, clonedDataPoint[2]);
    Assert.AreEqual(4D, clonedDataPoint[3]);
}
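// A minimal sketch of a Clone implementation that would satisfy the test above,
// assuming a DataPoint whose only state is a coordinate array; this is illustrative,
// not the library's actual code.
public class DataPoint
{
    private readonly double[] m_coordinates;

    public DataPoint(double[] coordinates) => m_coordinates = coordinates;

    public double[] Coordinates => m_coordinates;
    public int Dimensions => m_coordinates.Length;
    public double this[int index] => m_coordinates[index];

    // Deep-copy the coordinate array so the clone shares no mutable state
    public DataPoint Clone() => new DataPoint((double[])m_coordinates.Clone());
}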
/// <summary>
/// Updates the balance chart.
/// </summary>
private void UpdateBalanceChart()
{
    BalanceChart.Series[0].Points.Clear();

    // Set the data points
    foreach (WalletJournal journal in m_ccpCharacter.WalletJournal.OrderByDescending(journal => journal.Date))
    {
        using (DataPoint dataPoint = new DataPoint())
        {
            dataPoint.SetValueXY(journal.Date.ToLocalTime(), journal.Balance);
            dataPoint.ToolTip = $"{journal.Date.ToLocalTime():G}{Environment.NewLine}{journal.Balance:N2} ISK";
            BalanceChart.Series[0].Points.Add(dataPoint.Clone());
        }
    }
}
public DataPoint ApplyModifiers(DataPoint point)
{
    point = base.ApplyModifiers(point);

    if (Manipulations.Count == 0)
        return point;

    DataPoint newPoint = point.Clone();

    foreach (Manipulation manipulation in Manipulations)
        manipulation.AddOffsets(point, newPoint);

    return newPoint;
}
/// <summary>
/// Updates the amount chart.
/// </summary>
private void UpdateAmountChart()
{
    AmountChart.Series[0].Points.Clear();
    AmountChart.Series[1].Points.Clear();

    // Set the data points for the first chart
    foreach (WalletJournal journal in m_ccpCharacter.WalletJournal.OrderByDescending(journal => journal.Date))
    {
        using (DataPoint dataPoint = new DataPoint())
        {
            dataPoint.SetValueXY(journal.Date.ToLocalTime(), journal.Amount);
            dataPoint.Color = journal.Amount < 0 ? Color.DarkRed : Color.DarkGreen;
            dataPoint.ToolTip = $"{journal.Date.ToLocalTime():G}{Environment.NewLine}{journal.Amount:N2} ISK";

            // Add the data point to series
            AmountChart.Series[0].Points.Add(dataPoint.Clone());
        }
    }

    // Set the data points for the second chart
    using (DataPoint positiveSumDataPoint = new DataPoint())
    {
        decimal positiveSum = m_ccpCharacter.WalletJournal.Where(journal => journal.Amount > 0).Sum(journal => journal.Amount);
        positiveSumDataPoint.SetValueXY(0, positiveSum);
        positiveSumDataPoint.Color = Color.DarkGreen;
        positiveSumDataPoint.ToolTip = $"Inflow{Environment.NewLine}{positiveSum:N2} ISK";

        // Add the data point to series
        AmountChart.Series[1].Points.Add(positiveSumDataPoint.Clone());
    }

    using (DataPoint negativeSumDataPoint = new DataPoint())
    {
        decimal negativeSum = m_ccpCharacter.WalletJournal.Where(journal => journal.Amount < 0).Sum(journal => journal.Amount);
        negativeSumDataPoint.SetValueXY(0, negativeSum);
        negativeSumDataPoint.Color = Color.DarkRed;
        negativeSumDataPoint.ToolTip = $"Outflow{Environment.NewLine}{negativeSum:N2} ISK";

        // Add the data point to series
        AmountChart.Series[1].Points.Add(negativeSumDataPoint.Clone());
    }
}
private void plotData(Vector point, int seriesIdx, DataPoint dataPointStyle = null, bool markLastPt = true)
{
    Series s = chart.Series[seriesIdx];
    dataPointStyle = dataPointStyle ?? new DataPoint();

    if (markLastPt)
        tryRemoveLastMarker(seriesIdx, dataPointStyle);

    // Clone the style template so each plotted point gets its own instance
    var dataPt = dataPointStyle.Clone();
    dataPt.XValue = point.x;
    dataPt.YValues = new double[] { point.y };
    s.Points.Add(dataPt);

    if (markLastPt)
        addMarker(seriesIdx);
}
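// A hypothetical call site for plotData above: plot one point in series 0 using a
// red circular marker as the style template. Vector is assumed to expose settable
// x/y members; MarkerStyle and Color come from the WinForms charting and drawing APIs.
var style = new DataPoint { MarkerStyle = MarkerStyle.Circle, Color = Color.Red };
plotData(new Vector { x = 1.5, y = 2.25 }, 0, style);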
public void Inject()
{
    foreach (DataNode.Subscription Sub in Targets)
    {
        Debug.Log("Injecting!");

        DataPoint Data2 = Data.Clone();

        if (Sub.Source == null)
            Sub.Source = this;

        if (Randomize)
        {
            for (int i = 0; i < Data.Values.Length; i++)
                Data2.Values[i] = Random.Range(0, (float)Data.Values[i]);
        }

        Sub.Target.TimeDataUpdate(Sub, Data2);
    }
}
protected override void AddEntries()
{
    if (GetGrouping == null)
        return;

    // Clear old data
    foreach (var s in Series)
        s.Points.Clear();

    // Update axis
    var x = ChartAreas[0].AxisX;
    x.IntervalOffset = 0;

    switch (GetGrouping())
    {
        case GroupingType.OneDay:
            x.IntervalType = DateTimeIntervalType.Days;
            x.Interval = 1;
            break;
        case GroupingType.OneWeek:
            x.IntervalType = DateTimeIntervalType.Weeks;
            x.IntervalOffset = -6;
            x.Interval = 1;
            break;
        case GroupingType.TwoWeeks:
            x.IntervalType = DateTimeIntervalType.Weeks;
            x.IntervalOffset = -6;
            x.Interval = 2;
            break;
        case GroupingType.OneMonth:
            x.IntervalType = DateTimeIntervalType.Months;
            x.Interval = 1;
            break;
        case GroupingType.ThreeMonths:
            x.IntervalType = DateTimeIntervalType.Months;
            x.Interval = 3;
            break;
        case GroupingType.SixMonths:
            x.IntervalType = DateTimeIntervalType.Months;
            x.Interval = 6;
            break;
        case GroupingType.OneYear:
            x.IntervalType = DateTimeIntervalType.Years;
            x.Interval = 1;
            break;
        case GroupingType.Count:
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    ChartAreas[0].CursorX.Interval = x.Interval;
    ChartAreas[0].CursorX.IntervalType = x.IntervalType;

    // Add entries
    var entries = GetEntries();

    if (entries.Length == 0)
        return;

    var intervalStart = GetStartOfInterval(entries[0].Date ?? DateTime.MaxValue);
    var intervalEnd = GetEndOfInterval(intervalStart);
    var previousIntervalStart = intervalStart.AddSeconds(-1);

    // Each tuple holds the interval start plus the accumulated time spent in HR zones
    // 1-5 (stored as offsets from DateTime.MinValue) and a tooltip string
    var points = new List<Tuple<DateTime, DateTime, DateTime, DateTime, DateTime, DateTime, string>>
    {
        new Tuple<DateTime, DateTime, DateTime, DateTime, DateTime, DateTime, string>(intervalStart, DateTime.MinValue, DateTime.MinValue, DateTime.MinValue, DateTime.MinValue, DateTime.MinValue, "")
    };

    foreach (var e in entries.Cast<TrainingEntry>())
    {
        var last = points.LastOrDefault();

        if (last != null && (e.Date ?? DateTime.MinValue) < last.Item1)
            throw new Exception("entries are not ordered");

        if (e.HrZones == null)
            throw new Exception("entry has no zonedata");

        // Are we still in the same interval?
        if (last != null && e.Date < intervalEnd)
        {
            // Add to last tuple
            points[points.Count - 1] = new Tuple<DateTime, DateTime, DateTime, DateTime, DateTime, DateTime, string>(
                last.Item1,
                last.Item2.Add(e.HrZones.Value.Zone1),
                last.Item3.Add(e.HrZones.Value.Zone2),
                last.Item4.Add(e.HrZones.Value.Zone3),
                last.Item5.Add(e.HrZones.Value.Zone4),
                last.Item6.Add(e.HrZones.Value.Zone5),
                last.Item7 + (last.Item7 != "" ? "\n" : "") + e);
        }
        else
        {
            // Update end of interval
            intervalStart = intervalEnd;
            intervalEnd = GetEndOfInterval(intervalEnd);

            while (e.Date >= intervalEnd)
            {
                // Add empty tuple
                points.Add(new Tuple<DateTime, DateTime, DateTime, DateTime, DateTime, DateTime, string>(intervalStart, DateTime.MinValue, DateTime.MinValue, DateTime.MinValue, DateTime.MinValue, DateTime.MinValue, ""));
                intervalStart = intervalEnd;
                intervalEnd = GetEndOfInterval(intervalEnd);
            }

            // Add new tuple
            points.Add(new Tuple<DateTime, DateTime, DateTime, DateTime, DateTime, DateTime, string>(
                intervalStart,
                DateTime.MinValue.Add(e.HrZones.Value.Zone1),
                DateTime.MinValue.Add(e.HrZones.Value.Zone2),
                DateTime.MinValue.Add(e.HrZones.Value.Zone3),
                DateTime.MinValue.Add(e.HrZones.Value.Zone4),
                DateTime.MinValue.Add(e.HrZones.Value.Zone5),
                e.ToString()));
        }
    }

    // Add zero-point before; clone so each series gets its own instance,
    // matching the trailing zero-point below
    var zeroPoint = new DataPoint(previousIntervalStart.ToOADate(), 0);

    foreach (var s in Series.Where(s => s.Name.StartsWith("Zone ")))
        s.Points.Add(zeroPoint.Clone());

    var max = double.MinValue;

    foreach (var t in points)
    {
        var ts = new[] { t.Item2, t.Item3, t.Item4, t.Item5, t.Item6 };
        var sum = 0.0;

        // Stack the zone times: each series plots the running sum in OADate units
        for (var i = 0; i < 5; i++)
        {
            if (ts[i].DayOfYear > 1)
            {
                sum += ts[i].DayOfYear - 1;
                ts[i] = ts[i].AddDays(-ts[i].DayOfYear + 1);
            }

            sum += ts[i].ToOADate();
            Series["Zone " + (i + 1)].Points.Add(new DataPoint(t.Item1.ToOADate(), sum) { ToolTip = t.Item7 });
        }

        if (sum > max)
            max = sum;
    }

    // Round the Y-axis maximum up to the next full hour (or two)
    var axisMax = DateTime.FromOADate(max);
    axisMax = axisMax.AddHours(axisMax.Minute > 30 ? 2 : 1);
    axisMax = axisMax.AddMinutes(-axisMax.Minute);
    axisMax = axisMax.AddSeconds(-axisMax.Second);
    ChartAreas[0].AxisY.Maximum = axisMax.ToOADate();

    // Add zero-point after
    zeroPoint = new DataPoint(intervalStart.AddSeconds(1).ToOADate(), 0);

    foreach (var s in Series.Where(s => s.Name.StartsWith("Zone ")))
        s.Points.Add(zeroPoint.Clone());
}
// Internal Functions
private void ReadArchive(object state)
{
    try
    {
        double timeRange = (m_settings.EndTime - m_settings.StartTime).TotalSeconds;
        long receivedPoints = 0;
        long processedDataBlocks = 0;
        long duplicatePoints = 0;
        Ticks operationTime;
        Ticks operationStartTime;
        DataPoint point = new DataPoint();
        DateTime firstTimestamp = new DateTime(0L);
        DateTime lastTimestamp = new DateTime(0L);

        using (Algorithm algorithm = new Algorithm())
        {
            algorithm.ShowMessage = ShowUpdateMessage;
            algorithm.MessageInterval = m_settings.MessageInterval;
            algorithm.StartTime = m_settings.StartTime;
            algorithm.EndTime = m_settings.EndTime;
            algorithm.FrameRate = m_settings.FrameRate;
            algorithm.TimeRange = timeRange;
            algorithm.Log = m_log;

            // Load historian meta-data
            ShowUpdateMessage(">>> Loading source connection metadata...");

            operationStartTime = DateTime.UtcNow.Ticks;
            algorithm.Metadata = MetadataRecord.Query(m_settings.HostAddress, m_settings.MetadataPort, m_settings.MetadataTimeout);
            operationTime = DateTime.UtcNow.Ticks - operationStartTime;

            ShowUpdateMessage("*** Metadata Load Complete ***");
            ShowUpdateMessage($"Total metadata load time {operationTime.ToElapsedTimeString(3)}...");

            ShowUpdateMessage(">>> Processing filter expression for metadata...");

            operationStartTime = DateTime.UtcNow.Ticks;
            MeasurementKey[] inputKeys = AdapterBase.ParseInputMeasurementKeys(MetadataRecord.Metadata, false, textBoxPointList.Text, "MeasurementDetail");
            List<ulong> pointIDList = inputKeys.Select(key => (ulong)key.ID).ToList();
            operationTime = DateTime.UtcNow.Ticks - operationStartTime;

            // Allow algorithm to augment (or even replace) point ID list as provided by user
            algorithm.AugmentPointIDList(pointIDList);

            ShowUpdateMessage($">>> Historian read will be for {pointIDList.Count:N0} points based on provided meta-data expression and algorithm augmentation.");

            // Reduce metadata to filtered point list
            ShowUpdateMessage($">>> Reducing metadata to the {pointIDList.Count:N0} defined points...");

            List<MetadataRecord> records = new List<MetadataRecord>();

            foreach (ulong pointID in pointIDList)
            {
                MetadataRecord record = algorithm.Metadata.FirstOrDefault(metadata => metadata.PointID == pointID);

                if ((object)record != null)
                    records.Add(record);
            }

            algorithm.Metadata = records;

            ShowUpdateMessage("*** Filter Expression Processing Complete ***");
            ShowUpdateMessage($"Total filter expression processing time {operationTime.ToElapsedTimeString(3)}...");

            ShowUpdateMessage(">>> Initializing algorithm...");
            algorithm.Initialize();

            ShowUpdateMessage(">>> Starting archive read...");

            // Start historian data read
            operationStartTime = DateTime.UtcNow.Ticks;

            using (SnapDBClient historianClient = new SnapDBClient(m_settings.HostAddress, m_settings.DataPort, m_settings.InstanceName, m_settings.StartTime, m_settings.EndTime, m_settings.FrameRate, pointIDList))
            {
                // Scan to first record
                if (!historianClient.ReadNext(point))
                    throw new InvalidOperationException("No data for specified time range in openHistorian connection!");

                ulong currentTimestamp;
                receivedPoints++;

                while (!m_formClosing)
                {
                    int timeComparison;
                    bool readSuccess = true;

                    // Create a new data block for current timestamp and load first/prior point
                    Dictionary<ulong, DataPoint> dataBlock = new Dictionary<ulong, DataPoint>
                    {
                        [point.PointID] = point.Clone()
                    };

                    currentTimestamp = point.Timestamp;

                    // Load remaining data for current timestamp
                    do
                    {
                        // Scan to next record
                        if (!historianClient.ReadNext(point))
                        {
                            readSuccess = false;
                            break;
                        }

                        receivedPoints++;
                        timeComparison = DataPoint.CompareTimestamps(point.Timestamp, currentTimestamp, m_settings.FrameRate);

                        if (timeComparison == 0)
                        {
                            // Timestamps are compared based on configured frame rate - if archived data rate is
                            // higher than configured frame rate, then data block will contain only latest values
                            if (dataBlock.ContainsKey(point.PointID))
                                duplicatePoints++;

                            dataBlock[point.PointID] = point.Clone();
                        }
                    }
                    while (timeComparison == 0);

                    // Finished with data read
                    if (!readSuccess)
                    {
                        ShowUpdateMessage(">>> End of data in range encountered...");
                        break;
                    }

                    if (++processedDataBlocks % m_settings.MessageInterval == 0)
                    {
                        ShowUpdateMessage($"{Environment.NewLine}{receivedPoints:N0} points{(duplicatePoints > 0 ? $", which included {duplicatePoints:N0} duplicates," : "")} read so far averaging {receivedPoints / (DateTime.UtcNow.Ticks - operationStartTime).ToSeconds():N0} points per second.");
                        UpdateProgressBar((int)((1.0D - new Ticks(m_settings.EndTime.Ticks - (long)point.Timestamp).ToSeconds() / timeRange) * 100.0D));
                    }

                    try
                    {
                        lastTimestamp = new DateTime((long)currentTimestamp);

                        if (firstTimestamp.Ticks == 0L)
                            firstTimestamp = lastTimestamp;

                        // Analyze data block
                        algorithm.Execute(lastTimestamp, dataBlock.Values.ToArray());
                    }
                    catch (Exception ex)
                    {
                        ShowUpdateMessage($"ERROR: Algorithm exception: {ex.Message}");
                        m_log.Publish(MessageLevel.Error, "AlgorithmError", "Failed while processing data from the historian", exception: ex);
                    }
                }

                operationTime = DateTime.UtcNow.Ticks - operationStartTime;

                if (m_formClosing)
                {
                    ShowUpdateMessage("*** Historian Read Canceled ***");
                    UpdateProgressBar(0);
                }
                else
                {
                    ShowUpdateMessage("*** Historian Read Complete ***");
                    UpdateProgressBar(100);
                }

                algorithm.Complete();

                // Show some operational statistics
                long expectedPoints = (long)(timeRange * m_settings.FrameRate * algorithm.Metadata.Count);
                double dataCompleteness = Math.Round(receivedPoints / (double)expectedPoints * 100000.0D) / 1000.0D;

                string overallSummary =
                    $"Total processing time {operationTime.ToElapsedTimeString(3)} at {receivedPoints / operationTime.ToSeconds():N0} points per second.{Environment.NewLine}" +
                    $"{Environment.NewLine}" +
                    $"          Meta-data points: {algorithm.Metadata.Count}{Environment.NewLine}" +
                    $"         Time-span covered: {timeRange:N0} seconds: {Ticks.FromSeconds(timeRange).ToElapsedTimeString(2)}{Environment.NewLine}" +
                    $"      Processed timestamps: {processedDataBlocks:N0}{Environment.NewLine}" +
                    $"           Expected points: {expectedPoints:N0} @ {m_settings.FrameRate:N0} samples per second{Environment.NewLine}" +
                    $"           Received points: {receivedPoints:N0}{Environment.NewLine}" +
                    $"          Duplicate points: {duplicatePoints:N0}{Environment.NewLine}" +
                    $"         Data completeness: {dataCompleteness:N3}%{Environment.NewLine}" +
                    $"First timestamp with data: {firstTimestamp:yyyy-MM-dd HH:mm:ss.fff}{Environment.NewLine}" +
                    $" Last timestamp with data: {lastTimestamp:yyyy-MM-dd HH:mm:ss.fff}{Environment.NewLine}";

                ShowUpdateMessage(overallSummary);
            }
        }
    }
    catch (Exception ex)
    {
        ShowUpdateMessage($"!!! Failure during historian read: {ex.Message}");
        m_log.Publish(MessageLevel.Error, "HistorianDataRead", "Failed while reading data from the historian", exception: ex);
    }
    finally
    {
        SetGoButtonEnabledState(true);
    }
}
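// Both read loops above depend on DataPoint.CompareTimestamps comparing two timestamps
// at the granularity of the configured frame rate. Its source is not shown here; the
// sketch below is one plausible implementation, assuming timestamps are tick counts
// (100 ns units) that are truncated to frame-length buckets before comparison.
public static int CompareTimestamps(ulong left, ulong right, int frameRate)
{
    // Length of one frame in ticks at the configured frame rate
    // (TimeSpan.TicksPerSecond = 10,000,000 ticks per second)
    ulong frameTicks = (ulong)(TimeSpan.TicksPerSecond / frameRate);

    // Truncate both timestamps to their containing frame, then compare frame indices:
    // returns 0 when both points fall within the same frame
    return (left / frameTicks).CompareTo(right / frameTicks);
}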