/// <summary>
/// Maps an X screen coordinate to the primary series sample drawn at that position
/// </summary>
/// <param name="xcoord">X coordinate in control (pixel) space</param>
/// <returns>The sample under the coordinate, or null when no series is assigned or xcoord precedes the first sample</returns>
public ITimeSeriesSample MapXToSample(int xcoord)
{
  if (m_Series == null) return null;

  var maxSampleWidth = m_Series.GetMaxSampleWidth();
  var data = m_Series.Data.Skip(m_HScrollPosition); //only the horizontally-scrolled-into-view portion

  //drawing starts after the vertical ruler when it sits on the left
  var x = VRulerPosition == VRulerPosition.Left ? m_VRulerWidth + 1 : 1;

  //un-zoom both coordinates so the walk below happens in unscaled sample space
  if (m_Zoom != 1.0f)
  {
    x = (int)(x / m_Zoom);
    xcoord = (int)(xcoord / m_Zoom);
  }

  //NOTE(review): the original computed an 'xcutof' right-edge clipping bound here but never
  //consulted it, so it was dead code and has been removed - confirm no clipping was intended

  //walk samples left-to-right, accumulating widths, until we pass xcoord
  ITimeSeriesSample prior = null;
  foreach (var sample in data)
  {
    if (xcoord < x) return prior;
    x += maxSampleWidth;
    x++; //margin
    prior = sample;
  }
  return prior;
}
/// <summary>
/// Creates a time-ruler tick descriptor for the given sample at the given X position
/// </summary>
public Tick(ITimeSeriesSample sample, int x, bool warp, bool dayChange)
{
  Sample    = sample;
  X         = x;
  Warp      = warp;
  DayChange = dayChange;
}
/// <summary>
/// Aggregates the specified sample into this one. Only CandleSample instances are supported;
/// any other type causes a FinancialException. Null samples are silently ignored
/// </summary>
public override void AggregateSample(ITimeSeriesSample sample)
{
  if (sample == null) return;

  //pattern match replaces the is-check + as-cast pair
  if (sample is CandleSample candle)
    this.AggregateSample(candle);
  else //fixed 'AggergateSample' typo in the error message
    throw new FinancialException("{0}.AggregateSample({1}) unsupported".Args(GetType().Name, sample.GetType().Name));
}
/// <summary>
/// Aggregates the specified sample into this one. Only CandleSample instances are supported.
/// Null samples are silently ignored
/// </summary>
public override void AggregateSample(ITimeSeriesSample sample)
{
  if (sample == null) return;

  if (sample is CandleSample candle)
    this.AggregateSample(candle);
  else
    //NotSupportedException is more specific than bare Exception and is still caught by
    //existing catch(Exception) handlers; also fixed the 'AggergateSample' typo
    throw new NotSupportedException(string.Format("{0}.AggregateSample({1}) unsupported", GetType().Name, sample.GetType().Name));
}
/// <summary>
/// Initializes event arguments describing a mouse event over a chart pane,
/// including the sample under the cursor's X and the value at the cursor's Y
/// </summary>
internal ChartPaneMouseEventArgs(MouseEventType type,
                                 TimeSeriesChart chart,
                                 PlotPane pane,
                                 MouseEventArgs mouse,
                                 ITimeSeriesSample sample,
                                 float value)
{
  EventType  = type;
  Chart      = chart;
  Pane       = pane;
  MouseEvent = mouse;
  SampleAtX  = sample;
  ValueAtY   = value;
}
/// <summary>
/// Creates mouse event arguments carrying the event context: the chart, the pane,
/// the raw mouse data, and the chart-space sample/value under the cursor
/// </summary>
internal ChartPaneMouseEventArgs(MouseEventType type,
                                 TimeSeriesChart chart,
                                 PlotPane pane,
                                 MouseEventArgs mouse,
                                 ITimeSeriesSample sample,
                                 float value)
{
  this.EventType  = type;
  this.MouseEvent = mouse;
  this.Chart      = chart;
  this.Pane       = pane;
  this.SampleAtX  = sample;
  this.ValueAtY   = value;
}
/// <summary>
/// Aggregates the specified sample into this one. Only CandleSample instances are supported;
/// any other type causes a FinancialException. Null samples are silently ignored
/// </summary>
public override void AggregateSample(ITimeSeriesSample sample)
{
  if (sample == null)
  {
    return;
  }

  //pattern match replaces the is-check + as-cast pair
  if (sample is CandleSample candle)
  {
    this.AggregateSample(candle);
  }
  else
  {
    //fixed 'AggergateSample' typo in the error message
    throw new FinancialException("{0}.AggregateSample({1}) unsupported".Args(GetType().Name, sample.GetType().Name));
  }
}
/// <summary>
/// Replace last data sample.
/// This function requires that the new sample has the same timestamp
/// as the last sample in the time series data.
/// </summary>
/// <param name="sample">Replacement sample; must not be null and its timestamp must not precede the current last sample's</param>
public void ReplaceLast(ITimeSeriesSample sample)
{
  //guard added: the original dereferenced sample.TimeStamp and threw NullReferenceException on null
  if (sample == null)
  {
    throw new WFormsException(StringConsts.ARGUMENT_ERROR + "ReplaceLast(sample==null)");
  }

  var last = m_Data.Last;
  if (last == null)
  {
    throw new WFormsException(StringConsts.ARGUMENT_ERROR + "last sample not assigned!");
  }

  //NOTE(review): the summary says "same timestamp" but only older timestamps are rejected;
  //newer ones are accepted - confirm whether that looseness is intended
  if (sample.TimeStamp < last.Value.TimeStamp)
  {
    //fixed unbalanced parenthesis in the message format: "(expected: ..., got: ...)!"
    throw new WFormsException(StringConsts.ARGUMENT_ERROR +
      "inconsistent time stamp of the new sample (expected: {0}, got: {1})!".Args(
        last.Value.TimeStamp, sample.TimeStamp));
  }

  last.Value = sample; //reuse the node already fetched instead of re-reading m_Data.Last
}
/// <summary>
/// Deletes sample from the set. This method is not efficient as it does linear list scan
/// </summary>
/// <returns>True when the sample was found and removed</returns>
public bool Delete(ITimeSeriesSample sample) => m_Data.Remove(sample);
/// <summary>
/// Adds sample to the series at the appropriate position.
/// This method respects MaxSamples and first deletes older samples making room for new additions
/// </summary>
/// <param name="sample">Sample to insert; must not be null</param>
protected void Add(ITimeSeriesSample sample)
{
  if (sample == null)
  {
    throw new WFormsException(StringConsts.ARGUMENT_ERROR + "TimeSeries.Add(sample==null)");
  }

  //remove data over max samples: evict the oldest (first) entries until there is room for one more
  while (m_Data.Count >= m_MaxSamples)
  {
    m_Data.RemoveFirst();
  }

  var dt = sample.TimeStamp;

  //fast path: new sample is not later than the head -> prepend
  var head = m_Data.First;
  if (head == null || head.Value.TimeStamp >= dt)
  {
    m_Data.AddFirst(sample);
    return;
  }

  //fast path: new sample is not earlier than the tail -> append
  var last = m_Data.Last;
  if (last.Value.TimeStamp <= dt)
  {
    m_Data.AddLast(sample);
    return;
  }

  //general case: linear scan starting from whichever end is temporally closer to dt
  var d1 = dt - head.Value.TimeStamp;
  var d2 = last.Value.TimeStamp - dt;
  if (d1 < d2)
  {
    //scan forward from the head for the first node with a later-or-equal timestamp
    var node = head;
    while (node != null)
    {
      if (node.Value.TimeStamp >= dt)
      {
        m_Data.AddBefore(node, sample);
        return;
      }
      node = node.Next;
    }
    m_Data.AddLast(sample); //should be unreachable given the tail fast path; kept as safety net
  }
  else
  {
    //scan backward from the tail for the first node with an earlier-or-equal timestamp
    var node = last;
    while (node != null)
    {
      if (node.Value.TimeStamp <= dt)
      {
        m_Data.AddAfter(node, sample);
        return;
      }
      node = node.Previous;
    }
    m_Data.AddFirst(sample); //should be unreachable given the head fast path; kept as safety net
  }
}
/// <summary>
/// Override to fold the supplied sample into this aggregate instance.
/// The base implementation always throws NotImplementedException
/// </summary>
public virtual void AggregateSample(ITimeSeriesSample sample)
  => throw new NotImplementedException($"{GetType().FullName}.AggregateSample(sample)");
/// <summary>
/// Aggregates source stream of the normally equidistant samples of the same type by the specified factor
/// </summary>
/// <param name="source">Source stream</param>
/// <param name="times">Factor of aggregation, i.e. 4x means aggregate 4 samples into one</param>
/// <param name="samplingRateVariationPct">
/// The allowed variation in timing between samples, once this variation is exceeded the system emits new aggregate
/// </param>
/// <returns>Aggregated sample stream</returns>
public static IEnumerable<ITimeSeriesSample> AggregateHomogeneousSamples(this IEnumerable<ITimeSeriesSample> source,
                                                                         uint times,
                                                                         float samplingRateVariationPct = 1f)
{
  if (source == null) yield break;

  ITimeSeriesSample prior = null;
  ITimeSeriesSample emit = null;
  float prate = 0f; //current sampling rate in ticks; 0 = not established yet
  var cnt = 0;      //number of samples folded into the current aggregate

  foreach (var sample in source)
  {
    if (prior != null)
    {
      var rate = (sample.TimeStamp - prior.TimeStamp).Ticks;
      if (prate != 0)
      {
        if ((Math.Abs(rate - prate) / Math.Max(rate, prate)) > samplingRateVariationPct)
        {
          //rate variation exceeded - flush the current aggregate and restart rate tracking
          if (emit != null)
          {
            emit.SummarizeAggregation();
            yield return emit;
            emit = null;
          }
          prate = 0;
        }
        else
        {
          prate = rate;
        }
      }
      else
      {
        //FIX: establish the initial rate. Originally this assignment sat behind the
        //'prate != 0' guard, so prate could never become nonzero and the documented
        //variation-based flush above was unreachable
        prate = rate;
      }
    }

    if (emit == null)
    {
      emit = sample.MakeAggregateInstance();
      //FIX: start the per-aggregate counter at 1 (this first sample). Originally cnt was
      //never reset, so after the first full group every later group was emitted early,
      //contradicting the documented "4x means aggregate 4 samples into one"
      cnt = 1;
      prior = sample;
      continue;
    }

    emit.AggregateSample(sample);
    cnt++;
    if (cnt >= times)
    {
      emit.SummarizeAggregation();
      yield return emit;
      emit = null;
    }
    prior = sample;
  }

  if (emit == null) yield break;

  //flush the trailing partial aggregate
  emit.SummarizeAggregation();
  yield return emit;
}
/// <summary>
/// Adds sample to the series at the appropriate position.
/// This method respects MaxSamples and first deletes older samples making room for new additions
/// </summary>
protected void Add(ITimeSeriesSample sample)
{
  if (sample == null)
    throw new WFormsException(StringConsts.ARGUMENT_ERROR + "TimeSeries.Add(sample==null)");

  //evict oldest entries until the cap allows one more
  while (m_Data.Count >= m_MaxSamples)
    m_Data.RemoveFirst();

  var stamp = sample.TimeStamp;

  //prepend when the list is empty or the new sample is not later than the head
  var first = m_Data.First;
  if (first == null || first.Value.TimeStamp >= stamp)
  {
    m_Data.AddFirst(sample);
    return;
  }

  //append when the new sample is not earlier than the tail
  var tail = m_Data.Last;
  if (tail.Value.TimeStamp <= stamp)
  {
    m_Data.AddLast(sample);
    return;
  }

  //otherwise scan from whichever end is temporally closer to the new sample
  var scanFromHead = (stamp - first.Value.TimeStamp) < (tail.Value.TimeStamp - stamp);
  if (scanFromHead)
  {
    for (var node = first; node != null; node = node.Next)
    {
      if (node.Value.TimeStamp >= stamp)
      {
        m_Data.AddBefore(node, sample);
        return;
      }
    }
    m_Data.AddLast(sample);
  }
  else
  {
    for (var node = tail; node != null; node = node.Previous)
    {
      if (node.Value.TimeStamp <= stamp)
      {
        m_Data.AddAfter(node, sample);
        return;
      }
    }
    m_Data.AddFirst(sample);
  }
}
/// <summary>
/// Replace last data sample.
/// This function requires that the new sample has the same timestamp
/// as the last sample in the time series data.
/// </summary>
/// <param name="sample">Replacement sample; must not be null and its timestamp must not precede the current last sample's</param>
public void ReplaceLast(ITimeSeriesSample sample)
{
  //guard added: the original dereferenced sample.TimeStamp and threw NullReferenceException on null
  if (sample == null)
    throw new WFormsException(StringConsts.ARGUMENT_ERROR + "ReplaceLast(sample==null)");

  var last = m_Data.Last;
  if (last == null)
    throw new WFormsException(StringConsts.ARGUMENT_ERROR + "last sample not assigned!");

  //NOTE(review): the summary says "same timestamp" but only older timestamps are rejected - confirm intent
  if (sample.TimeStamp < last.Value.TimeStamp)
    //fixed unbalanced parenthesis in the message format: "(expected: ..., got: ...)!"
    throw new WFormsException(StringConsts.ARGUMENT_ERROR +
      "inconsistent time stamp of the new sample (expected: {0}, got: {1})!".Args(
        last.Value.TimeStamp, sample.TimeStamp));

  last.Value = sample; //reuse the node already fetched instead of re-reading m_Data.Last
}
/// <summary>
/// Deletes sample from the set. This method is not efficient as it does linear list scan
/// </summary>
/// <returns>True when the sample was found and removed</returns>
public bool Delete(ITimeSeriesSample sample) => m_Data.Remove(sample);
/// <summary>
/// Computes the time-axis tick marks for the currently visible data window.
/// A tick is emitted either on a regular horizontal spacing (TICK_SPACE), on a calendar-day
/// boundary between adjacent samples, or where a time "warp" (a sudden jump in the gap
/// between samples) is detected
/// </summary>
/// <param name="data">Samples to lay out; assumed pre-ordered ascending by timestamp (see warp comment below)</param>
/// <param name="maxSampleWidth">Unscaled pixel width of the widest sample</param>
internal void ComputeTicks(IEnumerable<ITimeSeriesSample> data, int maxSampleWidth)
{
  m_Ticks = new List<Tick>();

  //x is in non-scalable coords
  float x = m_Chart.VRulerPosition == VRulerPosition.Left ? m_Chart.VRulerWidth + 1 : 1;
  x += (maxSampleWidth / 2); //center the tick under the sample glyph (integer division - half-pixel loss is intentional/tolerated)

  //right edge of the drawable area; the loop below stops once x passes it
  float xcutof = m_Chart.VRulerPosition == VRulerPosition.Right ? Width - 1 - m_Chart.VRulerWidth : Width;

  var scaledMaxSampleWidth = maxSampleWidth * m_Chart.Zoom;

  ITimeSeriesSample priorSample = null;
  Tick priorTick = null;
  int msDist = 0; //running average of the millisecond gap between adjacent samples; 0 = not established yet

  foreach (var sample in data)
  {
    var warp = false;

    //WARP detection
    if (priorSample != null)
    {
      var dist = (int)((sample.TimeStamp - priorSample.TimeStamp).TotalMilliseconds);//positive because data is pre-ordered
      if (msDist > 0)
      {
        //threshold multiplier: require a bigger relative jump for sub-2-second gaps
        var delta = 2;
        if (dist < 2000)
        {
          delta = 3;//more than 5x change
        }
        if (dist / msDist > delta) //integer division: fires when the gap grows past delta times the running average
        {
          //WARP!
          //NOTE(review): priorSample is known non-null in this branch, so the ternary's false arm is redundant
          priorTick = new Tick(sample, (int)x, true, priorSample != null ? priorSample.TimeStamp.DayOfYear != sample.TimeStamp.DayOfYear : false);
          m_Ticks.Add(priorTick);
          warp = true;
        }
        msDist = (dist + msDist) / 2; //smooth the running average with the newest gap
      }
      else
      {
        msDist = dist; //first measured gap seeds the running average
      }
    }

    if (!warp)//try to create regular scale notch
    {
      //emit a tick when enough horizontal space has passed since the previous tick, or on a day boundary
      if (
          (priorTick == null && x >= TICK_SPACE) ||
          (priorTick != null && (((x - priorTick.X) >= TICK_SPACE) || (priorSample.TimeStamp.DayOfYear != sample.TimeStamp.DayOfYear)))
         )
      {
        priorTick = new Tick(sample, (int)x, false, priorSample != null ? priorSample.TimeStamp.DayOfYear != sample.TimeStamp.DayOfYear : false);
        m_Ticks.Add(priorTick);
      }
    }

    priorSample = sample;
    x += scaledMaxSampleWidth;
    x += m_Chart.Zoom; //inter-sample margin scales with zoom
    if (x > xcutof)
    {
      break; //past the right edge of the drawable area - remaining samples are not visible
    }
  }
}