Example No. 1
 public void AddTimeSeries(TimeSeries ts)
 {
     TimeSeries.Add(ts);
     SelectionX = 390 * ts.MetaData.SelectionStart;
     SelectionWidth = 390 * ts.MetaData.SelectionEnd - SelectionX;
     RaisePropertyChanged("SelectionX");
     RaisePropertyChanged("SelectionWidth");
 }
Example No. 2
 public QueryBoxView AddQueryBox(double screenX, double screenY, TimeSeries query)
 {
     var qb = AddQueryBox(screenX, screenY);
     qb.Loaded += delegate(object sender, RoutedEventArgs args)
     {
         qb.LaunchQuery(query);
     };
     return qb;
 }
Example No. 3
        public void DrawTimeSeries(TimeSeries ts)
        {
            var stepX = (ActualWidth-60)/ts.Values.Count;
            var stepY = ActualHeight-40;
            var points = ts.ToPointCollection(stepX, stepY, 30, 20);
            var pl = new Polyline {StrokeThickness = 3, Stroke = new SolidColorBrush(Colors.Black), Points = points};

            QueryStroqs.Add(ts.ToStroq(stepX, stepY, 30, 20));
            xInqCanvas.Children.Add(pl);
        }
Example No. 4
 public QueryBoxListItemViewModel(TimeSeries ts, int resultNo)
 {
     TimeSeriesModel = ts;
     ResultNo = resultNo.ToString();
     Name = ts.MetaData.Name;
     Points = new PointCollection();
     SelectionX = GraphWidth*ts.MetaData.SelectionStart;
     SelectionWidth = GraphWidth * ts.MetaData.SelectionEnd - SelectionX;
     UpdateGraph();
 }
Example No. 5
        public void LaunchQuery(TimeSeries ts)
        {
            _numDataPoints = ts.Values.Count;
            xSketchPad.DrawXyAxis();
            xSketchPad.DrawTimeSeries(ts);
        }
Example No. 6
        public async Task<List<TimeSeries>> ParseCSV(StorageFile file)
        {
            var result = new List<TimeSeries>();

            IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read);

            using (IInputStream inputStream = stream.GetInputStreamAt(0))
            {
                var sr = new StreamReader(inputStream.AsStreamForRead());

                string line; // needed: the while condition below assigns each CSV row into it
                while ((line = sr.ReadLine()) != null)
                {
                    List<string> entries = CsvUtil.ParseLine(line);

                    var ts = new TimeSeries();
                    ts.MetaData.Name = entries[0];

                    double min = double.MaxValue;
                    double max = double.MinValue;

                    for (var i = 1; i < entries.Count; i++)
                    {
                        double val = double.Parse(entries[i]);
                        if (val < min)
                            min = val;
                        if (val > max)
                            max = val;

                        ts.Values.Add(val); // reuse the parsed double instead of parsing the entry again as float
                    }

                    ts.MetaData.Min = min;
                    ts.MetaData.Max = max;

                    result.Add(ts);
                }
            }

            return result;
        }
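A minimal usage sketch for ParseCSV, assuming a UWP/WinRT caller inside an async method; the FileOpenPicker wiring below is illustrative and not part of the original example:

        // Hypothetical caller: let the user pick a CSV file and load it with ParseCSV.
        var picker = new Windows.Storage.Pickers.FileOpenPicker();
        picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.DocumentsLibrary;
        picker.FileTypeFilter.Add(".csv");

        Windows.Storage.StorageFile file = await picker.PickSingleFileAsync();
        if (file != null)
        {
            List<TimeSeries> allSeries = await ParseCSV(file);
            Debug.WriteLine("Loaded " + allSeries.Count + " time series");
        }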
Example No. 7
 public void Translate(TimeSeries other)
 {
     // Shift this series element-wise by adding the other series' values, in place.
     for (int i = 0; i < _values.Count; ++i)
     {
         _values[i] += other.Values[i];
     }
 }
Example No. 8
        private void DrawGraph(TimeSeries ts)
        {
            float w = ((float) xCanvas.Width - 10)/ts.Values.Count;
            float h = (float) xCanvas.Height - 10;

            var pc = ts.ToPointCollection(w, h, 5, 5);
            _pl = new Polyline {StrokeThickness = 3, Stroke = new SolidColorBrush(Colors.White), Points = pc};
            xCanvas.Children.Add(_pl);

            /*
            var pls = new PolyLineSegment {Points = pc};
            var pf = new PathFigure();

            pf.Segments.Add(pls);
            pf.StartPoint = pc.First();
            var pg = new PathGeometry();
            pg.Figures.Add(pf);
            _geometryGroup.Children.Add(pg);
            */
        }
Example No. 9
 public void RemoveTimeSeries(TimeSeries ts)
 {
     TimeSeries.Remove(ts);
 }
Example No. 10
        private IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> ComputeRelativeMatches(TimeSeries sketchedTs)
        {
            sketchedTs.Translate(-0.5f);
            TimeSeries ts = DataCache.Instance.Index.Clone();
            ts.Translate(sketchedTs);
            //TimeSeries ts = DataCache.Instance.Index.GetAdded(sketchedTs);

            IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> comparisons =
                new List<KeyValuePair<TimeSeries, TimeSeriesComparison>>();

            foreach (var stock in _data)
            {
                TimeSeries nts = DataCache.Instance.GetResampled(stock, Settings.NUM_SAMPLES).GetNormalized();
                comparisons.Add(new KeyValuePair<TimeSeries, TimeSeriesComparison>(stock,
                    nts.CompareSpade(ts)));
            }

            return comparisons;
        }
Example No. 11
        private void TriggerSearch()
        {
            string annotationXCenter = SketchArea.GetAxisAnnotationAt(XyAxisLabelPosition.X_Center);

            bool isLocalSearch = (annotationXCenter != null);

            Stroq queryStroq = SketchArea.QueryStroqs[0].Clone();
            var sketchedTs = new TimeSeries();
            Rect bb = SketchArea.XyAxis.Stroq.BoundingRect;

            queryStroq.Translate(new Point(-bb.X, -bb.Y));

            List<double> dataPoints = queryStroq.Points.Select(p => (double)(SketchArea.XyAxis.Height - p.Y)).ToList();
            sketchedTs.Values = dataPoints;
            sketchedTs.Resample(Settings.NUM_SAMPLES);
            sketchedTs.NormalizeBy((float)SketchArea.XyAxis.Height);

            if (ButtonShowIndex.Visibility == Visibility.Collapsed && !isLocalSearch)
            {
                UpdateResults(ComputeRelativeMatches(sketchedTs));
                return;
            }

            if (isLocalSearch)
            {
                int numDays = int.Parse(annotationXCenter);
                UpdateResults(ComputeLocalMatches(sketchedTs, numDays));
                return;
            }

            UpdateResults(ComputeGlobalMatches(sketchedTs));
        }
Example No. 12
        private IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> ComputeGlobalMatches(TimeSeries query)
        {
            IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> comparisons =
                new List<KeyValuePair<TimeSeries, TimeSeriesComparison>>();

            foreach (var ts in _data)
            {
                var error = DtwKeogh.Run(query.GetResampled(ts.Values.Count), ts);
                comparisons.Add(new KeyValuePair<TimeSeries, TimeSeriesComparison>(ts, new TimeSeriesComparison(query, ts, error)));
            }

            return comparisons;
        }
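UpdateResults is not shown in these examples; a minimal ranking sketch over the returned comparisons, assuming TimeSeriesComparison exposes the DTW error through an Error property, could look like this:

            // Hypothetical consumer: order matches by ascending error and log the top 10.
            var ranked = comparisons.OrderBy(c => c.Value.Error).ToList();
            foreach (var pair in ranked.Take(10))
            {
                Debug.WriteLine(pair.Key.MetaData.Name + ": " + pair.Value.Error);
            }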
Example No. 13
        private IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> ComputeLocalMatches(TimeSeries sketchedTimeseries,
            int numDays)
        {
            IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> comparisons =
                new List<KeyValuePair<TimeSeries, TimeSeriesComparison>>();

            var hws = (int)(numDays * Settings.SLIDING_WINDOW_RATIO);

            foreach (var stock in _data)
            {
                TimeSeries sts = stock.Clone();

                var prices = (List<double>)sts.Values;
                for (int i = 0; i < prices.Count - numDays; i += hws)
                {
                    List<double> data = prices.GetRange(i, numDays);
                    var ts = new TimeSeries(data);
                    ts.Normalize();
                    ts.Resample(Settings.NUM_SAMPLES);
                    TimeSeriesComparison compResult = ts.CompareSpade(sketchedTimeseries);
                    compResult.IsPruned = true;
                    compResult.Start = i / (double)(prices.Count - 1);
                    compResult.End = (i + numDays) / (double)(prices.Count - 1);
                    comparisons.Add(new KeyValuePair<TimeSeries, TimeSeriesComparison>(stock, compResult));
                }
            }

            return comparisons;
        }
Example No. 14
        private void DrawLabels(TimeSeries ts)
        {
            Title.Text = ts.MetaData.Name;

            double priceOpen = ts.Values.First();
            double priceClose = ts.Values.Last();
            Value.Text = "  " + priceClose;
            var delta = (float)Math.Round(priceClose - priceOpen, 3);
            bool isNegative = delta < 0;
            Growth.Text = "   " + (!isNegative ? "+" : "") + delta;
            GrowthRate.Text = "   " + (!isNegative ? "+" : "") + Math.Round(delta / priceOpen, 3) * 100 + "%";
            Growth.Foreground = new SolidColorBrush(GetTrendColor(delta));
            GrowthRate.Foreground = new SolidColorBrush(GetTrendColor(delta));

            var labelOpen = new TextBlock();
            labelOpen.FontSize = 16;
            labelOpen.Text = ts.MetaData.Min.ToString();
            xCanvas.Children.Add(labelOpen);
            labelOpen.Measure(new Size(int.MaxValue, int.MaxValue));
            Canvas.SetTop(labelOpen, Height - 16);
            // DesiredSize is valid right after Measure; ActualWidth is not set until the layout pass.
            Canvas.SetLeft(labelOpen, -labelOpen.DesiredSize.Width - 7);

            var labelClose = new TextBlock();
            labelClose.FontSize = 16;
            labelClose.Text = ts.MetaData.Max.ToString();
            xCanvas.Children.Add(labelClose);
            labelClose.Measure(new Size(int.MaxValue, int.MaxValue));
            Canvas.SetTop(labelClose, 2);
            Canvas.SetLeft(labelClose, -labelClose.DesiredSize.Width - 7);

            var labelStartDate = new TextBlock();
            labelStartDate.FontSize = 16;
            labelStartDate.Text = "1";
            xCanvas.Children.Add(labelStartDate);
            Canvas.SetTop(labelStartDate, Height + 7);
            Canvas.SetLeft(labelStartDate, 0);

            var labelEndDate = new TextBlock();
            labelEndDate.FontSize = 16;
            labelEndDate.Text = ts.Values.Count.ToString();
            labelEndDate.Measure(new Size(int.MaxValue, int.MaxValue));
            xCanvas.Children.Add(labelEndDate);
            Canvas.SetTop(labelEndDate, Height + 7);
            Canvas.SetLeft(labelEndDate, Width - labelEndDate.DesiredSize.Width);
        }
Example No. 15
        private async void OnQueryStroqsChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            if (e.NewItems == null)
                return;

            var stroq = (Stroq) e.NewItems[0];
            stroq = stroq.Clone();
            
            var bb = xSketchPad.XyRect;
            stroq.Translate(new Point(-bb.X, -bb.Y));

            List<double> dataPoints = stroq.Points.Select(p => (bb.Height - p.Y)).ToList();
            var sketchedTs = new TimeSeries();
            sketchedTs.Values = MathUtil.ResampleLinear(dataPoints, 80);
            sketchedTs.NormalizeBy((float)bb.Height);
            if (Double.IsNaN(sketchedTs.Values[0]))
                return;

            //   IList<KeyValuePair<TimeSeries, TimeSeriesComparison>> comparisons = new List<KeyValuePair<TimeSeries, TimeSeriesComparison>>();
            var vm = (QueryBoxViewModel)DataContext;
            if (_numDataPoints == 0)
            {
                
                var latch = new CountdownLatch(vm.ListItems.Count);

                foreach (var listItem in vm.ListItems)
                {
                    await ThreadPool.RunAsync((s) =>
                    {
                        var ts = listItem.TimeSeriesModel;

                        var error = DtwKeogh.Run(sketchedTs, ts.Clone());
                        listItem.Error = error;
                        latch.Signal();
                    });
                    //comparisons.Add(new KeyValuePair<TimeSeries, TimeSeriesComparison>(ts, new TimeSeriesComparison(query, ts, error)));
                }
                latch.Wait();
                var sortedResults = vm.ListItems.OrderBy(o => o.Error).ToList();
                vm.ListItems.Clear();
                for (int i = 0; i < sortedResults.Count; i++)
                {
                    vm.ListItems.Add(new QueryBoxListItemViewModel(sortedResults[i].TimeSeriesModel, i + 1));
                }
            }
            else
            {
                var data = GetQueryData();
                //vm.ListItems.Clear();
                var items = new List<QueryBoxListItemViewModel>();
                

                foreach (var ts in data)
                {
                    items.Add(new QueryBoxListItemViewModel(ts, 0));
                }

                var latch = new CountdownLatch(data.Count);

                foreach (var listItem in items)
                {

                    await ThreadPool.RunAsync((s) =>
                    {
                        var ts = listItem.TimeSeriesModel;
                        var error = DtwKeogh.Run(sketchedTs, ts.Clone());
                        listItem.Error = error;
                        latch.Signal();
                    });
                    //comparisons.Add(new KeyValuePair<TimeSeries, TimeSeriesComparison>(ts, new TimeSeriesComparison(query, ts, error)));
                }
                latch.Wait();
                var sortedResults = items.OrderBy(o => o.Error).ToList().GetRange(0,50);

                vm.ListItems.Clear();
                for (int i = 0; i < sortedResults.Count; i++)
                {
                    var t = sortedResults[i].TimeSeriesModel.RepresentationOf.Clone();
                    t.MetaData.SelectionStart = sortedResults[i].TimeSeriesModel.MetaData.SelectionStart;
                    t.MetaData.SelectionEnd = sortedResults[i].TimeSeriesModel.MetaData.SelectionEnd;
                    vm.ListItems.Add(new QueryBoxListItemViewModel(t, i + 1));
                }
            }
            _numDataPoints = 0;
        }
Example No. 16
        public static double Run(TimeSeries q, TimeSeries data)
        {
            int r = 10;

            // Prepare q
            int m = q.Values.Count;
            double ex = 0, ex2 = 0;
            for (var i = 0; i < q.Values.Count; i++)
            {
                double d = q.Values[i];
                ex += d;
                ex2 += d*d;
            }

            double mean = ex/m;
            double std = ex2/m;
            std = Math.Sqrt(std - mean*mean);
            if (double.IsNaN(q.Values[0]))
            {
                Debug.WriteLine("Query contains NaN values"); // note: comparing with == double.NaN is always false
            }
            for (var i = 0; i < q.Values.Count; i++)
                q.Values[i] = (q.Values[i] - mean)/std;

            // Create envelope of the query: lower envelope, l, and upper envelope, u
            TimeSeries l, u;
            lower_upper_lemire(q, r, out l, out u);

            var Q_tmp = new List<Index>();
            /// Sort the query one time by abs(z-norm(q[i]))

            for (var i = 0; i < m; i++)
            {
                Q_tmp.Add(new Index());
                Q_tmp[i].value = q.Values[i];
                Q_tmp[i].index = i;
            }

            Q_tmp.Sort();

            var order = new int[m];
            var qo = new TimeSeries(m);
            var uo = new TimeSeries(m);
            var lo = new TimeSeries(m);

            for (var i = 0; i < m; i++)
            {
                int o = Q_tmp[i].index;
                order[i] = o;
                qo.Values[i] = q.Values[o];
                uo.Values[i] = u.Values[o];
                lo.Values[i] = l.Values[o];
            }

            // Initialize the cumulative lower bound
            var cb = new TimeSeries(m);
            var cb1 = new TimeSeries(m);
            var cb2 = new TimeSeries(m);

            var buffer = new TimeSeries(m);
            ex = ex2 = 0;
            int it = 0, ep = 0, k = 0;

            for (k = 0; k < m; k++)
            {
                var d = data.Values[k];
                buffer.Values[k] = d;
            }

            TimeSeries l_buff, u_buff;

            lower_upper_lemire(buffer, r, out l_buff, out u_buff);

            var t = new TimeSeries(2*m);

            // Do main task here..
            ex = 0;
            ex2 = 0;
            var bsf = double.MaxValue;
            for (var i = 0; i < buffer.Values.Count; i++)
            {
                /// Read one value at a time from the buffered data
                var d = buffer.Values[i];

                /// Calculate sum and sum of squares
                ex += d;
                ex2 += d*d;

                /// t is a circular array holding the current data
                t.Values[i%m] = d;

                /// Keep a doubled copy to avoid using the modulo "%" operator
                t.Values[(i%m) + m] = d;

                /// Start the task when there are more than m-1 points in the current chunk
                if (i >= m - 1)
                {
                    mean = ex/m;
                    std = ex2/m;
                    std = Math.Sqrt(std - mean*mean);

                    /// compute the start location of the data in the current circular array, t
                    var j = (i + 1)%m;
                    /// the start location of the data in the current chunk
                    var I = i - (m - 1);

                    /// Use a constant lower bound to prune the obvious subsequence
                    var lb_kim = lb_kim_hierarchy(t, q, j, m, mean, std, bsf);

                    if (lb_kim < bsf)
                    {
                        /// Use a linear time lower bound to prune; z-normalization of t will be computed on the fly.
                        /// uo, lo are envelopes of the query.
                        var lb_k = lb_keogh_cumulative(order, t, uo, lo, cb1, j, m, mean, std, bsf);

                        if (lb_k < bsf)
                        {
                            var tz = new TimeSeries(m);
                            /// Take another linear pass to compute the z-normalization of t.
                            /// Note that for better optimization, this could be merged into the previous function.
                            for (k = 0; k < m; k++)
                            {
                                tz.Values[k] = (t.Values[(k + j)] - mean)/std;
                            }

                            /// Use another lb_keogh to prune
                            /// qo is the sorted query. tz is unsorted z-normalized data.
                            /// l_buff, u_buff are the envelopes for all data in this chunk
                            var lb_k2 = lb_keogh_data_cumulative(order, tz, qo, cb2, l_buff, u_buff , I, m, mean,
                                std, bsf);

                            if (lb_k2 < bsf)
                            {
                                /// Choose the better lower bound between lb_keogh and lb_keogh2 for early-abandoning DTW
                                /// Note that cb and cb2 will be cumulatively summed here.
                                if (lb_k > lb_k2)
                                {
                                    cb.Values[m - 1] = cb1.Values[m - 1];
                                    for (k = m - 2; k >= 0; k--)
                                        cb.Values[k] = cb.Values[k + 1] + cb1.Values[k];
                                }
                                else
                                {
                                    cb.Values[m - 1] = cb2.Values[m - 1];
                                    for (k = m - 2; k >= 0; k--)
                                        cb.Values[k] = cb.Values[k + 1] + cb2.Values[k];
                                }

                                /// Compute DTW and early abandoning if possible
                                var dist = dtw(tz, q, cb, m, r, bsf);

                                if (dist < bsf)
                                {   /// Update bsf
                                    /// loc is the real starting location of the nearest neighbor in the file
                                    bsf = dist;
                                    //var loc = (it) * (EPOCH - m + 1) + i - m + 1;
                                    var loc = i - m + 1;
                                }
                            }

                        }
                    }

                    /// Remove obsolete points from the sum and sum of squares
                    ex -= t.Values[j];
                    ex2 -= t.Values[j] * t.Values[j];
                }

            }

            return bsf;
        }
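A minimal sketch of how Run might be called to rank candidate series, mirroring ComputeGlobalMatches above; the candidates and query variables are assumed to exist in the caller:

            // Hypothetical ranking loop: keep the candidate with the lowest DTW distance.
            TimeSeries bestMatch = null;
            double bestError = double.MaxValue;

            foreach (TimeSeries candidate in candidates)
            {
                // Resample the query to the candidate's length, as ComputeGlobalMatches does.
                double error = DtwKeogh.Run(query.GetResampled(candidate.Values.Count), candidate);
                if (error < bestError)
                {
                    bestError = error;
                    bestMatch = candidate;
                }
            }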
Example No. 17
        private List<TimeSeries> GetQueryData()
        {
            if (_numDataPoints == 0)
            {
                return Controller.Data80.ToList();
            }

            var result = new List<TimeSeries>();

            foreach (var ts in Controller.Data80)
            {
                var vals = ts.Values.ToList(); // ToList already yields a List<double>, so no cast is needed
                for (int i = 0; i < vals.Count - _numDataPoints; i += 4)
                {
                    List<double> data = vals.GetRange(i, _numDataPoints);
                    var nts = new TimeSeries(data); // GetRange already returns a fresh list
                    nts.Normalize();
                    nts = nts.GetResampled(80);
                    nts.RepresentationOf = ts.Clone();
                    nts.MetaData.IsPruned = true;
                    nts.MetaData.SelectionStart = i / (double)(vals.Count - 1);
                    nts.MetaData.SelectionEnd = (i + _numDataPoints) / (double)(vals.Count - 1);
                    result.Add(nts);
                }
            }
            return result;
        } 
Example No. 18
        private static void lower_upper_lemire(TimeSeries t, int r, out TimeSeries l, out TimeSeries u)
        {
            l = new TimeSeries(t.Values.Count);
            u = new TimeSeries(t.Values.Count);

            var du = new Deque(2*r + 2);
            var dl = new Deque(2*r + 2);

            du.push_back(0);
            dl.push_back(0);

            for (int i = 1; i < t.Values.Count; i++)
            {
                if (i > r)
                {
                    u.Values[i - r - 1] = t.Values[du.front()];
                    l.Values[i - r - 1] = t.Values[dl.front()];
                }
                if (t.Values[i] > t.Values[i - 1])
                {
                    du.pop_back();
                    while (!du.empty() && t.Values[i] > t.Values[du.back()])
                        du.pop_back();
                }
                else
                {
                    dl.pop_back();
                    while (!dl.empty() && t.Values[i] < t.Values[dl.back()])
                        dl.pop_back();
                }
                du.push_back(i);
                dl.push_back(i);
                if (i == 2*r + 1 + du.front())
                    du.pop_front();
                else if (i == 2*r + 1 + dl.front())
                    dl.pop_front();
            }

            for (int i = t.Values.Count; i < t.Values.Count + r + 1; i++)
            {
                u.Values[i - r - 1] = t.Values[du.front()];
                l.Values[i - r - 1] = t.Values[dl.front()];
                if (i - du.front() >= 2*r + 1)
                    du.pop_front();
                if (i - dl.front() >= 2*r + 1)
                    dl.pop_front();
            }
        }
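For reference, the same envelopes can be computed with a brute-force O(n·r) sliding min/max; a naive sketch like the one below (the helper name is assumed) is useful for sanity-checking the streaming version:

        // Naive envelope: for each index i, take the min/max of t over the window [i - r, i + r].
        private static void lower_upper_naive(TimeSeries t, int r, out TimeSeries l, out TimeSeries u)
        {
            int n = t.Values.Count;
            l = new TimeSeries(n);
            u = new TimeSeries(n);

            for (int i = 0; i < n; i++)
            {
                double lo = double.MaxValue, hi = double.MinValue;
                for (int j = Math.Max(0, i - r); j <= Math.Min(n - 1, i + r); j++)
                {
                    lo = Math.Min(lo, t.Values[j]);
                    hi = Math.Max(hi, t.Values[j]);
                }
                l.Values[i] = lo;
                u.Values[i] = hi;
            }
        }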
Example No. 19
        private static double lb_kim_hierarchy(TimeSeries t, TimeSeries q, int j, int len, double mean, double std,
            double bsf = double.PositiveInfinity)
        {
            /// 1 point at front and back
            double d, lb;
            double x0 = (t.Values[j] - mean)/std;
            double y0 = (t.Values[(len - 1 + j)] - mean)/std;
            lb = dist(x0, q.Values[0]) + dist(y0, q.Values[len - 1]);
            if (lb >= bsf) return lb;

            /// 2 points at front
            double x1 = (t.Values[(j + 1)] - mean)/std;
            d = Math.Min(dist(x1, q.Values[0]), dist(x0, q.Values[1]));
            d = Math.Min(d, dist(x1, q.Values[1]));
            lb += d;
            if (lb >= bsf) return lb;

            /// 2 points at back
            double y1 = (t.Values[(len - 2 + j)] - mean)/std;
            d = Math.Min(dist(y1, q.Values[len - 1]), dist(y0, q.Values[len - 2]));
            d = Math.Min(d, dist(y1, q.Values[len - 2]));
            lb += d;
            if (lb >= bsf) return lb;

            /// 3 points at front
            double x2 = (t.Values[(j + 2)] - mean)/std;
            d = Math.Min(dist(x0, q.Values[2]), dist(x1, q.Values[2]));
            d = Math.Min(d, dist(x2, q.Values[2]));
            d = Math.Min(d, dist(x2, q.Values[1]));
            d = Math.Min(d, dist(x2, q.Values[0]));
            lb += d;
            if (lb >= bsf) return lb;

            /// 3 points at back
            double y2 = (t.Values[(len - 3 + j)] - mean)/std;
            d = Math.Min(dist(y0, q.Values[len - 3]), dist(y1, q.Values[len - 3]));
            d = Math.Min(d, dist(y2, q.Values[len - 3]));
            d = Math.Min(d, dist(y2, q.Values[len - 2]));
            d = Math.Min(d, dist(y2, q.Values[len - 1]));
            lb += d;

            return lb;
        }
Example No. 20
        private static double lb_keogh_data_cumulative(int[] order, TimeSeries tz, TimeSeries qo, TimeSeries cb, TimeSeries l, TimeSeries u, int offset, int len, double mean, double std, double best_so_far = double.MaxValue)
        {
            double lb = 0;
            double uu,ll,d;

            for (int i = 0; i < len && lb < best_so_far; i++)
            {
                uu = (u.Values[offset + order[i]] - mean) / std;
                ll = (l.Values[offset + order[i]] - mean) / std;
                d = 0;
                if (qo.Values[i] > uu)
                    d = dist(qo.Values[i], uu);
                else if (qo.Values[i] < ll)
                    d = dist(qo.Values[i], ll);
                lb += d;
                cb.Values[order[i]] = d;
            }
            return lb;
        }
Example No. 21
        private static double lb_keogh_cumulative(int[] order, TimeSeries t, TimeSeries uo, TimeSeries lo, TimeSeries cb,
            int j, int len, double mean, double std, double best_so_far = double.MaxValue)
        {
            double lb = 0;
            double x, d;

            for (int i = 0; i < len && lb < best_so_far; i++)
            {
                x = (t.Values[(order[i] + j)] - mean)/std;
                d = 0;
                if (x > uo.Values[i])
                    d = dist(x, uo.Values[i]);
                else if (x < lo.Values[i])
                    d = dist(x, lo.Values[i]);
                lb += d;
                cb.Values[order[i]] = d;
            }
            return lb;
        }
Example No. 22
        private static double dtw(TimeSeries A, TimeSeries B, TimeSeries cb, int m, int r, double bsf = double.MaxValue)
        {
            int k;
            var cost = new double[(2*r + 1)];
            for (k = 0; k < 2 * r + 1; k++)
                cost[k] = double.MaxValue;

            var cost_prev = new double[(2 * r + 1)];
            for (k = 0; k < 2 * r + 1; k++)
                cost_prev[k] = double.MaxValue;

            double[] cost_tmp;
            int i, j;
            double x, y, z, min_cost;

            /// Instead of using a matrix of size O(m^2) or O(m*r), we reuse two arrays of size O(r).

            for (i = 0; i < m; i++)
            {
                k = Math.Max(0, r - i);
                min_cost = Double.MaxValue;

                for (j = Math.Max(0, i - r); j <= Math.Min(m - 1, i + r); j++, k++)
                {
                    /// Initialize the (0,0) cell
                    if ((i == 0) && (j == 0))
                    {
                        cost[k] = dist(A.Values[0], B.Values[0]);
                        min_cost = cost[k];
                        continue;
                    }

                    if ((j - 1 < 0) || (k - 1 < 0)) y = double.MaxValue;
                    else y = cost[k - 1];
                    if ((i - 1 < 0) || (k + 1 > 2 * r)) x = double.MaxValue;
                    else x = cost_prev[k + 1];
                    if ((i - 1 < 0) || (j - 1 < 0)) z = double.MaxValue;
                    else z = cost_prev[k];

                    /// Classic DTW calculation
                    cost[k] = Math.Min(Math.Min(x, y), z) + dist(A.Values[i], B.Values[j]);

                    /// Find minimum cost in row for early abandoning (possibly to use column instead of row).
                    if (cost[k] < min_cost)
                    {
                        min_cost = cost[k];
                    }
                }

                /// We can abandon early if the current cumulative distance together with the lower bound is larger than bsf
                if (i + r < m - 1 && min_cost + cb.Values[i + r + 1] >= bsf)
                {
                    return min_cost + cb.Values[i + r + 1];
                }

                /// Move current array to previous array.
                cost_tmp = cost;
                cost = cost_prev;
                cost_prev = cost_tmp;
            }
            k--;

            /// the DTW distance is in the last cell in the matrix of size O(m^2) or at the middle of our array.
            double final_dtw = cost_prev[k];
            return final_dtw;
        }
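As a cross-check for the banded, early-abandoning DTW above, a plain full-matrix DTW (no Sakoe-Chiba band, no abandoning) can be sketched as follows; the helper name dtw_naive is an assumption and dist is the same point-wise distance used above:

        // Reference DTW over the full O(m^2) cost matrix; useful only for validating dtw() on small inputs.
        private static double dtw_naive(TimeSeries A, TimeSeries B, int m)
        {
            var D = new double[m, m];

            D[0, 0] = dist(A.Values[0], B.Values[0]);
            for (int i = 1; i < m; i++) D[i, 0] = D[i - 1, 0] + dist(A.Values[i], B.Values[0]);
            for (int j = 1; j < m; j++) D[0, j] = D[0, j - 1] + dist(A.Values[0], B.Values[j]);

            for (int i = 1; i < m; i++)
            {
                for (int j = 1; j < m; j++)
                {
                    double prev = Math.Min(Math.Min(D[i - 1, j], D[i, j - 1]), D[i - 1, j - 1]);
                    D[i, j] = prev + dist(A.Values[i], B.Values[j]);
                }
            }

            return D[m - 1, m - 1];
        }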