Example #1
        /// <summary>
        /// Normalizes the given time series according to its minimum and maximum values and adjusts each value within the range [low, high].
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <param name="high">Maximum final value (Defaults to 1.0).</param>
        /// <param name="low">Minimum final value (Defaults to 0.0).</param>
        /// <param name="epsilon">Safeguard for constant (or near constant) time series as the operation implies a unit scale operation
        /// between min and max values in the tss.</param>
        /// <returns>KhivaArray with the same dimensions as tss, whose values (time series in dimension 0) have been
        /// normalized by maximum and minimum values, and scaled as per high and low parameters.</returns>
        public static KhivaArray MaxMinNorm(KhivaArray tss, double high, double low, double epsilon = 0.00000001)
        {
            var reference = tss.Reference;

            DLLNormalization.max_min_norm(ref reference, ref high, ref low, ref epsilon, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
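The binding above only forwards the call to the native max_min_norm routine. For clarity, here is a minimal standalone sketch of the same min/max scaling with the epsilon safeguard, using a hypothetical helper on a plain managed array (not part of the Khiva API):

using System;
using System.Linq;

// Illustrative only: scales one series into [low, high]; epsilon guards near-constant series.
static double[] MaxMinNormSketch(double[] series, double high = 1.0, double low = 0.0,
                                 double epsilon = 1e-8)
{
    double min = series.Min();
    double max = series.Max();
    double span = Math.Max(max - min, epsilon);   // avoids division by (almost) zero
    return series.Select(x => low + (x - min) * (high - low) / span).ToArray();
}

// {0, 1, 2, 4} -> {0, 0.25, 0.5, 1}
Console.WriteLine(string.Join(", ", MaxMinNormSketch(new[] { 0.0, 1, 2, 4 })));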
Example #2
        /// <summary>
        /// Returns the kth moment of the given time series.
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <param name="k">The specific moment to be calculated.</param>
        /// <returns>The kth moment of the given time series.</returns>
        public static KhivaArray MomentStatistics(KhivaArray tss, int k)
        {
            var reference = tss.Reference;

            DLLStatistics.moment_statistics(ref reference, ref k, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #3
        /// <summary>
        /// Calculates the Perceptually Important Points (PIPs) of the time series.
        ///
        /// [1] Fu TC, Chung FL, Luk R, and Ng CM. Representing financial time series based on data point importance.
        /// Engineering Applications of Artificial Intelligence, 21(2):277-300, 2008.
        /// </summary>
        /// <param name="arr">Expects an input array whose dimension zero is the length of the time series.</param>
        /// <param name="numberIps">The number of points to be returned.</param>
        /// <returns>KhivaArray with the numberIps most Perceptually Important Points of the input time series.</returns>
        public static KhivaArray Pip(KhivaArray arr, int numberIps)
        {
            var reference = arr.Reference;

            DLLDimensionality.pip(ref reference, ref numberIps, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #4
        /// <summary>
        /// Applies the Piecewise Linear Approximation (PLA BottomUP) to the time series.
        ///
        /// [1] Zhu Y, Wu D, Li Sh (2007). A Piecewise Linear Representation Method of Time Series Based on Feature Points.
        /// Knowledge-Based Intelligent Information and Engineering Systems 4693:1066-1072.
        /// </summary>
        /// <param name="arr">Expects a khiva_array containing the set of points to be reduced. The first component of the points in
        /// the first column and the second component of the points in the second column.</param>
        /// <param name="maxError">The maximum approximation error allowed.</param>
        /// <returns>The reduced set of points.</returns>
        public static KhivaArray PlaBottomUp(KhivaArray arr, float maxError)
        {
            var reference = arr.Reference;

            DLLDimensionality.pla_bottom_up(ref reference, ref maxError, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #5
        /// <summary>
        /// Reduces a set of points by applying the Visvalingam method (minimum triangle area) until the number
        /// of points is reduced to numPoints.
        ///
        /// [1] M. Visvalingam and J. D. Whyatt, Line generalisation by repeated elimination of points,
        /// The Cartographic Journal, 1993.
        /// </summary>
        /// <param name="points">KhivaArray with the x-coordinates and y-coordinates of the input points (x in column 0 and y in column 1).</param>
        /// <param name="numPoints">Sets the number of points returned after the execution of the method.</param>
        /// <returns>KhivaArray with the x-coordinates and y-coordinates of the selected points (x in column 0 and y in column 1).</returns>
        public static KhivaArray Visvalingam(KhivaArray points, int numPoints)
        {
            var reference = points.Reference;

            DLLDimensionality.visvalingam(ref reference, ref numPoints, out var result);
            points.Reference = reference;
            return(KhivaArray.Create(result));
        }
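As a sketch of the elimination loop described above, here is an O(n^2) illustrative version on plain value tuples (a hypothetical helper, not the Khiva implementation):

using System;
using System.Collections.Generic;

// Illustrative only: repeatedly removes the interior point whose triangle with its two
// neighbours has the smallest area, until numPoints points remain (endpoints are kept).
static List<(double X, double Y)> VisvalingamSketch(List<(double X, double Y)> pts, int numPoints)
{
    double Area((double X, double Y) a, (double X, double Y) b, (double X, double Y) c) =>
        Math.Abs(a.X * (b.Y - c.Y) + b.X * (c.Y - a.Y) + c.X * (a.Y - b.Y)) / 2.0;

    var result = new List<(double X, double Y)>(pts);
    while (result.Count > Math.Max(numPoints, 2))
    {
        int smallest = 1;
        double smallestArea = double.MaxValue;
        for (int i = 1; i < result.Count - 1; i++)
        {
            double area = Area(result[i - 1], result[i], result[i + 1]);
            if (area < smallestArea) { smallestArea = area; smallest = i; }
        }
        result.RemoveAt(smallest);
    }
    return result;
}

var reduced = VisvalingamSketch(new List<(double X, double Y)>
    { (0, 0), (1, 0.1), (2, -0.1), (3, 5), (4, 6), (5, 7) }, 3);
Console.WriteLine(string.Join(" ", reduced));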
Example #6
        /// <summary>
        /// Piecewise Aggregate Approximation (PAA) approximates a time series \f$X\f$ of length \f$n\f$ into vector
        /// \f$\bar{X}=(\bar{x}_{1},…,\bar{x}_{M})\f$ of any arbitrary length \f$M \leq n\f$ where each of \f$\bar{x_{i}}\f$ is
        /// calculated as follows:
        /// \f[
        /// \bar{x}_{i} = \frac{M}{n} \sum_{j=n/M(i-1)+1}^{(n/M)i} x_{j}.
        /// \f]
        /// This simply means that, in order to reduce the dimensionality from \f$n\f$ to \f$M\f$, we first divide the original
        /// time series into \f$M\f$ equally sized frames and then compute the mean value of each frame. The sequence
        /// assembled from the mean values is the PAA approximation (i.e., transform) of the original time series.
        /// </summary>
        /// <param name="arr">Set of points.</param>
        /// <param name="bins">Sets the total number of divisions.</param>
        /// <returns>An array of points with the reduced dimensionality.</returns>
        public static KhivaArray Paa(KhivaArray arr, int bins)
        {
            var reference = arr.Reference;

            DLLDimensionality.paa(ref reference, ref bins, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
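A standalone sketch of the frame averaging described above, assuming for simplicity that the series length is a multiple of bins (hypothetical helper, not the Khiva implementation):

using System;
using System.Linq;

// Illustrative only: PAA of a single series into `bins` equally sized frames.
static double[] PaaSketch(double[] series, int bins)
{
    int frame = series.Length / bins;   // assumes series.Length % bins == 0
    return Enumerable.Range(0, bins)
                     .Select(i => series.Skip(i * frame).Take(frame).Average())
                     .ToArray();
}

// {1, 2, 3, 4, 5, 6, 7, 8} with 2 bins -> {2.5, 6.5}
Console.WriteLine(string.Join(", ", PaaSketch(new[] { 1.0, 2, 3, 4, 5, 6, 7, 8 }, 2)));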
Example #7
        /// <summary>
        /// Symbolic Aggregate approXimation (SAX). It transforms a numeric time series into a time series of symbols with
        /// the same size. The algorithm was proposed by Lin et al. [1] and extends the PAA-based approach, inheriting the original
        /// algorithm simplicity and low computational complexity while providing satisfactory sensitivity and selectivity in
        /// range query processing. Moreover, the use of a symbolic representation opened a door to the existing wealth of
        /// data-structures and string-manipulation algorithms in computer science such as hashing, regular expression, pattern
        /// matching, suffix trees, and grammatical inference.
        ///
        /// [1] Lin, J., Keogh, E., Lonardi, S. &amp; Chiu, B. (2003) A Symbolic Representation of Time Series, with Implications for
        /// Streaming Algorithms. In proceedings of the 8th ACM SIGMOD Workshop on Research Issues in Data Mining and Knowledge
        /// Discovery. San Diego, CA. June 13.
        /// </summary>
        /// <param name="arr">KhivaArray with the input time series.</param>
        /// <param name="alphabetSize">Number of element within the alphabet.</param>
        /// <returns>An array of symbols.</returns>
        public static KhivaArray SAX(KhivaArray arr, int alphabetSize)
        {
            var reference = arr.Reference;

            DLLDimensionality.sax(ref reference, ref alphabetSize, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
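As a rough illustration of the symbolisation step for alphabetSize = 4, the following hypothetical standalone helper z-normalises the series and maps values to symbols using the usual equiprobable Gaussian breakpoints; the native implementation applies its own discretisation details (for instance, it works on the PAA representation), so this is only a sketch:

using System;
using System.Linq;

// Illustrative only: maps a series to the 4-letter alphabet {a, b, c, d} using the
// approximate Gaussian breakpoints for alphabet size 4 (-0.67, 0, 0.67) after z-normalisation.
static char[] SaxSketch(double[] series)
{
    double mean = series.Average();
    double std = Math.Sqrt(series.Select(x => (x - mean) * (x - mean)).Average());
    double[] breakpoints = { -0.6745, 0.0, 0.6745 };
    return series.Select(x =>
    {
        double z = (x - mean) / std;                  // assumes a non-constant series
        int symbol = breakpoints.Count(b => z > b);   // 0..3
        return (char)('a' + symbol);
    }).ToArray();
}

Console.WriteLine(new string(SaxSketch(new[] { 0.0, 1, 2, 3, 4, 5 })));   // "aabcdd"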
Example #8
        /// <summary>
        /// The Ramer–Douglas–Peucker algorithm (RDP) is an algorithm for reducing the number of points in a curve
        /// that is approximated by a series of points. It reduces a set of points depending on the perpendicular distance of
        /// the points and on epsilon: the greater epsilon is, the more points are deleted.
        ///
        /// [1] Urs Ramer, "An iterative procedure for the polygonal approximation of plane curves", Computer Graphics and
        /// Image Processing, 1(3), 244–256 (1972) doi:10.1016/S0146-664X(72)80017-0.
        ///
        /// [2] David Douglas &amp; Thomas Peucker, "Algorithms for the reduction of the number of points required to represent a
        /// digitized line or its caricature", The Canadian Cartographer 10(2), 112–122 (1973).
        /// doi:10.3138/FM57-6770-U75U-7727
        /// </summary>
        /// <param name="points">KhivaArray with the x-coordinates and y-coordinates of the input points (x in column 0 and y in column 1).</param>
        /// <param name="epsilon">It acts as the threshold value to decide which points should be considered meaningful or not.</param>
        /// <returns>KhivaArray with the x-coordinates and y-coordinates of the selected points (x in column 0 and y in column 1).</returns>
        public static KhivaArray RamerDouglasPeucker(KhivaArray points, double epsilon)
        {
            var reference = points.Reference;

            DLLDimensionality.ramer_douglas_peucker(ref reference, ref epsilon, out var result);
            points.Reference = reference;
            return(KhivaArray.Create(result));
        }
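The classic recursive formulation is compact enough to sketch directly; an illustrative standalone version on value tuples (a hypothetical helper, not the Khiva implementation):

using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative only: keeps a point when its perpendicular distance to the chord between the
// current endpoints exceeds epsilon, recursing on both halves around the farthest point.
static List<(double X, double Y)> RdpSketch(IReadOnlyList<(double X, double Y)> pts, double epsilon)
{
    double PerpDistance((double X, double Y) p, (double X, double Y) a, (double X, double Y) b)
    {
        double dx = b.X - a.X, dy = b.Y - a.Y;
        double len = Math.Sqrt(dx * dx + dy * dy);
        return len == 0
            ? Math.Sqrt((p.X - a.X) * (p.X - a.X) + (p.Y - a.Y) * (p.Y - a.Y))
            : Math.Abs(dy * p.X - dx * p.Y + b.X * a.Y - b.Y * a.X) / len;
    }

    int farthest = 0;
    double maxDist = 0;
    for (int i = 1; i < pts.Count - 1; i++)
    {
        double d = PerpDistance(pts[i], pts[0], pts[pts.Count - 1]);
        if (d > maxDist) { maxDist = d; farthest = i; }
    }

    if (maxDist <= epsilon)
        return new List<(double X, double Y)> { pts[0], pts[pts.Count - 1] };

    var left = RdpSketch(pts.Take(farthest + 1).ToList(), epsilon);
    var right = RdpSketch(pts.Skip(farthest).ToList(), epsilon);
    left.RemoveAt(left.Count - 1);   // the split point appears in both halves; keep it once
    left.AddRange(right);
    return left;
}

var simplified = RdpSketch(new List<(double X, double Y)> { (0, 0), (1, 0.01), (2, 0) }, 0.1);
Console.WriteLine(string.Join(" ", simplified));   // the near-collinear middle point is dropped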
Example #9
        /// <summary>
        /// Calculates the Shape-Based distance (SBD). It computes the normalized cross-correlation and returns 1.0
        /// minus the maximum correlation value between each pair of time series.
        /// </summary>
        /// <param name="arr">Expects an input array whose dimension zero is the length of the time series (all the same) and
        /// dimension one indicates the number of time series.</param>
        /// <returns>An upper triangular matrix where each position corresponds to the distance between two time series.
        /// Diagonal elements will be zero. For example: Position row 0 column 1 records the distance between time series 0
        /// and time series 1.</returns>
        public static KhivaArray Sbd(KhivaArray arr)
        {
            var reference = arr.Reference;

            DLLDistances.sbd(ref reference, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
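A brute-force standalone sketch of the quantity being computed for a single pair of series (the library works on the whole tss matrix and uses a more efficient cross-correlation; this helper is hypothetical and for illustration only):

using System;
using System.Linq;

// Illustrative only: SBD(x, y) = 1 - max over all shifts of the normalised cross-correlation.
static double SbdSketch(double[] x, double[] y)
{
    double normX = Math.Sqrt(x.Sum(v => v * v));
    double normY = Math.Sqrt(y.Sum(v => v * v));
    double best = double.MinValue;
    for (int shift = -(y.Length - 1); shift < x.Length; shift++)
    {
        double cc = 0;
        for (int i = 0; i < y.Length; i++)
        {
            int j = i + shift;
            if (j >= 0 && j < x.Length) cc += x[j] * y[i];
        }
        best = Math.Max(best, cc / (normX * normY));
    }
    return 1.0 - best;
}

Console.WriteLine(SbdSketch(new[] { 1.0, 2, 3, 4 }, new[] { 1.0, 2, 3, 4 }));   // 0 for identical series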
Example #10
        /// <summary>
        /// Applies the Piecewise Linear Approximation (PLA Sliding Window) to the time series.
        ///
        /// [1] Zhu Y, Wu D, Li Sh (2007). A Piecewise Linear Representation Method of Time Series Based on Feature Points.
        /// Knowledge-Based Intelligent Information and Engineering Systems 4693:1066-1072.
        /// </summary>
        /// <param name="arr">Expects a khiva_array containing the set of points to be reduced. The first component of the points in
        /// the first column and the second component of the points in the second column.</param>
        /// <param name="maxError">The maximum approximation error allowed.</param>
        /// <returns>The reduced set of points.</returns>
        public static KhivaArray PlaSlidingWindow(KhivaArray arr, float maxError)
        {
            var reference = arr.Reference;

            DLLDimensionality.pla_sliding_window(ref reference, ref maxError, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #11
        /// <summary>
        /// Calculates a new set of time series with zero mean and standard deviation one.
        /// </summary>
        /// <param name="tss">Time series concatenated in a single row.</param>
        /// <param name="epsilon"> Minimum standard deviation to consider. It acts as a gatekeeper for
        /// those time series that may be constant or near constant.</param>
        /// <returns>KhivaArray with the same dimensions as tss, where the time series have been
        /// adjusted to have zero mean and unit standard deviation.</returns>
        public static KhivaArray ZNorm(KhivaArray tss, double epsilon)
        {
            var reference = tss.Reference;

            DLLNormalization.znorm(ref reference, ref epsilon, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #12
        /// <summary>
        /// Estimates standard deviation based on a sample. The standard deviation is calculated using the "n-1" method.
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <returns>The sample standard deviation.</returns>
        public static KhivaArray SampleStdevStatistics(KhivaArray tss)
        {
            var reference = tss.Reference;

            DLLStatistics.sample_stdev_statistics(ref reference, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #13
        /// <summary>
        /// Discretizes the time series into equal-sized buckets based on sample quantiles.
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series. NOTE: the time series should be sorted.</param>
        /// <param name="quantiles">Number of quantiles to extract. From 0 to 1, step 1/quantiles.</param>
        /// <param name="precision">Number of decimals expected.</param>
        /// <returns>Matrix with the categories, one category per row, with the start of the category in the first column and
        /// the end in the second column.</returns>
        public static KhivaArray QuantilesCutStatistics(KhivaArray tss, float quantiles, float precision = 1e-8F)
        {
            var reference = tss.Reference;

            DLLStatistics.quantiles_cut_statistics(ref reference, ref quantiles, ref precision, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #14
        /// <summary>
        /// Normalizes the given time series according to its maximum and minimum values and its mean. It follows the
        /// formula:
        /// \f[
        /// \acute{x} = \frac{x - mean(x)}{max(x) - min(x)}.
        /// \f]
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <returns>An array with the same dimensions as tss, whose values (time series in dimension 0) have been
        /// normalized by subtracting the mean from each number and dividing each number by \f$ max(x) - min(x)\f$, in the
        /// time series.</returns>
        public static KhivaArray MeanNorm(KhivaArray tss)
        {
            var reference = tss.Reference;

            DLLNormalization.mean_norm(ref reference, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #15
        /// <summary>
        /// Returns the covariance matrix of the time series contained in tss.
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <param name="unbiased">Determines whether it divides by n - 1 (if false) or n (if true).</param>
        /// <returns>The covariance matrix of the time series.</returns>
        public static KhivaArray CovarianceStatistics(KhivaArray tss, bool unbiased)
        {
            var reference = tss.Reference;

            DLLStatistics.covariance_statistics(ref reference, ref unbiased, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #16
        /// <summary>
        /// Calculates the squared Euclidean distance between the time series, i.e. the Euclidean distance without
        /// computing the final square root.
        /// </summary>
        /// <param name="arr">Expects an input array whose dimension zero is the length of the time series (all the same) and
        /// dimension one indicates the number of time series.</param>
        /// <returns>An upper triangular matrix where each position corresponds to the distance between two time series.
        /// Diagonal elements will be zero. For example: Position row 0 column 1 records the distance between time series 0
        /// and time series 1.</returns>
        public static KhivaArray SquaredEuclidean(KhivaArray arr)
        {
            var reference = arr.Reference;

            DLLDistances.squared_euclidean(ref reference, out var result);
            arr.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #17
        /// <summary>
        /// Returns the kurtosis of tss (calculated with the adjusted Fisher-Pearson standardized moment coefficient G2).
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <returns>The kurtosis of tss.</returns>
        public static KhivaArray KurtosisStatistics(KhivaArray tss)
        {
            var reference = tss.Reference;

            DLLStatistics.kurtosis_statistics(ref reference, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #18
        /// <summary>
        /// The Ljung–Box test checks that data within the time series are independently distributed (i.e. the
        /// correlations in the population from which the sample is taken are 0, so that any observed correlations in the data
        /// result from randomness of the sampling process). Data are not independently distributed if they exhibit serial
        /// correlation.
        ///
        /// The test statistic is:
        ///
        /// \f[
        /// Q = n\left(n + 2\right)\sum_{k=1}^{h}\frac{\hat{\rho}^2_k}{n-k}
        /// \f]
        ///
        /// where ''n'' is the sample size, \f$\hat{\rho}_k\f$ is the sample autocorrelation at lag ''k'', and ''h'' is the
        /// number of lags being tested. Under \f$H_0\f$ the statistic Q follows a \f$\chi^2_{(h)}\f$. For significance level
        /// \f$\alpha\f$, the critical region for rejection of the hypothesis of randomness is:
        ///
        /// \f[
        /// Q > \chi_{1-\alpha,h}^2
        /// \f]
        ///
        /// where \f$\chi_{1-\alpha,h}^2\f$ is the \f$(1-\alpha)\f$-quantile of the chi-squared distribution with ''h'' degrees of
        /// freedom.
        ///
        /// [1] G. M. Ljung and G. E. P. Box (1978). On a measure of lack of fit in time series models.
        /// Biometrika, Volume 65, Issue 2, 1 August 1978, Pages 297–303.
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <param name="lags">Number of lags being tested.</param>
        /// <returns>The Ljung-Box statistic test.</returns>
        public static KhivaArray LjungBox(KhivaArray tss, long lags)
        {
            var reference = tss.Reference;

            DLLStatistics.ljung_box(ref reference, ref lags, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
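The statistic is easy to reproduce directly from the formula above; an illustrative standalone computation (hypothetical helper, not the Khiva implementation):

using System;
using System.Linq;

// Illustrative only: Q = n (n + 2) * sum_{k=1..h} rho_k^2 / (n - k).
static double LjungBoxSketch(double[] series, int lags)
{
    int n = series.Length;
    double mean = series.Average();
    double denom = series.Sum(v => (v - mean) * (v - mean));

    double q = 0;
    for (int k = 1; k <= lags; k++)
    {
        double num = 0;
        for (int t = k; t < n; t++)
            num += (series[t] - mean) * (series[t - k] - mean);
        double rho = num / denom;          // sample autocorrelation at lag k
        q += rho * rho / (n - k);
    }
    return n * (n + 2.0) * q;
}

Console.WriteLine(LjungBoxSketch(new[] { 1.0, 2, 1, 2, 1, 2, 1, 2 }, 2));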
Example #19
        /// <summary>
        /// Calculates the roots of a polynomial with coefficients given in \f$p\f$. The values in the rank-1 array
        /// \f$p\f$ are coefficients of a polynomial. If the length of \f$p\f$ is \f$n+1\f$, then the polynomial is described by:
        /// \f[
        /// p[0] * x^n + p[1] * x^{n-1} + ... + p[n-1] * x + p[n]
        /// \f]
        /// </summary>
        /// <param name="p">KhivaArray of polynomial coefficients.</param>
        /// <returns>KhivaArray containing the roots of the polynomial.</returns>
        public static KhivaArray Roots(KhivaArray p)
        {
            var reference = p.Reference;

            DLLPolynomial.roots(ref reference, out var result);
            p.Reference = reference;
            return(KhivaArray.Create(result));
        }
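The coefficient ordering (highest power first) is the important convention here; a quick standalone check using Horner evaluation, where p = {1, -3, 2} encodes x^2 - 3x + 2 with roots 1 and 2 (the helper below is hypothetical and only verifies the ordering):

using System;
using System.Linq;

// Illustrative only: evaluates p[0]*x^n + p[1]*x^(n-1) + ... + p[n] via Horner's rule.
static double PolyEval(double[] p, double x) => p.Aggregate(0.0, (acc, c) => acc * x + c);

double[] p = { 1, -3, 2 };                // x^2 - 3x + 2 = (x - 1)(x - 2)
Console.WriteLine(PolyEval(p, 1.0));      // 0
Console.WriteLine(PolyEval(p, 2.0));      // 0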
Example #20
        /// <summary>
        /// Normalizes the given time series according to its maximum value and adjusts each value within the range (-1, 1).
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <returns>An array with the same dimensions as tss, whose values (time series in dimension 0) have been
        /// normalized by dividing each number by 10^j, where j is the number of integer digits of the max number in the time
        /// series.</returns>
        public static KhivaArray DecimalScalingNorm(KhivaArray tss)
        {
            var reference = tss.Reference;

            DLLNormalization.decimal_scaling_norm(ref reference, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
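The scaling rule depends only on the number of integer digits of the largest absolute value; an illustrative standalone version for a single, non-zero series (hypothetical helper, not the Khiva implementation):

using System;
using System.Linq;

// Illustrative only: divides by 10^j, where j is the number of integer digits of the maximum
// absolute value in the series (assumes the series is not all zeros).
static double[] DecimalScalingSketch(double[] series)
{
    double maxAbs = series.Max(v => Math.Abs(v));
    int j = (int)Math.Floor(Math.Log10(maxAbs)) + 1;
    double scale = Math.Pow(10, j);
    return series.Select(v => v / scale).ToArray();
}

// Max is 999 (3 integer digits), so everything is divided by 10^3.
Console.WriteLine(string.Join(", ", DecimalScalingSketch(new[] { 0.0, 25, -150, 999 })));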
Example #21
        /// <summary>
        /// Calculates the sample skewness of tss (calculated with the adjusted Fisher-Pearson standardized moment coefficient G1).
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series.</param>
        /// <returns>KhivaArray containing the skewness of each time series in tss.</returns>
        public static KhivaArray SkewnessStatistics(KhivaArray tss)
        {
            var reference = tss.Reference;

            DLLStatistics.skewness_statistics(ref reference, out var result);
            tss.Reference = reference;
            return(KhivaArray.Create(result));
        }
Example #22
        /// <summary>
        /// Returns values at the given quantile.
        /// </summary>
        /// <param name="tss">Expects an input array whose dimension zero is the length of the time series (all the same) and dimension
        /// one indicates the number of time series. NOTE: the time series should be sorted.</param>
        /// <param name="q">Percentile(s) at which to extract score(s). One or many.</param>
        /// <param name="precision">Number of decimals expected.</param>
        /// <returns>Values at the given quantile.</returns>
        public static KhivaArray QuantileStatistics(KhivaArray tss, KhivaArray q, float precision = 1e-8F)
        {
            var reference  = tss.Reference;
            var qReference = q.Reference;

            DLLStatistics.quantile_statistics(ref reference, ref qReference, ref precision, out var result);
            tss.Reference = reference;
            q.Reference   = qReference;
            return(KhivaArray.Create(result));
        }
Example #23
        /// <summary>
        /// Mueen's Algorithm for Similarity Search.
        ///
        /// The result has the following structure:
        ///  - 1st dimension corresponds to the index of the subsequence in the time series.
        ///  - 2nd dimension corresponds to the number of queries.
        ///  - 3rd dimension corresponds to the number of time series.
        ///
        /// For example, the distance in the position (1, 2, 3) corresponds to the distance of the third query to the fourth time
        /// series for the second subsequence in the time series.
        ///
        /// [1] Yan Zhu, Zachary Zimmerman, Nader Shakibay Senobari, Chin-Chia Michael Yeh, Gareth Funning, Abdullah Mueen,
        /// Philip Brisk and Eamonn Keogh (2016). Matrix Profile II: Exploiting a Novel Algorithm and GPUs to break the one
        /// Hundred Million Barrier for Time Series Motifs and Joins. IEEE ICDM 2016.
        /// </summary>
        /// <param name="query">Array whose first dimension is the length of the query time series and the second dimension
        /// is the number of queries.</param>
        /// <param name="tss">Array whose first dimension is the length of the time series and the second dimension is the
        /// number of time series.</param>
        /// <returns>Resulting distances.</returns>
        public static KhivaArray Mass(KhivaArray query, KhivaArray tss)
        {
            var q = query.Reference;
            var t = tss.Reference;

            DLLMatrix.mass(ref q, ref t, out var distances);
            query.Reference = q;
            tss.Reference   = t;
            return(KhivaArray.Create(distances));
        }
Example #24
        /// <summary>
        /// Group by operation in the input array using n_columns_key columns as group keys and n_columns_value columns as
        /// values. The data is expected to be sorted. The aggregation function determines the operation to aggregate the values.
        /// </summary>
        /// <param name="array">Expects an input array whose dimension zero is the length of the time
        /// series (all the same) and dimension one indicates the number of time series.</param>
        /// <param name="aggregationFunction">Function to be used in the aggregation. It receives an integer which indicates the
        /// function to be applied:
        ///          {
        ///              0 : mean,
        ///              1 : median,
        ///              2 : min,
        ///              3 : max,
        ///              4 : stdev,
        ///              5 : var,
        ///              default : mean
        ///             }
        ///</param>
        /// <param name="nColumnsKey">Number of columns conforming the key.</param>
        /// <param name="nColumnsValue">Number of columns conforming the value (they are expected to be consecutive to the column.</param>
        /// <returns>An array with the values of the group keys aggregated using the aggregation_function.</returns>
        public static KhivaArray GroupBy(KhivaArray array, int aggregationFunction, int nColumnsKey = 1,
                                         int nColumnsValue = 1)
        {
            var reference = array.Reference;

            DLLRegularization.group_by(ref reference, ref aggregationFunction, ref nColumnsKey, ref nColumnsValue,
                                       out var result);
            array.Reference = reference;
            return(KhivaArray.Create(result));
        }
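As an illustration of the intended semantics for the simplest case (one key column, one value column, aggregationFunction = 0, i.e. mean), expressed with plain LINQ rather than the native call:

using System;
using System.Linq;

// Illustrative only: mean aggregation over a sorted key column and a value column.
double[] keys   = { 0, 0, 0, 1, 1, 2 };   // already sorted, as the method expects
double[] values = { 1, 2, 3, 4, 5, 6 };

var aggregated = keys.Zip(values, (k, v) => (Key: k, Value: v))
                     .GroupBy(p => p.Key)
                     .Select(g => (g.Key, Mean: g.Average(p => p.Value)));

foreach (var (key, mean) in aggregated)
    Console.WriteLine($"{key} -> {mean}");   // 0 -> 2, 1 -> 4.5, 2 -> 6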
Example #25
        /// <summary>
        /// Calculates the minimum norm least squares solution \f$x\f$ \f$(\left\lVert{A·x - b}\right\rVert^2)\f$ to
        /// \f$A·x = b\f$. This function uses the singular value decomposition function of ArrayFire. The actual formula that
        /// this function computes is \f$x = V·D\dagger·U^T·b\f$, where \f$U\f$ and \f$V\f$ are orthogonal matrices and
        /// \f$D\dagger\f$ contains the inverse values of the singular values contained in D if they are not zero, and zero
        /// otherwise.
        /// </summary>
        /// <param name="a">A coefficient matrix containing the coefficients of the linear equation problem to solve.</param>
        /// <param name="b">A vector with the measured values.</param>
        /// <returns>Contains the solution to the linear equation problem minimizing the norm 2.</returns>
        public static KhivaArray Lls(KhivaArray a, KhivaArray b)
        {
            var aReference = a.Reference;
            var bReference = b.Reference;

            DLLLinAlg.lls(ref aReference, ref bReference, out var result);
            a.Reference = aReference;
            b.Reference = bReference;
            return(KhivaArray.Create(result));
        }
Example #26
        /// <summary>
        /// Least squares polynomial fit. Fit a polynomial \f$p(x) = p[0] * x^{deg} + ... + p[deg]\f$ of degree \f$deg\f$
        /// to points \f$(x, y)\f$. Returns a vector of coefficients \f$p\f$ that minimises the squared error.
        /// </summary>
        /// <param name="x">x-coordinates of the M sample points \f$(x[i], y[i])\f$.</param>
        /// <param name="y">y-coordinates of the sample points.</param>
        /// <param name="deg">Degree of the fitting polynomial.</param>
        /// <returns>Polynomial coefficients, highest power first.</returns>
        public static KhivaArray PolyFit(KhivaArray x, KhivaArray y, int deg)
        {
            var xReference = x.Reference;
            var yReference = y.Reference;

            DLLPolynomial.polyfit(ref xReference, ref yReference, ref deg, out var result);
            x.Reference = xReference;
            y.Reference = yReference;
            return(KhivaArray.Create(result));
        }
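For intuition, the degree-1 case has a closed form; an illustrative standalone fit using a hypothetical helper (the library handles arbitrary degrees):

using System;
using System.Linq;

// Illustrative only: least-squares line fit, returning coefficients highest power first.
static (double Slope, double Intercept) LineFitSketch(double[] x, double[] y)
{
    double mx = x.Average(), my = y.Average();
    double sxy = x.Zip(y, (a, b) => (a - mx) * (b - my)).Sum();
    double sxx = x.Sum(a => (a - mx) * (a - mx));
    double slope = sxy / sxx;
    return (slope, my - slope * mx);
}

var (m, c) = LineFitSketch(new[] { 0.0, 1, 2, 3 }, new[] { 1.0, 3, 5, 7 });
Console.WriteLine($"p = {{{m}, {c}}}");   // p = {2, 1}, i.e. y = 2x + 1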
Example #27
        /// <summary>
        /// Calculates the N best matches of several queries in several time series.
        /// The result has the following structure:
        ///  - 1st dimension corresponds to the nth best match.
        ///  - 2nd dimension corresponds to the number of queries.
        ///  - 3rd dimension corresponds to the number of time series.
        ///
        /// For example, the distance in the position (1, 2, 3) corresponds to the second best distance of the third query in the
        /// fourth time series. The index in the position (1, 2, 3) is the index of the subsequence which leads to the
        /// second best distance of the third query in the fourth time series.
        /// </summary>
        /// <param name="query">Array whose first dimension is the length of the query time series and the second dimension
        /// is the number of queries.</param>
        /// <param name="tss">Array whose first dimension is the length of the time series and the second dimension is the
        /// number of time series.</param>
        /// <param name="n">Number of matches to return.</param>
        /// <returns>Tuple with the resulting distances and indexes.</returns>
        public static Tuple <KhivaArray, KhivaArray> FindBestNOccurrences(KhivaArray query, KhivaArray tss, long n)
        {
            var q = query.Reference;
            var t = tss.Reference;

            DLLMatrix.find_best_n_occurrences(ref q, ref t, ref n, out var distances, out var indexes);
            query.Reference = q;
            tss.Reference   = t;
            return(Tuple.Create(KhivaArray.Create(distances),
                                KhivaArray.Create(indexes)));
        }
Example #28
        /// <summary>
        /// Calculates the k-shape algorithm.
        ///
        /// [1] John Paparrizos and Luis Gravano. 2016. k-Shape: Efficient and Accurate Clustering of Time Series.
        /// SIGMOD Rec. 45, 1 (June 2016), 69-76.
        /// </summary>
        /// <param name="arr">Expects an input array whose dimension zero is the length of the time series (all the same) and
        ///                       dimension one indicates the number of time series.</param>
        /// <param name="k">The number of means to be computed.</param>
        /// <param name="tolerance">The error tolerance to stop the computation of the centroids.</param>
        /// <param name="maxIterations">The maximum number of iterations allowed.</param>
        /// <returns>Tuple with the resulting means or centroids and the resulting labels, indicating for each time series its
        /// closest centroid.</returns>
        public static Tuple <KhivaArray, KhivaArray> KShape(KhivaArray arr, int k, float tolerance = 1e-10F,
                                                            int maxIterations = 100)
        {
            var reference = arr.Reference;

            DLLClustering.k_shape(ref reference, ref k, out var centroids, out var labels, ref tolerance,
                                  ref maxIterations);
            arr.Reference = reference;
            var tuple = Tuple.Create(KhivaArray.Create(centroids), KhivaArray.Create(labels));

            return(tuple);
        }
Example #29
        /// <summary>
        /// Primitive of the STOMP self join algorithm.
        ///
        /// [1] Yan Zhu, Zachary Zimmerman, Nader Shakibay Senobari, Chin-Chia Michael Yeh, Gareth Funning, Abdullah Mueen,
        /// Philip Brisk and Eamonn Keogh (2016). Matrix Profile II: Exploiting a Novel Algorithm and GPUs to break the one
        /// Hundred Million Barrier for Time Series Motifs and Joins. IEEE ICDM 2016.
        /// </summary>
        /// <param name="tss">Query and reference time series.</param>
        /// <param name="m">Pointer to a long with the length of the subsequence.</param>
        /// <returns>Tuple with the matrix profile, which has the distance to the closest element of the subsequence from
        /// 'tss' in a different location of itself, and the matrix profile index, which points to where the aforementioned
        /// minimum is located.</returns>
        public static Tuple <KhivaArray, KhivaArray> StompSelfJoin(KhivaArray tss, long m)
        {
            var reference = tss.Reference;

            DLLMatrix.stomp_self_join(ref reference, ref m,
                                      out var p, out var i);
            tss.Reference = reference;
            var tuple = Tuple.Create(KhivaArray.Create(p),
                                     KhivaArray.Create(i));

            return(tuple);
        }
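To clarify what the matrix profile contains, here is a brute-force standalone sketch of the self-join quantities, using a hypothetical helper; the native STOMP routine computes the same profile and index far more efficiently:

using System;
using System.Linq;

// Illustrative only: for each subsequence of length m, the smallest z-normalised Euclidean
// distance to any other subsequence (excluding trivial matches near its own position) and
// the index where that minimum occurs. Assumes no constant (zero-variance) windows.
static (double[] Profile, int[] Index) MatrixProfileSketch(double[] t, long m)
{
    int w = (int)m;
    int n = t.Length - w + 1;
    int exclusion = w / 2;                          // trivial-match exclusion zone

    double[] ZNorm(int start)
    {
        var window = t.Skip(start).Take(w).ToArray();
        double mean = window.Average();
        double std = Math.Sqrt(window.Select(v => (v - mean) * (v - mean)).Average());
        return window.Select(v => (v - mean) / std).ToArray();
    }

    var profile = Enumerable.Repeat(double.MaxValue, n).ToArray();
    var index = new int[n];
    for (int i = 0; i < n; i++)
    {
        var a = ZNorm(i);
        for (int j = 0; j < n; j++)
        {
            if (Math.Abs(i - j) <= exclusion) continue;
            var b = ZNorm(j);
            double dist = Math.Sqrt(a.Zip(b, (u, v) => (u - v) * (u - v)).Sum());
            if (dist < profile[i]) { profile[i] = dist; index[i] = j; }
        }
    }
    return (profile, index);
}

var (profile, index) = MatrixProfileSketch(new[] { 1.0, 2, 3, 1, 2, 3, 1, 2, 3 }, 3);
Console.WriteLine($"{profile[0]:F3} at {index[0]}");   // 0.000 at 3: subsequence 0 repeats at offset 3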
Example #30
        /// <summary>
        /// Primitive of the STOMP algorithm.
        ///
        /// [1] Yan Zhu, Zachary Zimmerman, Nader Shakibay Senobari, Chin-Chia Michael Yeh, Gareth Funning, Abdullah Mueen,
        /// Philip Brisk and Eamonn Keogh (2016). Matrix Profile II: Exploiting a Novel Algorithm and GPUs to break the one
        /// Hundred Million Barrier for Time Series Motifs and Joins. IEEE ICDM 2016.
        /// </summary>
        /// <param name="tssa">Query time series.</param>
        /// <param name="tssb">Reference time series.</param>
        /// <param name="m">Pointer to a long with the length of the subsequence.</param>
        /// <returns>Tuple with the matrix profile, which has the distance to the closest element of the subsequence from
        /// 'tssa' in 'tssb', and the matrix profile index, which points to where the aforementioned minimum is located.</returns>
        public static Tuple <KhivaArray, KhivaArray> Stomp(KhivaArray tssa, KhivaArray tssb, long m)
        {
            var aReference = tssa.Reference;
            var bReference = tssb.Reference;

            DLLMatrix.stomp(ref aReference, ref bReference, ref m,
                            out var p, out var i);
            tssa.Reference = aReference;
            tssb.Reference = bReference;
            var tuple = Tuple.Create(KhivaArray.Create(p),
                                     KhivaArray.Create(i));

            return(tuple);
        }