/// <summary>Constructs a QR decomposition.</summary>
/// <param name="value">The matrix A to be decomposed.</param>
/// <param name="transpose">True if the decomposition should be performed on
/// the transpose of A rather than A itself, false otherwise. Default is false.</param>
/// <param name="economy">True to perform the economy decomposition, where only
/// the information needed to solve linear systems is computed. If set to false,
/// the full QR decomposition will be computed. Default is true.</param>
/// <param name="inPlace">True if the decomposition should be done in place,
/// overwriting the given matrix <paramref name="value"/>. Default is false.</param>
public JaggedQrDecompositionF(Single[][] value, bool transpose = false,
    bool economy = true, bool inPlace = false)
{
    if (value == null)
        throw new ArgumentNullException("value", "Matrix cannot be null.");

    if ((!transpose && value.Length < value[0].Length) ||
        (transpose && value[0].Length < value.Length))
        throw new ArgumentException("Matrix has more columns than rows.", "value");

    // https://www.inf.ethz.ch/personal/gander/papers/qrneu.pdf

    if (transpose)
    {
        this.p = value.Rows();

        if (economy)
        {
            // Compute the faster, economy-sized QR decomposition
            this.qr = value.Transpose(inPlace: inPlace);
        }
        else
        {
            // Create room to store the full decomposition
            this.qr = Jagged.Create(value.Columns(), value.Columns(), value, transpose: true);
        }
    }
    else
    {
        this.p = value.Columns();

        if (economy)
        {
            // Compute the faster, economy-sized QR decomposition
            this.qr = inPlace ? value : value.Copy();
        }
        else
        {
            // Create room to store the full decomposition
            this.qr = Jagged.Create(value.Rows(), value.Rows(), value, transpose: false);
        }
    }

    this.economy = economy;
    this.n = qr.Rows();
    this.m = qr.Columns();
    this.Rdiag = new Single[m];

    for (int k = 0; k < m; k++)
    {
        // Compute 2-norm of k-th column without under/overflow.
        Single nrm = 0;
        for (int i = k; i < qr.Length; i++)
            nrm = Tools.Hypotenuse(nrm, qr[i][k]);

        if (nrm != 0)
        {
            // Form k-th Householder vector.
            if (qr[k][k] < 0)
                nrm = -nrm;

            for (int i = k; i < qr.Length; i++)
                qr[i][k] /= nrm;

            qr[k][k] += 1;

            // Apply transformation to remaining columns.
            for (int j = k + 1; j < m; j++)
            {
                Single s = 0;
                for (int i = k; i < qr.Length; i++)
                    s += qr[i][k] * qr[i][j];

                s = -s / qr[k][k];
                for (int i = k; i < qr.Length; i++)
                    qr[i][j] += s * qr[i][k];
            }
        }

        this.Rdiag[k] = -nrm;
    }
}
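// Example (added for illustration; not part of the original source). A minimal
// sketch of how the constructor above might be used. The 3x2 data is made up,
// and the sketch assumes the class also exposes a least-squares Solve(Single[][])
// member analogous to the SVD-based Solve shown later in this file.
internal static void QrUsageSketch()
{
    Single[][] a =
    {
        new Single[] { 1, 2 },
        new Single[] { 3, 4 },
        new Single[] { 5, 6 }
    };

    // Economy QR of A (the default); pass economy: false for the full decomposition.
    var qr = new JaggedQrDecompositionF(a);

    // For an overdetermined system, Solve returns the x minimizing ||A*x - b||.
    Single[][] b = { new Single[] { 1 }, new Single[] { 2 }, new Single[] { 3 } };
    Single[][] x = qr.Solve(b);
}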
/// <summary>
/// Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x"/> input.</param>
/// <param name="weights">The weight of importance for each input-output pair.</param>
/// <returns>
/// A model that has learned how to produce <paramref name="y"/> given <paramref name="x"/>.
/// </returns>
public MultinomialLogisticRegression Learn(double[][] x, int[] y, double[] weights = null)
{
    // Convert the integer class labels into one-of-K (one-hot) target vectors.
    return Learn(x, Jagged.OneHot(y), weights);
}
/// <summary>
/// Computes the loss between the expected values (ground truth)
/// and the given actual values that have been predicted.
/// </summary>
/// <param name="actual">The actual values that have been predicted.</param>
/// <returns>
/// The loss value between the expected values and
/// the actual predicted values.
/// </returns>
public double Loss(int[] actual)
{
    return Loss(Jagged.OneHot(actual));
}
public double[][] Generate(int samples, Random source)
{
    return Generate(samples, Jagged.Create<double>(samples, dimension), source);
}
/// <summary>
/// Learns a model that can map the given inputs to the desired outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="weights">The weight of importance for each input sample.</param>
/// <returns>
/// A model that has learned how to produce suitable outputs
/// given the input data <paramref name="x"/>.
/// </returns>
public MultivariateKernelRegression Learn(double[][] x, double[] weights = null)
{
    this.sourceCentered = null;
    this.StandardDeviations = null;
    this.featureMean = null;
    this.featureGrandMean = 0;

    double[][] K;
    if (Method == PrincipalComponentMethod.KernelMatrix)
    {
        K = x;
        if (centerFeatureSpace) // Center the Gram (kernel) matrix if requested
            K = Accord.Statistics.Kernels.Kernel.Center(K, out featureMean, out featureGrandMean); // do not overwrite
    }
    else
    {
        this.NumberOfInputs = x.Columns();
        this.Means = x.Mean(dimension: 0);
        this.sourceCentered = Overwrite ? x : Jagged.CreateAs(x);
        x.Subtract(Means, dimension: 0, result: sourceCentered);

        if (Method == PrincipalComponentMethod.Standardize)
        {
            this.StandardDeviations = x.StandardDeviation(Means);
            sourceCentered.Divide(StandardDeviations, dimension: 0, result: sourceCentered);
        }

        // Create the Gram (kernel) matrix
        K = kernel.ToJagged(x: sourceCentered);

        if (centerFeatureSpace) // Center the Gram (kernel) matrix if requested
            K = Accord.Statistics.Kernels.Kernel.Center(K, out featureMean, out featureGrandMean, result: K); // overwrite
    }

    // Perform the Eigenvalue Decomposition (EVD) of the kernel matrix
    var evd = new JaggedEigenvalueDecomposition(K, assumeSymmetric: true, sort: true);

    // Get the eigenvalues and corresponding eigenvectors
    int numberOfSamples = x.Length;
    double[] evals = evd.RealEigenvalues;
    double[][] eigs = evd.Eigenvectors;

    int nonzero = evd.Rank;
    if (NumberOfInputs != 0)
        nonzero = Math.Min(nonzero, NumberOfInputs);
    if (NumberOfOutputs != 0)
        nonzero = Math.Min(nonzero, NumberOfOutputs);

    // Eliminate unwanted components
    eigs = eigs.Get(null, 0, nonzero);
    evals = evals.Get(0, nonzero);

    // Normalize eigenvectors
    if (centerFeatureSpace)
        eigs.Divide(evals.Sqrt(), dimension: 0, result: eigs);

    if (Whiten)
        eigs.Divide(evals.Sqrt(), dimension: 0, result: eigs);

    //this.Eigenvalues = evals.Divide(numberOfSamples - 1);
    this.Eigenvalues = evals;
    this.SingularValues = evals.Divide(numberOfSamples - 1).Sqrt();
    this.ComponentVectors = eigs.Transpose();

    if (allowReversion)
    {
        // Project the original data into principal component space
        this.result = Matrix.Dot(K, eigs).ToMatrix();
    }

    // Compute additional information about the analysis and create the
    // object-oriented structure to hold the principal components found.
    CreateComponents();

    Accord.Diagnostics.Debug.Assert(NumberOfOutputs > 0);

    return CreateRegression();
}
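// Example (added for illustration; not part of the original source). A minimal
// sketch of how this Learn method might be used, assuming it belongs to a kernel
// PCA analysis class (as in Accord.NET's KernelPrincipalComponentAnalysis) with a
// Gaussian kernel and a Transform method available. The input data is made up.
internal static void KernelPcaUsageSketch()
{
    double[][] inputs =
    {
        new[] { 2.5, 2.4 },
        new[] { 0.5, 0.7 },
        new[] { 2.2, 2.9 },
        new[] { 1.9, 2.2 }
    };

    var kpca = new KernelPrincipalComponentAnalysis(new Gaussian(1.0));

    // Learn the analysis (computes and decomposes the centered kernel matrix):
    MultivariateKernelRegression regression = kpca.Learn(inputs);

    // Project the data onto the principal components in feature space:
    double[][] projected = kpca.Transform(inputs);
}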
double[][] IClassifier<TInput, double[]>.Decide(TInput[] input, double[][] result)
{
    return Jagged.OneHot<double>(Decide(input), result);
}
/// <summary>
/// Computes the trace of the inverse of the decomposed matrix.
/// </summary>
///
/// <param name="destroy">True to conserve memory by reusing the
/// same space used to hold the decomposition, thus destroying
/// it in the process. Pass false otherwise.</param>
///
public Decimal InverseTrace(bool destroy = false)
{
    if (!robust && !positiveDefinite)
        throw new NonPositiveDefiniteMatrixException("Matrix is not positive definite.");

    if (destroyed)
        throw new InvalidOperationException("The decomposition has been destroyed.");

    if (undefined)
        throw new InvalidOperationException("The decomposition is undefined (zero in diagonal).");

    Decimal[][] S;
    if (destroy)
    {
        S = L;
        destroyed = true;
    }
    else
    {
        S = Jagged.Zeros<Decimal>(n, n);
    }

    // References:
    // http://books.google.com/books?id=myzIPBwyBbcC&pg=PA119

    // Compute the inverse S of the lower triangular matrix L
    // and store it in place of the upper triangular part of S.
    for (int j = n - 1; j >= 0; j--)
    {
        S[j][j] = 1 / L[j][j];
        for (int i = j - 1; i >= 0; i--)
        {
            Decimal sum = 0;
            for (int k = i + 1; k <= j; k++)
                sum += L[k][i] * S[k][j];
            S[i][j] = -sum / L[i][i];
        }
    }

    // Compute the squared 2-norm of the rows of the
    // upper (right) triangular matrix S.
    Decimal trace = 0;

    if (robust)
    {
        for (int i = 0; i < S.Length; i++)
            for (int j = i; j < S[i].Length; j++)
                trace += S[i][j] * S[i][j] / D[j];
    }
    else
    {
        for (int i = 0; i < S.Length; i++)
            for (int j = i; j < S[i].Length; j++)
                trace += S[i][j] * S[i][j];
    }

    return trace;
}
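// Why summing the squared entries of S gives the trace (note added for clarity,
// not in the original source): if A = L*L', then inv(A) = inv(L')*inv(L), so
//
//     trace(inv(A)) = trace(inv(L')*inv(L)) = sum over i,j of inv(L)[i][j]^2,
//
// i.e. the squared Frobenius norm of inv(L), whose entries are exactly what S
// stores in its upper triangle. Worked check on A = [[4, 2], [2, 3]]:
// L = [[2, 0], [1, sqrt(2)]], and inv(A) = (1/8)*[[3, -2], [-2, 4]] has trace
// 7/8 = 0.875; inv(L) = [[0.5, 0], [-1/(2*sqrt(2)), 1/sqrt(2)]] has squared
// entries 0.25 + 0.125 + 0.5 = 0.875, as expected. In the robust (LDL') case
// the same sum is taken with each term additionally weighted by 1/D[j].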
int[][] ITransform<string[], int[]>.Transform(string[][] input)
{
    return Transform(input, Jagged.Zeros<int>(input.Length, NumberOfWords));
}
/// <summary>
/// Solves a linear equation system of the form AX = B.
/// </summary>
/// <param name="value">Parameter B from the equation AX = B.</param>
/// <returns>The solution X from equation AX = B.</returns>
public Single[][] Solve(Single[][] value)
{
    // An important additional property: even when no exact solution exists
    // (the matrix A is singular), replacing 1/L[i] with 0 yields a solution
    // that minimizes the residual |AX - Y|. In other words, SVD finds the
    // least-squares best-compromise solution of the linear equation system.
    // SVD can also be used in an over-determined system where the number of
    // equations exceeds that of the parameters.

    // L is a diagonal matrix with non-negative elements having the same
    // dimension as A, L[i] >= 0. The diagonal elements of L are the
    // singular values of matrix A.

    Single[][] Y = value;

    // Create L*, which is a diagonal matrix with elements
    //
    //    L*[i] = 1/L[i]  if |L[i]| > e,  else 0,
    //
    // where e is the so-called singularity threshold. In other words, if L[i]
    // is zero or close to zero (not larger than e), one must replace 1/L[i]
    // with 0. The value of e depends on the precision of the hardware. This
    // method can be used to solve linear equation systems even if the matrices
    // are singular or close to singular.

    // Singularity threshold
    Single e = this.Threshold;

    int scols = s.Length;
    var Ls = new Single[scols][];
    for (int i = 0; i < s.Length; i++)
    {
        Ls[i] = new Single[scols];
        if (System.Math.Abs(s[i]) <= e)
            Ls[i][i] = 0;
        else
            Ls[i][i] = 1 / s[i];
    }

    // (V x L*) x Ut x Y
    var VL = Matrix.Dot(v, Ls);

    // (V x L* x Ut) x Y
    int vrows = v.Rows();
    int urows = u.Rows();
    int ucols = u.Columns();
    var VLU = Jagged.Create<Single>(vrows, urows);
    for (int i = 0; i < vrows; i++)
    {
        for (int j = 0; j < urows; j++)
        {
            Single sum = 0;
            for (int k = 0; k < ucols; k++)
                sum += VL[i][k] * u[j][k];
            VLU[i][j] = sum;
        }
    }

    // (V x L* x Ut x Y)
    return Matrix.Dot(VLU, Y);
}
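// Note (added for clarity, not in the original source): in matrix terms the code
// above forms the Moore-Penrose pseudoinverse A+ = V * L* * Ut and returns
// X = A+ * Y, the minimum-norm least-squares solution of A*X = Y. The explicit
// triple loop computes (V*L*) times Ut without materializing the transpose of U,
// by indexing u[j][k] instead of u'[k][j].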
public static double[][] Expand(int[] labels, int classes)
{
    return Jagged.OneHot(labels, classes);
}
public static double[][] Expand(int[] labels, int classes, double negative, double positive)
{
    return Jagged.OneHot(labels, classes).Replace(0, negative).Replace(1, positive);
}
public static double[][] Expand(int[] labels)
{
    return Jagged.OneHot(labels, labels.DistinctCount());
}
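// Example (added for illustration; values made up): Expand maps class labels to
// one-of-K indicator rows, e.g.
//
//   Expand(new[] { 0, 2, 1 }, classes: 3)
//     => { { 1, 0, 0 },
//          { 0, 0, 1 },
//          { 0, 1, 0 } }
//
// while Expand(new[] { 0, 2, 1 }, 3, negative: -1, positive: +1) replaces the
// zeros and ones above with -1 and +1, respectively. The single-argument
// overload infers the number of classes from the distinct labels present.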
double[][] ISampleableDistribution<double[]>.Generate(int samples, Random source)
{
    return Generate(samples, Jagged.Create<double>(samples, dimension), source);
}
double[][] IRandomNumberGenerator<double[]>.Generate(int samples)
{
    return Generate(samples, Jagged.Create<double>(samples, dimension));
}
/// <summary>
/// Solves a set of equation systems of type <c>A * X = I</c>.
/// </summary>
///
public Single[][] Inverse()
{
    return Solve(Jagged.Identity<Single>(n));
}
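// Note (added for clarity, not in the original source): the inverse is obtained
// by solving A*X = I column by column, i.e. the k-th column of X solves
// A*x = e_k for the k-th standard basis vector. The Decimal overload further
// below uses the same identity trick.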
/// <summary>
/// Applies the transformation to a set of input vectors,
/// producing an associated set of output vectors.
/// </summary>
/// <param name="input">The input data to which
/// the transformation should be applied.</param>
/// <returns>The output generated by applying this
/// transformation to the given input.</returns>
public double[][] Transform(TInput[][] input)
{
    return Transform(input, Jagged.Create<double>(input.Length, NumberOfWords));
}
int[][] IClassifier<TInput, int[]>.Decide(TInput[] input, int[][] result)
{
    return Jagged.OneHot<int>(Decide(input), result);
}
int[][] ITransform<TInput[], int[]>.Transform(TInput[][] input)
{
    return Transform(input, Jagged.Create<int>(input.Length, NumberOfWords));
}
/// <summary>
/// Solves a set of equation systems of type <c>A * X = I</c>.
/// </summary>
///
public Decimal[][] Inverse()
{
    return Solve(Jagged.Identity<Decimal>(n));
}
internal T[][] create<T>(TInput[] input)
{
    return Jagged.Create<T>(input.Length, NumberOfOutputs);
}
/// <summary> /// Initializes a new instance of the <see cref="BinaryCrossEntropyLoss"/> class. /// </summary> /// <param name="expected">The expected outputs (ground truth).</param> public BinaryCrossEntropyLoss(double[] expected) : this(Jagged.ColumnVector(expected)) { }
/// <summary> /// Initializes a new instance of the <see cref="Munkres"/> class. /// </summary> /// /// <param name="numberOfJobs">The number of jobs (tasks) that can be assigned.</param> /// <param name="numberOfWorkers">The number of workers that can receive an assignment.</param> /// public Munkres(int numberOfJobs, int numberOfWorkers) { init(Jagged.Zeros(numberOfWorkers, numberOfJobs)); }
/// <summary> /// Initializes a new instance of the <see cref="SquareLoss" /> class. /// </summary> /// <param name="expected">The expected outputs (ground truth).</param> public SquareLoss(double[] expected) { Expected = Jagged.ColumnVector(expected); }
/// <summary>
/// Applies the transformation to a set of input vectors,
/// producing an associated set of output vectors.
/// </summary>
/// <param name="input">The input data to which
/// the transformation should be applied.</param>
/// <returns>The output generated by applying this
/// transformation to the given input.</returns>
public double[][] Transform(string[][] input)
{
    return Transform(input, Jagged.Zeros(input.Length, NumberOfWords));
}
/// <summary>
/// Reverts a set of projected data into its original form. A complete reverse
/// transformation is not always possible and is not even guaranteed to exist.
/// </summary>
///
/// <remarks>
/// <para>
/// This method works using a closed-form MDS approach as suggested by
/// Kwok and Tsang. It is currently a direct implementation of the algorithm
/// without any kind of optimization.
/// </para>
/// <para>
/// Reference:
/// - http://cmp.felk.cvut.cz/cmp/software/stprtool/manual/kernels/preimage/list/rbfpreimg3.html
/// </para>
/// </remarks>
///
/// <param name="data">The kpca-transformed data.</param>
/// <param name="neighbors">The number of nearest neighbors to use while constructing the pre-image.</param>
///
public double[][] Revert(double[][] data, int neighbors = 10)
{
    if (data == null)
        throw new ArgumentNullException("data");

    if (sourceCentered == null)
        throw new InvalidOperationException("The analysis must have been computed first.");

    if (neighbors < 2)
        throw new ArgumentOutOfRangeException("neighbors", "At least two neighbors are necessary.");

    // Verify whether the current kernel supports
    // distance calculation in feature space.
    var distance = kernel as IReverseDistance;
    if (distance == null)
        throw new NotSupportedException(
            "Current kernel does not support distance calculation in feature space.");

    int rows = data.Rows();

    var result = this.result;

    double[][] reversion = Jagged.Zeros(rows, sourceCentered.Columns());

    // The number of neighbors cannot exceed the number of training vectors.
    int nn = System.Math.Min(neighbors, sourceCentered.Rows());

    // For each point to be reverted
    for (int p = 0; p < rows; p++)
    {
        // 1. Get the point in feature space
        double[] y = data.GetRow(p);

        // 2. Select the nn nearest neighbors in feature space
        double[][] X = sourceCentered;
        double[] d2 = new double[result.GetLength(0)];
        int[] inx = new int[result.GetLength(0)];

        // 2.1 Calculate distances
        for (int i = 0; i < X.GetLength(0); i++)
        {
            inx[i] = i;
            d2[i] = distance.ReverseDistance(y, result.GetRow(i).First(y.Length));

            if (Double.IsNaN(d2[i]))
                d2[i] = Double.PositiveInfinity;
        }

        // 2.2 Order them
        Array.Sort(d2, inx);

        // 2.3 Select nn neighbors
        int def = 0;
        for (int i = 0; i < d2.Length && i < nn; i++, def++)
        {
            if (Double.IsInfinity(d2[i]))
                break;
        }

        inx = inx.First(def);
        X = X.Get(inx).Transpose(); // X is in input space
        d2 = d2.First(def);         // distances in input space

        // 3. Perform SVD
        //    [U,L,V] = svd(X*H);

        // TODO: If X has more columns than rows, the SV decomposition should be
        // computed on the transpose of X and the left and right vectors should
        // be swapped. This should be fixed after more unit tests are elaborated.
        var svd = new JaggedSingularValueDecomposition(X,
            computeLeftSingularVectors: true,
            computeRightSingularVectors: true,
            autoTranspose: false);

        double[][] U = svd.LeftSingularVectors;
        double[][] L = Jagged.Diagonal(def, svd.Diagonal);
        double[][] V = svd.RightSingularVectors;

        // 4. Compute projections
        //    Z = L*V';
        double[][] Z = Matrix.DotWithTransposed(L, V);

        // 5. Calculate distances
        //    d02 = sum(Z.^2)';
        double[] d02 = Matrix.Sum(Elementwise.Pow(Z, 2), 0);

        // 6. Get the pre-image using
        //    z = -0.5*inv(Z')*(d2-d02)
        double[][] inv = Matrix.PseudoInverse(Z.Transpose());
        double[] w = (-0.5).Multiply(inv).Dot(d2.Subtract(d02));
        double[] z = w.First(U.Columns());

        // 8. Project the pre-image on the original basis using
        //    x = U*z + sum(X,2)/nn;
        double[] x = (U.Dot(z)).Add(Matrix.Sum(X.Transpose(), 0).Multiply(1.0 / nn));

        // 9. Store the computed pre-image.
        for (int i = 0; i < reversion.Columns(); i++)
            reversion[p][i] = x[i];
    }

    // If the data has been standardized or centered,
    // we need to revert those operations as well.
    if (this.Method == PrincipalComponentMethod.Standardize)
    {
        // multiply by the standard deviation and add the mean
        reversion.Multiply(StandardDeviations, dimension: 0, result: reversion)
                 .Add(Means, dimension: 0, result: reversion);
    }
    else if (this.Method == PrincipalComponentMethod.Center)
    {
        // only add the mean
        reversion.Add(Means, dimension: 0, result: reversion);
    }

    return reversion;
}
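// Usage sketch (added for illustration; not part of the original source). It
// assumes a learned kernel PCA instance `kpca` as in the Learn example earlier:
//
//   double[][] projected = kpca.Transform(inputs);
//   double[][] reconstructed = kpca.Revert(projected, neighbors: 10);
//
// Revert only approximates the pre-images: the map into feature space is not
// surjective, so an exact inverse need not exist, as the summary above notes.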
private void btnRunAnalysis_Click(object sender, EventArgs e)
{
    if (dgvAnalysisSource.Rows.Count == 0)
    {
        MessageBox.Show("Please load the training data before clicking this button");
        return;
    }

    lbStatus.Text = "Gathering data. This may take a while...";
    Application.DoEvents();

    // Extract inputs and outputs
    int rows = dgvAnalysisSource.Rows.Count;
    double[][] input = Jagged.Zeros(rows, 32 * 32);
    int[] output = new int[rows];
    for (int i = 0; i < rows; i++)
    {
        input.SetRow(i, (double[])dgvAnalysisSource.Rows[i].Cells["colTrainingFeatures"].Value);
        output[i] = (int)dgvAnalysisSource.Rows[i].Cells["colTrainingLabel"].Value;
    }

    // Create the chosen kernel with the given parameters
    IKernel kernel;
    if (rbGaussian.Checked)
        kernel = new Gaussian((double)numSigma.Value);
    else
        kernel = new Polynomial((int)numDegree.Value, (double)numConstant.Value);

    // Create the Kernel Discriminant Analysis using the selected kernel
    kda = new KernelDiscriminantAnalysis(kernel)
    {
        Threshold = (double)numThreshold.Value,
        Regularization = (double)numRegularization.Value
    };

    lbStatus.Text = "Computing the analysis. This may take a significant amount of time...";
    Application.DoEvents();

    // Compute the analysis.
    kda.Learn(input, output);

    // Show information about the analysis in the form
    dgvPrincipalComponents.DataSource = kda.Discriminants;
    dgvFeatureVectors.DataSource = new ArrayDataView(kda.DiscriminantVectors);
    dgvClasses.DataSource = kda.Classes;

    // Create the component graphs
    distributionView.DataSource = kda.Discriminants;
    cumulativeView.DataSource = kda.Discriminants;

    lbStatus.Text = "Analysis complete. Click Classify to test the analysis.";
    btnClassify.Enabled = true;
}
/// <summary>
/// Applies the transformation to an input, producing an associated output.
/// </summary>
/// <param name="input">The input data to which the transformation should be applied.</param>
/// <returns>
/// The output generated by applying this transformation to the given input.
/// </returns>
public override double[][] Transform(double[][] input)
{
    return Transform(input, Jagged.Zeros(input.Length, NumberOfOutputs));
}
/// <summary>
/// Learns a model that can map the given inputs to the desired outputs.
/// </summary>
///
/// <param name="x">The model inputs.</param>
/// <param name="weights">The weight of importance for each input sample.</param>
///
/// <returns>
/// A model that has learned how to produce suitable outputs
/// given the input data <paramref name="x"/>.
/// </returns>
///
public MultivariateLinearRegression Learn(double[][] x, double[] weights = null)
{
    this.NumberOfInputs = x.Columns();

    if (Method == PrincipalComponentMethod.Center || Method == PrincipalComponentMethod.Standardize)
    {
        this.Means = x.Mean(dimension: 0);

        double[][] matrix = Overwrite ? x : Jagged.CreateAs(x);
        x.Subtract(Means, dimension: 0, result: matrix);

        if (Method == PrincipalComponentMethod.Standardize)
        {
            this.StandardDeviations = x.StandardDeviation(Means);

            // Divide the already-centered matrix (not the raw x), so the
            // standardized data stays centered even when Overwrite is false;
            // this mirrors the kernel variant of Learn shown earlier.
            matrix.Divide(StandardDeviations, dimension: 0, result: matrix);
        }

        // The principal components of 'Source' are the eigenvectors of Cov(Source).
        // Thus if we calculate the SVD of 'matrix' (which is Source standardized),
        // the columns of matrix V (right side of SVD) will be the principal
        // components of Source.

        // Perform the Singular Value Decomposition (SVD) of the matrix
        var svd = new JaggedSingularValueDecomposition(matrix,
            computeLeftSingularVectors: false,
            computeRightSingularVectors: true,
            autoTranspose: true,
            inPlace: true);

        SingularValues = svd.Diagonal;
        Eigenvalues = SingularValues.Pow(2);
        Eigenvalues.Divide(x.Rows() - 1, result: Eigenvalues);
        ComponentVectors = svd.RightSingularVectors.Transpose();
    }
    else if (Method == PrincipalComponentMethod.CovarianceMatrix
          || Method == PrincipalComponentMethod.CorrelationMatrix)
    {
        // We only have the covariance matrix. Compute the eigenvalue decomposition.
        var evd = new JaggedEigenvalueDecomposition(x, assumeSymmetric: true, sort: true);

        // Get the eigenvalues and corresponding eigenvectors
        Eigenvalues = evd.RealEigenvalues;
        SingularValues = Eigenvalues.Sqrt();
        ComponentVectors = evd.Eigenvectors.Transpose();
    }
    else
    {
        // The method type should have been validated before we even entered this section
        throw new InvalidOperationException("Invalid method, this should never happen: {0}".Format(Method));
    }

    if (Whiten)
        ComponentVectors.Divide(SingularValues, dimension: 1, result: ComponentVectors);

    // Compute additional information about the analysis and create the
    // object-oriented structure to hold the principal components found.
    CreateComponents();

    return CreateRegression();
}
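// Example (added for illustration; not part of the original source). A minimal
// sketch assuming this Learn belongs to a principal component analysis class such
// as Accord.NET's PrincipalComponentAnalysis; the data is made up.
internal static void PcaUsageSketch()
{
    double[][] inputs =
    {
        new[] { 2.5, 2.4 },
        new[] { 0.5, 0.7 },
        new[] { 2.2, 2.9 },
        new[] { 1.9, 2.2 }
    };

    var pca = new PrincipalComponentAnalysis(PrincipalComponentMethod.Center);

    // Learn returns a linear projection onto the principal components...
    MultivariateLinearRegression projection = pca.Learn(inputs);

    // ...which is the same mapping applied by Transform:
    double[][] projected = pca.Transform(inputs);
}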
/// <summary> /// Initializes a new instance of the <see cref="BinaryCrossEntropyLoss" /> class. /// </summary> /// <param name="expected">The expected outputs (ground truth).</param> public BinaryCrossEntropyLoss(double[] expected) { Expected = Jagged.ColumnVector(Classes.Decide(expected)); }
/// <summary>
/// Generates a random vector of observations from the current distribution.
/// </summary>
///
/// <param name="samples">The number of samples to generate.</param>
/// <param name="dimension">The number of dimensions in the n-dimensional sphere.</param>
///
/// <returns>A random vector of observations drawn from this distribution.</returns>
///
public static double[][] Random(int samples, int dimension)
{
    return Random(samples, dimension, Jagged.Zeros(samples, dimension));
}